1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors that can be produced while turning SQL text into an AST.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// An error raised during tokenization, carried as its rendered message
    /// (see the `From<TokenizerError>` impl below).
    TokenizerError(String),
    /// A grammar-level error raised by the parser itself.
    ParserError(String),
    /// The parser's recursion-depth budget was exhausted on deeply nested input.
    RecursionLimitExceeded,
}
60
// Builds an `Err(ParserError::ParserError(..))` whose message is `$MSG`
// immediately followed by the rendered `$loc` (no separator is inserted here;
// the location's `Display` impl provides its own leading text).
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
// Statement-family parsers split out of this (large) file — presumably the
// ALTER and MERGE statement implementations; confirm in the sibling files.
mod alter;
mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Shared budget of remaining parser recursion depth. Every guard handed
    /// out by [`RecursionCounter::try_decrease`] points at the same `Cell`, so
    /// all nesting levels draw from (and refund) a single counter.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` further nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Spends one unit of depth, returning a guard that refunds the unit
        /// when dropped, or `ParserError::RecursionLimitExceeded` once the
        /// budget is already zero.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                // `checked_sub` yields `None` exactly when the depth is 0,
                // i.e. the recursion limit has been reached.
                None => Err(ParserError::RecursionLimitExceeded),
                Some(depth_left) => {
                    self.remaining_depth.set(depth_left);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// Guard that restores one unit of recursion depth when dropped.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            // Refund the unit spent by `try_decrease` so sibling branches of
            // the parse tree can reuse it.
            let depth_left = self.remaining_depth.get();
            self.remaining_depth.set(depth_left + 1);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op stand-in for the `std` recursion counter: in no-std builds depth
    /// tracking is disabled and recursion is effectively unbounded.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The requested depth is ignored in no-std builds.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; no budget is tracked.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard mirroring the `std` module's API.
    pub struct DepthGuard {}
}
158
/// Whether a grammar element may be omitted (used by helpers that parse
/// optional syntax).
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    /// The element may be absent.
    Optional,
    /// The element must be present.
    Mandatory,
}
167
/// Whether a derived table / subquery was introduced with the LATERAL keyword.
pub enum IsLateral {
    /// LATERAL was present.
    Lateral,
    /// LATERAL was absent.
    NotLateral,
}
175
/// Result of parsing a select-item-like position that may be a plain
/// expression, a qualified wildcard (`alias.*`), or a bare `*`.
pub enum WildcardExpr {
    /// An ordinary expression.
    Expr(Expr),
    /// A qualified wildcard such as `t.*`, carrying the qualifier name.
    QualifiedWildcard(ObjectName),
    /// A bare `*`.
    Wildcard,
}
185
// Tokenizer failures are folded into `ParserError` by stringifying them, so
// `?` works on tokenizer calls inside parser methods.
impl From<TokenizerError> for ParserError {
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// `ParserError` has no underlying source error, so the default `Error`
// trait methods are sufficient (`core::error` keeps this no-std compatible).
impl core::error::Error for ParserError {}
207
/// Default recursion-depth budget used by `Parser::new`; override with
/// `Parser::with_recursion_limit`.
const DEFAULT_REMAINING_DEPTH: usize = 50;
210
// A constant EOF token with a zeroed span (line 0, column 0) — presumably
// returned when lookahead runs past the end of `tokens`; confirm at the
// `peek_token`/`next_token` use sites (not visible in this chunk).
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
// Newtype over `bool` recording whether a trailing bracket was matched.
// NOTE(review): which bracket this refers to is not visible in this chunk —
// confirm at the call sites that produce/consume it.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
239
/// Behavior toggles for the parser.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Permit trailing commas in comma-separated lists. `Parser::new`
    /// initializes this from `Dialect::supports_trailing_commas()`.
    pub trailing_commas: bool,
    /// Forwarded to the tokenizer (`Tokenizer::with_unescape`): when `true`,
    /// escape sequences in string literals are resolved during tokenization.
    pub unescape: bool,
    /// When `false`, `parse_statements` no longer requires a `;` between
    /// consecutive statements.
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Creates options with the default values.
    pub fn new() -> Self {
        Default::default()
    }

    /// Builder-style setter for [`ParserOptions::trailing_commas`].
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Builder-style setter for [`ParserOptions::unescape`].
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }

    /// Builder-style setter for
    /// [`ParserOptions::require_semicolon_stmt_delimiter`].
    ///
    /// Added for consistency: the other two options already have builder
    /// setters, while this field could previously only be set via struct
    /// literal or field assignment.
    pub fn with_require_semicolon_stmt_delimiter(
        mut self,
        require_semicolon_stmt_delimiter: bool,
    ) -> Self {
        self.require_semicolon_stmt_delimiter = require_semicolon_stmt_delimiter;
        self
    }
}
292
/// Parser modes that change how certain constructs are interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Default mode; no special casing.
    Normal,
    /// Active while parsing a CONNECT BY clause — NOTE(review): what exactly
    /// this mode suppresses is not visible in this chunk; confirm at use sites.
    ConnectBy,
    /// Active while parsing a column definition; `parse_subexpr` skips
    /// COLLATE handling in this state (see `in_column_definition_state`).
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream and produces AST statements and
/// expressions according to a specific [`Dialect`].
pub struct Parser<'a> {
    /// The tokens being parsed, including comment/whitespace tokens
    /// (comments are harvested by `into_comments`).
    tokens: Vec<TokenWithSpan>,
    /// Current position in `tokens` — presumably the next token to consume.
    index: usize,
    /// Current special-parsing mode (see [`ParserState`]).
    state: ParserState,
    /// The SQL dialect driving keyword and feature decisions.
    dialect: &'a dyn Dialect,
    /// Behavior toggles (trailing commas, unescaping, statement delimiters).
    options: ParserOptions,
    /// Depth budget guarding against unbounded recursion on nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
    /// Creates a parser for `dialect` with no tokens loaded and the default
    /// recursion limit. Trailing-comma support is inherited from the dialect.
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }
390
    /// Builder: replaces the recursion counter with one allowing
    /// `recursion_limit` levels of nesting.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
417
    /// Builder: replaces the parser options wholesale (overriding any
    /// dialect-derived defaults set by `new`).
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
444
    /// Builder: loads a pre-tokenized stream (with spans) and rewinds the
    /// parser to the first token.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
    /// Tokenizes `sql` using this parser's dialect and `unescape` option, then
    /// loads the resulting tokens. Returns the tokenizer's error (converted to
    /// `ParserError`) on malformed input.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
478
    /// Parses zero or more semicolon-separated statements until EOF.
    ///
    /// Redundant semicolons are skipped. If
    /// `options.require_semicolon_stmt_delimiter` is `false`, the delimiter
    /// between statements may be omitted. A trailing `END` keyword in
    /// delimiter position also terminates the list (left for the caller).
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        // True when the previous statement has ended and a `;` (or EOF/END)
        // must appear before the next one may start.
        let mut expecting_statement_delimiter = false;
        loop {
            // Consume any number of `;` separators between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Dialect/option opt-out: delimiters are never required.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // `END` in delimiter position closes the statement list rather
                // than starting a new statement.
                Token::Word(word)
                    if expecting_statement_delimiter && word.keyword == Keyword::END =>
                {
                    break;
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
    /// Convenience entry point: tokenizes and parses `sql` in one call.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
548
    /// Like [`Parser::parse_sql`], but additionally returns the comments
    /// harvested from the token stream (see `into_comments`).
    pub fn parse_sql_with_comments(
        dialect: &'a dyn Dialect,
        sql: &str,
    ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
        let mut p = Parser::new(dialect).try_with_sql(sql)?;
        p.parse_statements().map(|stmts| (stmts, p.into_comments()))
    }
560
    /// Consumes the parser and extracts every comment token (single- and
    /// multi-line) from the token stream into a `Comments` collection,
    /// preserving each comment's span.
    fn into_comments(self) -> comments::Comments {
        let mut comments = comments::Comments::default();
        for t in self.tokens.into_iter() {
            match t.token {
                Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::SingleLine {
                            content: comment,
                            prefix,
                        },
                        span: t.span,
                    });
                }
                Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::MultiLine(comment),
                        span: t.span,
                    });
                }
                // Non-comment tokens are ignored.
                _ => {}
            }
        }
        comments
    }
586
    /// Parses a single top-level statement (SELECT, INSERT, CREATE, ...),
    /// dispatching on the first token.
    ///
    /// The active dialect gets first refusal via `Dialect::parse_statement`.
    /// Recursion-guarded via `recursion_counter`. Arms that call
    /// `self.prev_token()` first do so because the corresponding sub-parser
    /// expects to consume its leading keyword itself.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;

        // Allow the dialect to override statement parsing entirely.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                // All four keywords can begin a query (`FROM`-first is a
                // dialect extension handled inside `parse_query`).
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                // ATTACH has DuckDB-specific syntax distinct from the generic form.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // LISTEN/UNLISTEN/NOTIFY and the feature-gated keywords below
                // are only recognized when the dialect opts in.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                Keyword::SECURITY => self.parse_security_label().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A parenthesized query, e.g. `(SELECT ...)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
732
    /// Parses a `CASE [operand] WHEN ... [ELSE ...] END [CASE]` *statement*
    /// (distinct from the CASE expression).
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // `CASE WHEN ...` is the searched form: no operand expression.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the attached token is the last
        // keyword consumed.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
769
    /// Parses an `IF ... [ELSEIF ...]* [ELSE ...] END IF` statement.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF branches, each terminated by the next branch
        // keyword or END.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The statement closes with `END IF`; the `IF` token is recorded.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
809
    /// Parses a `WHILE <condition> ... END` statement; the body and terminator
    /// are handled by `parse_conditional_statement_block`.
    fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(WhileStatement { while_block })
    }
819
    /// Parses one branch (condition, optional THEN, body) of an IF/WHILE/CASE
    /// statement, with the body terminated by any of `terminal_keywords`.
    ///
    /// `start_token` is taken from `get_current_token` — presumably the branch
    /// keyword (WHEN/ELSE/ELSEIF/...) already consumed by the caller; confirm
    /// against `get_current_token`'s definition.
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        let start_token = self.get_current_token().clone();
        let mut then_token = None;

        let condition = match &start_token.token {
            // ELSE branches carry no condition.
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            // WHILE takes a condition but no THEN keyword.
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            // IF/WHEN/ELSEIF: a condition followed by THEN.
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }
856
    /// Parses the statement list of a conditional branch, either wrapped in an
    /// explicit `BEGIN ... END` block or as a bare sequence terminated by one
    /// of `terminal_keywords`.
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            // Explicit block: BEGIN/END tokens are preserved in the AST.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }
880
    /// Parses a `RAISE` statement: either `RAISE USING MESSAGE = <expr>`,
    /// `RAISE <expr>`, or a bare `RAISE` (value `None`).
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // `maybe_parse` backtracks, so a bare RAISE yields `None`.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }
    /// Parses `COMMENT [IF EXISTS] ON <object-type> <name> ... IS <text|NULL>`
    /// (the leading COMMENT keyword has already been consumed).
    ///
    /// FUNCTION/PROCEDURE/AGGREGATE targets may carry a parenthesized argument
    /// type list; AGGREGATE requires one. TRIGGER/POLICY targets take a second
    /// `ON <relation>` clause. `IS NULL` clears the comment.
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        // Map the object-type keyword to a `CommentObject` and parse the
        // object name that follows it.
        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::AGGREGATE => {
                (CommentObject::Aggregate, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLLATION => {
                (CommentObject::Collation, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DOMAIN => {
                (CommentObject::Domain, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::FUNCTION => {
                (CommentObject::Function, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::INDEX => {
                (CommentObject::Index, self.parse_object_name(false)?)
            }
            // MATERIALIZED must be followed by VIEW.
            Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
                self.expect_keyword_is(Keyword::VIEW)?;
                (
                    CommentObject::MaterializedView,
                    self.parse_object_name(false)?,
                )
            }
            Token::Word(w) if w.keyword == Keyword::POLICY => {
                (CommentObject::Policy, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
                (CommentObject::Procedure, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
                (CommentObject::Sequence, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TRIGGER => {
                (CommentObject::Trigger, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TYPE => {
                (CommentObject::Type, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::VIEW => {
                (CommentObject::View, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        // Optional argument-type list, only meaningful for callable objects.
        let arguments = match object_type {
            CommentObject::Function | CommentObject::Procedure | CommentObject::Aggregate => {
                if self.consume_token(&Token::LParen) {
                    let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;
                    Some(list)
                } else {
                    None
                }
            }
            _ => None,
        };

        // Aggregates are identified by signature, so the list is mandatory.
        if object_type == CommentObject::Aggregate && arguments.is_none() {
            return Err(ParserError::ParserError(
                "COMMENT ON AGGREGATE requires an argument list, e.g. AGGREGATE foo(int)".into(),
            ));
        }

        // TRIGGER/POLICY comments name the relation they are defined on.
        let relation = match object_type {
            CommentObject::Trigger | CommentObject::Policy => {
                self.expect_keyword_is(Keyword::ON)?;
                Some(self.parse_object_name(false)?)
            }
            _ => None,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` removes the comment; otherwise a string literal sets it.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            arguments,
            relation,
            comment,
            if_exists,
        })
    }
1012
    /// Parses a MySQL-style `FLUSH` statement (rejected for other dialects).
    ///
    /// Handles the various flush targets (logs, hosts, privileges, status,
    /// tables, ...), the optional NO_WRITE_TO_BINLOG/LOCAL prefix, and the
    /// table-specific suffixes `WITH READ LOCK` and `FOR EXPORT`.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        // FLUSH is MySQL-specific syntax.
        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!(
                "Unsupported statement FLUSH",
                self.peek_token_ref().span.start
            );
        }

        // Optional replication-control prefix.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // RELAY LOGS may target a specific replication channel.
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // Scan the remainder for a table list and the WITH READ LOCK /
            // FOR EXPORT suffixes; any non-word token ends the scan.
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A plain identifier: back up and parse the
                            // comma-separated table names.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected_ref(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
            RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
1103
    /// Parses a Hive `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`
    /// statement (the MSCK keyword has already been consumed).
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition action is optional; `maybe_parse` backtracks if the
        // trailing PARTITIONS keyword is missing.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1131
    /// Parses a `TRUNCATE [TABLE] [IF EXISTS] <names> ...` statement,
    /// including Hive-style PARTITION lists, Postgres RESTART/CONTINUE
    /// IDENTITY and CASCADE/RESTRICT options, and an optional ON CLUSTER.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        // Each target may carry Postgres `ONLY` and a trailing `*`.
        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are Postgres-specific.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1182
1183 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1184 if self.parse_keyword(Keyword::CASCADE) {
1185 Some(CascadeOption::Cascade)
1186 } else if self.parse_keyword(Keyword::RESTRICT) {
1187 Some(CascadeOption::Restrict)
1188 } else {
1189 None
1190 }
1191 }
1192
    /// Parses DuckDB ATTACH options: an optional parenthesized, comma-separated
    /// list of `READ_ONLY [TRUE|FALSE]` and `TYPE <ident>` entries. Returns an
    /// empty list when there is no `(`.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean value is optional; bare READ_ONLY yields `None`.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // `)` ends the list; `,` continues it; anything else is an error.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1229
    /// Parses DuckDB's
    /// `ATTACH [DATABASE] [IF NOT EXISTS] <path> [AS <alias>] [(options)]`
    /// (the ATTACH keyword has already been consumed).
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1250
    /// Parses `DETACH [DATABASE] [IF EXISTS] <alias>` (the DETACH keyword has
    /// already been consumed).
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1262
    /// Parses the generic (SQLite-style) `ATTACH [DATABASE] <expr> AS <schema>`
    /// form (the ATTACH keyword has already been consumed).
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1275
    /// Parses an `ANALYZE [TABLE] [<name>] ...` statement, including the
    /// Hive/Spark suffixes PARTITION(...), FOR COLUMNS [list], CACHE METADATA,
    /// NOSCAN, and COMPUTE STATISTICS, in any order.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        // The table name itself is optional (`ANALYZE;` is valid in some dialects).
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // Optional parenthesized column list directly after the table name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // Accept the remaining clauses in any order until none match.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // FOR COLUMNS may be followed by an optional column list.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1340
    /// Parses an expression that may be a bare `*`, a qualified wildcard such
    /// as `a.b.*` (also `(*)`), or — failing those — any ordinary expression.
    ///
    /// On the non-wildcard path the parser index is rewound and the input is
    /// re-parsed via `parse_expr`.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember where we started so we can backtrack.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            // A possible qualifier (identifier or quoted string) followed by
            // `.` starts a qualified-wildcard candidate.
            t @ (Token::Word(_) | Token::SingleQuotedString(_))
                if self.peek_token_ref().token == Token::Period =>
            {
                let mut id_parts: Vec<Ident> = vec![match t {
                    Token::Word(w) => w.into_ident(next_token.span),
                    Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                    // Unreachable: the outer pattern only admits the two
                    // variants handled above.
                    _ => {
                        return Err(ParserError::ParserError(
                            "Internal parser error: unexpected token type".to_string(),
                        ))
                    }
                }];

                // Collect further `.part` segments until `.*` (wildcard) or a
                // token that ends the candidate.
                while self.consume_token(&Token::Period) {
                    let next_token = self.next_token();
                    match next_token.token {
                        Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                        Token::SingleQuotedString(s) => {
                            id_parts.push(Ident::with_quote('\'', s))
                        }
                        Token::Placeholder(s) => {
                            id_parts.push(Ident::new(s))
                        }
                        Token::Mul => {
                            return Ok(Expr::QualifiedWildcard(
                                ObjectName::from(id_parts),
                                AttachedToken(next_token),
                            ));
                        }
                        _ => {
                            return self.expected("an identifier or a '*' after '.'", next_token);
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            // `(*)` — the `(` is consumed; peek for `*` then `)` and, if they
            // match, consume both and return a wildcard carrying the `*` token.
            Token::LParen => {
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // Not a wildcard after all: rewind and parse a normal expression.
        self.index = index;
        self.parse_expr()
    }
1403
    /// Parse a complete expression.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        // Start at the lowest ("unknown") precedence so the whole expression is consumed.
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1408
1409 pub fn parse_expr_with_alias_and_order_by(
1411 &mut self,
1412 ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1413 let expr = self.parse_expr()?;
1414
1415 fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1416 explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1417 }
1418 let alias = self.parse_optional_alias_inner(None, validator)?;
1419 let order_by = OrderByOptions {
1420 asc: self.parse_asc_desc(),
1421 nulls_first: None,
1422 };
1423 Ok(ExprWithAliasAndOrderBy {
1424 expr: ExprWithAlias { expr, alias },
1425 order_by,
1426 })
1427 }
1428
    /// Parse a sub-expression, continuing while following infix operators bind
    /// tighter than `precedence`.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Guard against stack overflow on deeply nested expressions; the guard
        // restores the budget on drop.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        // Fold any `.field` / `[...]` accesses onto the prefix.
        expr = self.parse_compound_expr(expr, vec![])?;

        // COLLATE is handled here as a postfix; it is skipped while in the
        // column-definition parser state (where COLLATE belongs to the column).
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            // Stop once the next operator binds no tighter than the caller's level.
            if precedence >= next_precedence {
                break;
            }

            // Period-separated access is handled by `parse_compound_expr`,
            // not as an infix operator.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1467
1468 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1470 let condition = self.parse_expr()?;
1471 let message = if self.parse_keyword(Keyword::AS) {
1472 Some(self.parse_expr()?)
1473 } else {
1474 None
1475 };
1476
1477 Ok(Statement::Assert { condition, message })
1478 }
1479
1480 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1482 let name = self.parse_identifier()?;
1483 Ok(Statement::Savepoint { name })
1484 }
1485
1486 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1488 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1489 let name = self.parse_identifier()?;
1490
1491 Ok(Statement::ReleaseSavepoint { name })
1492 }
1493
1494 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1496 let channel = self.parse_identifier()?;
1497 Ok(Statement::LISTEN { channel })
1498 }
1499
1500 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1502 let channel = if self.consume_token(&Token::Mul) {
1503 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1504 } else {
1505 match self.parse_identifier() {
1506 Ok(expr) => expr,
1507 _ => {
1508 self.prev_token();
1509 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1510 }
1511 }
1512 };
1513 Ok(Statement::UNLISTEN { channel })
1514 }
1515
1516 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1518 let channel = self.parse_identifier()?;
1519 let payload = if self.consume_token(&Token::Comma) {
1520 Some(self.parse_literal_string()?)
1521 } else {
1522 None
1523 };
1524 Ok(Statement::NOTIFY { channel, payload })
1525 }
1526
1527 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1529 if self.peek_keyword(Keyword::TABLE) {
1530 self.expect_keyword(Keyword::TABLE)?;
1531 let rename_tables = self.parse_comma_separated(|parser| {
1532 let old_name = parser.parse_object_name(false)?;
1533 parser.expect_keyword(Keyword::TO)?;
1534 let new_name = parser.parse_object_name(false)?;
1535
1536 Ok(RenameTable { old_name, new_name })
1537 })?;
1538 Ok(rename_tables.into())
1539 } else {
1540 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1541 }
1542 }
1543
    /// Try to parse an expression prefix introduced by a reserved keyword
    /// (CAST, CASE, EXISTS, INTERVAL, ...). Returns `Ok(None)` when the word
    /// does not begin a keyword expression so the caller can fall back to
    /// treating it as an ordinary identifier.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean literals, only in dialects that have them.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Niladic "functions" such as CURRENT_USER (Postgres/Generic only).
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Datetime functions that may be written with or without parens.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS(subquery); for Databricks, only when followed by
            // SELECT/WITH (otherwise EXISTS is left for other uses).
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            // POSITION only as the function form `POSITION(...)`.
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<query>)` — a subquery-valued array function.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // PRIOR is only an operator inside CONNECT BY.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric type literals (POINT '...', LINE '...', ...).
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1670
1671 fn parse_expr_prefix_by_unreserved_word(
1673 &mut self,
1674 w: &Word,
1675 w_span: Span,
1676 ) -> Result<Expr, ParserError> {
1677 let is_outer_join = self.peek_outer_join_operator();
1678 match &self.peek_token_ref().token {
1679 Token::LParen if !is_outer_join => {
1680 let id_parts = vec![w.to_ident(w_span)];
1681 self.parse_function(ObjectName::from(id_parts))
1682 }
1683 Token::SingleQuotedString(_)
1685 | Token::DoubleQuotedString(_)
1686 | Token::HexStringLiteral(_)
1687 if w.value.starts_with('_') =>
1688 {
1689 Ok(Expr::Prefixed {
1690 prefix: w.to_ident(w_span),
1691 value: self.parse_introduced_string_expr()?.into(),
1692 })
1693 }
1694 Token::SingleQuotedString(_)
1696 | Token::DoubleQuotedString(_)
1697 | Token::HexStringLiteral(_)
1698 if w.value.starts_with('_') =>
1699 {
1700 Ok(Expr::Prefixed {
1701 prefix: w.to_ident(w_span),
1702 value: self.parse_introduced_string_expr()?.into(),
1703 })
1704 }
1705 Token::Arrow if self.dialect.supports_lambda_functions() => {
1709 self.expect_token(&Token::Arrow)?;
1710 Ok(Expr::Lambda(LambdaFunction {
1711 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1712 name: w.to_ident(w_span),
1713 data_type: None,
1714 }),
1715 body: Box::new(self.parse_expr()?),
1716 syntax: LambdaSyntax::Arrow,
1717 }))
1718 }
1719 Token::Word(_)
1723 if self.dialect.supports_lambda_functions()
1724 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1725 {
1726 let data_type = self.parse_data_type()?;
1727 self.expect_token(&Token::Arrow)?;
1728 Ok(Expr::Lambda(LambdaFunction {
1729 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1730 name: w.to_ident(w_span),
1731 data_type: Some(data_type),
1732 }),
1733 body: Box::new(self.parse_expr()?),
1734 syntax: LambdaSyntax::Arrow,
1735 }))
1736 }
1737 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1738 }
1739 }
1740
1741 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1744 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1745 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1746 } else {
1747 false
1748 }
1749 }
1750
1751 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1753 if let Some(prefix) = self.dialect.parse_prefix(self) {
1755 return prefix;
1756 }
1757
1758 let loc = self.peek_token_ref().span.start;
1775 let opt_expr = self.maybe_parse(|parser| {
1776 match parser.parse_data_type()? {
1777 DataType::Interval { .. } => parser.parse_interval(),
1778 DataType::Custom(ref name, ref modifiers)
1789 if modifiers.is_empty()
1790 && Self::is_simple_unquoted_object_name(name, "xml")
1791 && parser.dialect.supports_xml_expressions() =>
1792 {
1793 Ok(Expr::TypedString(TypedString {
1794 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1795 value: parser.parse_value()?,
1796 uses_odbc_syntax: false,
1797 }))
1798 }
1799 DataType::Custom(..) => parser_err!("dummy", loc),
1800 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1802 Ok(Expr::Cast {
1803 kind: CastKind::Cast,
1804 expr: Box::new(parser.parse_expr()?),
1805 data_type: DataType::Binary(None),
1806 array: false,
1807 format: None,
1808 })
1809 }
1810 data_type => Ok(Expr::TypedString(TypedString {
1811 data_type,
1812 value: parser.parse_value()?,
1813 uses_odbc_syntax: false,
1814 })),
1815 }
1816 })?;
1817
1818 if let Some(expr) = opt_expr {
1819 return Ok(expr);
1820 }
1821
1822 let dialect = self.dialect;
1826
1827 self.advance_token();
1828 let next_token_index = self.get_current_index();
1829 let next_token = self.get_current_token();
1830 let span = next_token.span;
1831 let expr = match &next_token.token {
1832 Token::Word(w) => {
1833 let w = w.clone();
1842 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1843 Ok(Some(expr)) => Ok(expr),
1845
1846 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1848
1849 Err(e) => {
1856 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1857 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1858 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1859 }) {
1860 return Ok(expr);
1861 }
1862 }
1863 return Err(e);
1864 }
1865 }
1866 } Token::LBracket => self.parse_array_expr(false),
1869 tok @ Token::Minus | tok @ Token::Plus => {
1870 let op = if *tok == Token::Plus {
1871 UnaryOperator::Plus
1872 } else {
1873 UnaryOperator::Minus
1874 };
1875 Ok(Expr::UnaryOp {
1876 op,
1877 expr: Box::new(
1878 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1879 ),
1880 })
1881 }
1882 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1883 op: UnaryOperator::BangNot,
1884 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1885 }),
1886 tok @ Token::DoubleExclamationMark
1887 | tok @ Token::PGSquareRoot
1888 | tok @ Token::PGCubeRoot
1889 | tok @ Token::AtSign
1890 if dialect_is!(dialect is PostgreSqlDialect) =>
1891 {
1892 let op = match tok {
1893 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1894 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1895 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1896 Token::AtSign => UnaryOperator::PGAbs,
1897 _ => {
1898 return Err(ParserError::ParserError(
1899 "Internal parser error: unexpected unary operator token".to_string(),
1900 ))
1901 }
1902 };
1903 Ok(Expr::UnaryOp {
1904 op,
1905 expr: Box::new(
1906 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1907 ),
1908 })
1909 }
1910 Token::Tilde => Ok(Expr::UnaryOp {
1911 op: UnaryOperator::BitwiseNot,
1912 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1913 }),
1914 tok @ Token::Sharp
1915 | tok @ Token::AtDashAt
1916 | tok @ Token::AtAt
1917 | tok @ Token::QuestionMarkDash
1918 | tok @ Token::QuestionPipe
1919 if self.dialect.supports_geometric_types() =>
1920 {
1921 let op = match tok {
1922 Token::Sharp => UnaryOperator::Hash,
1923 Token::AtDashAt => UnaryOperator::AtDashAt,
1924 Token::AtAt => UnaryOperator::DoubleAt,
1925 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1926 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1927 _ => {
1928 return Err(ParserError::ParserError(format!(
1929 "Unexpected token in unary operator parsing: {tok:?}"
1930 )))
1931 }
1932 };
1933 Ok(Expr::UnaryOp {
1934 op,
1935 expr: Box::new(
1936 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1937 ),
1938 })
1939 }
1940 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1941 {
1942 self.prev_token();
1943 Ok(Expr::Value(self.parse_value()?))
1944 }
1945 Token::UnicodeStringLiteral(_) => {
1946 self.prev_token();
1947 Ok(Expr::Value(self.parse_value()?))
1948 }
1949 Token::Number(_, _)
1950 | Token::SingleQuotedString(_)
1951 | Token::DoubleQuotedString(_)
1952 | Token::TripleSingleQuotedString(_)
1953 | Token::TripleDoubleQuotedString(_)
1954 | Token::DollarQuotedString(_)
1955 | Token::SingleQuotedByteStringLiteral(_)
1956 | Token::DoubleQuotedByteStringLiteral(_)
1957 | Token::TripleSingleQuotedByteStringLiteral(_)
1958 | Token::TripleDoubleQuotedByteStringLiteral(_)
1959 | Token::SingleQuotedRawStringLiteral(_)
1960 | Token::DoubleQuotedRawStringLiteral(_)
1961 | Token::TripleSingleQuotedRawStringLiteral(_)
1962 | Token::TripleDoubleQuotedRawStringLiteral(_)
1963 | Token::NationalStringLiteral(_)
1964 | Token::QuoteDelimitedStringLiteral(_)
1965 | Token::NationalQuoteDelimitedStringLiteral(_)
1966 | Token::HexStringLiteral(_) => {
1967 self.prev_token();
1968 Ok(Expr::Value(self.parse_value()?))
1969 }
1970 Token::LParen => {
1971 let expr =
1972 if let Some(expr) = self.try_parse_expr_sub_query()? {
1973 expr
1974 } else if let Some(lambda) = self.try_parse_lambda()? {
1975 return Ok(lambda);
1976 } else {
1977 let exprs = self.with_state(ParserState::Normal, |p| {
1988 p.parse_comma_separated(Parser::parse_expr)
1989 })?;
1990 match exprs.len() {
1991 0 => return Err(ParserError::ParserError(
1992 "Internal parser error: parse_comma_separated returned empty list"
1993 .to_string(),
1994 )),
1995 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1996 _ => Expr::Tuple(exprs),
1997 }
1998 };
1999 self.expect_token(&Token::RParen)?;
2000 Ok(expr)
2001 }
2002 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
2003 self.prev_token();
2004 Ok(Expr::Value(self.parse_value()?))
2005 }
2006 Token::LBrace => {
2007 self.prev_token();
2008 self.parse_lbrace_expr()
2009 }
2010 _ => self.expected_at("an expression", next_token_index),
2011 }?;
2012
2013 Ok(expr)
2014 }
2015
2016 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
2017 Ok(Expr::TypedString(TypedString {
2018 data_type: DataType::GeometricType(kind),
2019 value: self.parse_value()?,
2020 uses_odbc_syntax: false,
2021 }))
2022 }
2023
    /// Parse compound access following a prefix expression: chains of
    /// `.field` accesses, subscripts (`[...]`), a trailing qualified wildcard
    /// (`.*`), or an outer-join marker `(+)`.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `.*` ends the chain; only Postgres consumes it here
                        // as a wildcard — otherwise put the period back.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    Token::SingleQuotedString(s) => {
                        // A quoted string after `.` is treated as a quoted identifier.
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => {
                        // Speculatively parse the element after `.` as a full
                        // sub-expression, accepting only identifier-like or
                        // callable results.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested access so the chain stays linear.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Fall back to a plain identifier; this also
                                // produces the error message on failure.
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `expr[...]` subscripting, possibly multi-dimensional.
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `qualifier.*` requires every chain element to be an identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `(+)` outer-join marker: only valid after a column name.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2153
    /// Combine a root expression and its access chain into the most specific
    /// `Expr` form: a compound identifier, a name-qualified function call, an
    /// outer-join marker, or a generic `CompoundFieldAccess`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        // An all-identifier chain collapses to a compound identifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `ident.ident...func(...)`: fold the leading identifiers into the
        // function's qualified name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // skip the trailing function itself
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root + intermediate identifiers to the function name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // `ident.<outer-join expr>`: merge into one OuterJoin over a compound
        // identifier.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Anything else: generic field access.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2246
2247 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2248 match k {
2249 Keyword::LOCAL => Some(ContextModifier::Local),
2250 Keyword::GLOBAL => Some(ContextModifier::Global),
2251 Keyword::SESSION => Some(ContextModifier::Session),
2252 _ => None,
2253 }
2254 }
2255
2256 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2258 if !matches!(root, Expr::Identifier(_)) {
2259 return false;
2260 }
2261 fields
2262 .iter()
2263 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2264 }
2265
2266 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2268 let mut idents = vec![];
2269 if let Expr::Identifier(root) = root {
2270 idents.push(root);
2271 for x in fields {
2272 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2273 idents.push(ident);
2274 } else {
2275 return parser_err!(
2276 format!("Expected identifier, found: {}", x),
2277 x.span().start
2278 );
2279 }
2280 }
2281 Ok(idents)
2282 } else {
2283 parser_err!(
2284 format!("Expected identifier, found: {}", root),
2285 root.span().start
2286 )
2287 }
2288 }
2289
2290 fn peek_outer_join_operator(&mut self) -> bool {
2292 if !self.dialect.supports_outer_join_operator() {
2293 return false;
2294 }
2295
2296 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2297 Token::LParen == maybe_lparen.token
2298 && Token::Plus == maybe_plus.token
2299 && Token::RParen == maybe_rparen.token
2300 }
2301
2302 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2305 self.dialect.supports_outer_join_operator()
2306 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2307 }
2308
2309 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2311 self.expect_token(&Token::LParen)?;
2312 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2313 self.expect_token(&Token::RParen)?;
2314
2315 Ok(options)
2316 }
2317
2318 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2319 let name = self.parse_identifier()?;
2320
2321 let next_token = self.peek_token_ref();
2322 if next_token == &Token::Comma || next_token == &Token::RParen {
2323 return Ok(UtilityOption { name, arg: None });
2324 }
2325 let arg = self.parse_expr()?;
2326
2327 Ok(UtilityOption {
2328 name,
2329 arg: Some(arg),
2330 })
2331 }
2332
2333 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2334 if !self.peek_sub_query() {
2335 return Ok(None);
2336 }
2337
2338 Ok(Some(Expr::Subquery(self.parse_query()?)))
2339 }
2340
2341 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2342 if !self.dialect.supports_lambda_functions() {
2343 return Ok(None);
2344 }
2345 self.maybe_parse(|p| {
2346 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2347 p.expect_token(&Token::RParen)?;
2348 p.expect_token(&Token::Arrow)?;
2349 let expr = p.parse_expr()?;
2350 Ok(Expr::Lambda(LambdaFunction {
2351 params: OneOrManyWithParens::Many(params),
2352 body: Box::new(expr),
2353 syntax: LambdaSyntax::Arrow,
2354 }))
2355 })
2356 }
2357
2358 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2368 let params = self.parse_lambda_function_parameters()?;
2370 self.expect_token(&Token::Colon)?;
2372 let body = self.parse_expr()?;
2374 Ok(Expr::Lambda(LambdaFunction {
2375 params,
2376 body: Box::new(body),
2377 syntax: LambdaSyntax::LambdaKeyword,
2378 }))
2379 }
2380
2381 fn parse_lambda_function_parameters(
2383 &mut self,
2384 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2385 let params = if self.consume_token(&Token::LParen) {
2387 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2389 self.expect_token(&Token::RParen)?;
2390 OneOrManyWithParens::Many(params)
2391 } else {
2392 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2394 if params.len() == 1 {
2395 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2396 } else {
2397 OneOrManyWithParens::Many(params)
2398 }
2399 };
2400 Ok(params)
2401 }
2402
2403 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2405 let name = self.parse_identifier()?;
2406 let data_type = match &self.peek_token_ref().token {
2407 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2408 _ => None,
2409 };
2410 Ok(LambdaFunctionParameter { name, data_type })
2411 }
2412
2413 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2420 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2422 return Ok(Some(expr));
2423 }
2424 self.maybe_parse_odbc_body_datetime()
2426 }
2427
    /// Try to parse an ODBC datetime escape body: `d '...'` (date),
    /// `t '...'` (time) or `ts '...'` (timestamp). Rolls back when the next
    /// token is not one of those markers.
    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
        self.maybe_parse(|p| {
            let token = p.next_token().clone();
            // Compare the token's textual form against the ODBC markers.
            let word_string = token.token.to_string();
            let data_type = match word_string.as_str() {
                "t" => DataType::Time(None, TimezoneInfo::None),
                "d" => DataType::Date,
                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
            };
            let value = p.parse_value()?;
            Ok(Expr::TypedString(TypedString {
                data_type,
                value,
                uses_odbc_syntax: true,
            }))
        })
    }
2456
2457 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2466 self.maybe_parse(|p| {
2467 p.expect_keyword(Keyword::FN)?;
2468 let fn_name = p.parse_object_name(false)?;
2469 let mut fn_call = p.parse_function_call(fn_name)?;
2470 fn_call.uses_odbc_syntax = true;
2471 Ok(Expr::Function(fn_call))
2472 })
2473 }
2474
2475 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2477 self.parse_function_call(name).map(Expr::Function)
2478 }
2479
    /// Parse a function call whose name has already been consumed: the
    /// argument list plus optional trailing clauses (WITHIN GROUP, FILTER,
    /// RESPECT/IGNORE NULLS, OVER).
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Some dialects allow a subquery as the entire argument list.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // `name(params)(args)` — the first list becomes `parameters` and the
        // second the actual arguments (ClickHouse/Generic only).
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // `WITHIN GROUP (ORDER BY ...)`.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE ...)`, in dialects that support it.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for RESPECT/IGNORE NULLS here if it was not already
        // captured as an argument-list clause.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (...)` window specification or `OVER name` named window.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2567
2568 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2570 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2571 Some(keyword) => {
2572 self.expect_keyword_is(Keyword::NULLS)?;
2573
2574 Ok(match keyword {
2575 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2576 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2577 _ => None,
2578 })
2579 }
2580 None => Ok(None),
2581 }
2582 }
2583
2584 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2586 let args = if self.consume_token(&Token::LParen) {
2587 FunctionArguments::List(self.parse_function_argument_list()?)
2588 } else {
2589 FunctionArguments::None
2590 };
2591 Ok(Expr::Function(Function {
2592 name,
2593 uses_odbc_syntax: false,
2594 parameters: FunctionArguments::None,
2595 args,
2596 filter: None,
2597 over: None,
2598 null_treatment: None,
2599 within_group: vec![],
2600 }))
2601 }
2602
2603 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2605 let next_token = self.next_token();
2606 match &next_token.token {
2607 Token::Word(w) => match w.keyword {
2608 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2609 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2610 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2611 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2612 },
2613 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2614 }
2615 }
2616
2617 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2619 let units = self.parse_window_frame_units()?;
2620 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2621 let start_bound = self.parse_window_frame_bound()?;
2622 self.expect_keyword_is(Keyword::AND)?;
2623 let end_bound = Some(self.parse_window_frame_bound()?);
2624 (start_bound, end_bound)
2625 } else {
2626 (self.parse_window_frame_bound()?, None)
2627 };
2628 Ok(WindowFrame {
2629 units,
2630 start_bound,
2631 end_bound,
2632 })
2633 }
2634
    /// Parse a single window frame bound: `CURRENT ROW`,
    /// `UNBOUNDED PRECEDING|FOLLOWING`, or `<expr> PRECEDING|FOLLOWING`.
    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
            Ok(WindowFrameBound::CurrentRow)
        } else {
            // `None` encodes UNBOUNDED; otherwise an offset expression.
            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
                None
            } else {
                Some(Box::new(match &self.peek_token_ref().token {
                    // A quoted string here is parsed as an INTERVAL literal
                    // (e.g. RANGE '1 day' PRECEDING).
                    Token::SingleQuotedString(_) => self.parse_interval()?,
                    _ => self.parse_expr()?,
                }))
            };
            if self.parse_keyword(Keyword::PRECEDING) {
                Ok(WindowFrameBound::Preceding(rows))
            } else if self.parse_keyword(Keyword::FOLLOWING) {
                Ok(WindowFrameBound::Following(rows))
            } else {
                self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
            }
        }
    }
2657
2658 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2660 if self.dialect.supports_group_by_expr() {
2661 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2662 self.expect_token(&Token::LParen)?;
2663 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2664 self.expect_token(&Token::RParen)?;
2665 Ok(Expr::GroupingSets(result))
2666 } else if self.parse_keyword(Keyword::CUBE) {
2667 self.expect_token(&Token::LParen)?;
2668 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2669 self.expect_token(&Token::RParen)?;
2670 Ok(Expr::Cube(result))
2671 } else if self.parse_keyword(Keyword::ROLLUP) {
2672 self.expect_token(&Token::LParen)?;
2673 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2674 self.expect_token(&Token::RParen)?;
2675 Ok(Expr::Rollup(result))
2676 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2677 Ok(Expr::Tuple(vec![]))
2681 } else {
2682 self.parse_expr()
2683 }
2684 } else {
2685 self.parse_expr()
2687 }
2688 }
2689
2690 fn parse_tuple(
2694 &mut self,
2695 lift_singleton: bool,
2696 allow_empty: bool,
2697 ) -> Result<Vec<Expr>, ParserError> {
2698 if lift_singleton {
2699 if self.consume_token(&Token::LParen) {
2700 let result = if allow_empty && self.consume_token(&Token::RParen) {
2701 vec![]
2702 } else {
2703 let result = self.parse_comma_separated(Parser::parse_expr)?;
2704 self.expect_token(&Token::RParen)?;
2705 result
2706 };
2707 Ok(result)
2708 } else {
2709 Ok(vec![self.parse_expr()?])
2710 }
2711 } else {
2712 self.expect_token(&Token::LParen)?;
2713 let result = if allow_empty && self.consume_token(&Token::RParen) {
2714 vec![]
2715 } else {
2716 let result = self.parse_comma_separated(Parser::parse_expr)?;
2717 self.expect_token(&Token::RParen)?;
2718 result
2719 };
2720 Ok(result)
2721 }
2722 }
2723
    /// Parse a `CASE ... END` expression. The `CASE` keyword has already been
    /// consumed by the caller; its token is captured for span tracking.
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        let case_token = AttachedToken(self.get_current_token().clone());
        let mut operand = None;
        // If WHEN does not follow immediately, this is the "simple" CASE
        // form with an operand: CASE <operand> WHEN ...
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword_is(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        // One or more WHEN <condition> THEN <result> arms.
        loop {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::THEN)?;
            let result = self.parse_expr()?;
            conditions.push(CaseWhen { condition, result });
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        // Optional ELSE branch before the mandatory END.
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
        Ok(Expr::Case {
            case_token,
            end_token,
            operand,
            conditions,
            else_result,
        })
    }
2756
2757 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2759 if self.parse_keyword(Keyword::FORMAT) {
2760 let value = self.parse_value()?;
2761 match self.parse_optional_time_zone()? {
2762 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2763 None => Ok(Some(CastFormat::Value(value))),
2764 }
2765 } else {
2766 Ok(None)
2767 }
2768 }
2769
2770 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2772 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2773 self.parse_value().map(Some)
2774 } else {
2775 Ok(None)
2776 }
2777 }
2778
2779 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2781 self.expect_token(&Token::LParen)?;
2782 let data_type = self.parse_data_type()?;
2783 self.expect_token(&Token::Comma)?;
2784 let expr = self.parse_expr()?;
2785 let styles = if self.consume_token(&Token::Comma) {
2786 self.parse_comma_separated(Parser::parse_expr)?
2787 } else {
2788 Default::default()
2789 };
2790 self.expect_token(&Token::RParen)?;
2791 Ok(Expr::Convert {
2792 is_try,
2793 expr: Box::new(expr),
2794 data_type: Some(data_type),
2795 charset: None,
2796 target_before_value: true,
2797 styles,
2798 })
2799 }
2800
    /// Parse a `CONVERT` / `TRY_CONVERT` expression. Three syntaxes:
    /// * type-first `CONVERT(type, expr [, style...])` when the dialect
    ///   places the target type first (delegated to `parse_mssql_convert`);
    /// * `CONVERT(expr USING charset)`;
    /// * `CONVERT(expr, type [CHARACTER SET charset])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `CONVERT(expr USING charset)`: no target data type is given.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        // Optional `CHARACTER SET charset` suffix after the data type.
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2840
2841 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2843 self.expect_token(&Token::LParen)?;
2844 let expr = self.parse_expr()?;
2845 self.expect_keyword_is(Keyword::AS)?;
2846 let data_type = self.parse_data_type()?;
2847 let array = self.parse_keyword(Keyword::ARRAY);
2848 let format = self.parse_optional_cast_format()?;
2849 self.expect_token(&Token::RParen)?;
2850 Ok(Expr::Cast {
2851 kind,
2852 expr: Box::new(expr),
2853 data_type,
2854 array,
2855 format,
2856 })
2857 }
2858
2859 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2861 self.expect_token(&Token::LParen)?;
2862 let exists_node = Expr::Exists {
2863 negated,
2864 subquery: self.parse_query()?,
2865 };
2866 self.expect_token(&Token::RParen)?;
2867 Ok(exists_node)
2868 }
2869
    /// Parse the body of `EXTRACT(field FROM expr)`. Dialects that support
    /// it may also use a comma separator: `EXTRACT(field, expr)`.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        // Record which separator syntax was used so the AST keeps it.
        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
        {
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2894
2895 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2897 self.expect_token(&Token::LParen)?;
2898 let expr = self.parse_expr()?;
2899 let field = if self.parse_keyword(Keyword::TO) {
2901 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2903 } else if self.consume_token(&Token::Comma) {
2904 let v = self.parse_value()?;
2906 if matches!(v.value, Value::Number(_, _)) {
2907 CeilFloorKind::Scale(v)
2908 } else {
2909 return Err(ParserError::ParserError(
2910 "Scale field can only be of number type".to_string(),
2911 ));
2912 }
2913 } else {
2914 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2915 };
2916 self.expect_token(&Token::RParen)?;
2917 if is_ceil {
2918 Ok(Expr::Ceil {
2919 expr: Box::new(expr),
2920 field,
2921 })
2922 } else {
2923 Ok(Expr::Floor {
2924 expr: Box::new(expr),
2925 field,
2926 })
2927 }
2928 }
2929
    /// Parse `POSITION(<expr> IN <expr>)`. When the special POSITION syntax
    /// does not match, fall back to parsing an ordinary function call named
    /// by `ident`.
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        let between_prec = self.dialect.prec_value(Precedence::Between);
        // maybe_parse yields None on failure, so the fallback below can
        // re-parse the same tokens as a regular function call.
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            // Parse at BETWEEN precedence so the following `IN` keyword is
            // not consumed as part of the searched expression.
            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword_is(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            None => self.parse_function(ObjectName::from(vec![ident])),
        }
    }
2954
    /// Parse `SUBSTRING(expr [FROM start] [FOR len])` or the comma form
    /// `SUBSTR(expr, start [, len])`. `shorthand` records whether the
    /// SUBSTR spelling was used.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            _ => {
                // Defensive: expect_one_of_keywords should only return one
                // of the two keywords listed above.
                self.prev_token();
                return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // `special` marks the comma-separated argument style.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2987
2988 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2992 self.expect_token(&Token::LParen)?;
2994 let expr = self.parse_expr()?;
2995 self.expect_keyword_is(Keyword::PLACING)?;
2996 let what_expr = self.parse_expr()?;
2997 self.expect_keyword_is(Keyword::FROM)?;
2998 let from_expr = self.parse_expr()?;
2999 let mut for_expr = None;
3000 if self.parse_keyword(Keyword::FOR) {
3001 for_expr = Some(self.parse_expr()?);
3002 }
3003 self.expect_token(&Token::RParen)?;
3004
3005 Ok(Expr::Overlay {
3006 expr: Box::new(expr),
3007 overlay_what: Box::new(what_expr),
3008 overlay_from: Box::new(from_expr),
3009 overlay_for: for_expr.map(Box::new),
3010 })
3011 }
3012
    /// Parse the body of a TRIM call. Supported forms:
    /// * `TRIM([BOTH|LEADING|TRAILING] expr)`
    /// * `TRIM([BOTH|LEADING|TRAILING] chars FROM expr)`
    /// * `TRIM(expr, chars...)` for dialects with comma-separated TRIM.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // In `TRIM(chars FROM expr)` the expression parsed first is the
            // character set to trim, not the trimmed subject — swap roles.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3057
3058 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3062 let next_token = self.next_token();
3063 match &next_token.token {
3064 Token::Word(w) => match w.keyword {
3065 Keyword::BOTH => Ok(TrimWhereField::Both),
3066 Keyword::LEADING => Ok(TrimWhereField::Leading),
3067 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3068 _ => self.expected("trim_where field", next_token)?,
3069 },
3070 _ => self.expected("trim_where field", next_token),
3071 }
3072 }
3073
3074 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3077 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3078 self.expect_token(&Token::RBracket)?;
3079 Ok(Expr::Array(Array { elem: exprs, named }))
3080 }
3081
    /// Parse an optional LISTAGG `ON OVERFLOW` clause:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler>] WITH|WITHOUT COUNT`.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler is optional: WITH/WITHOUT immediately after
                // TRUNCATE means no filler was provided.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH or WITHOUT must follow, then COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3120
    /// Parse a date/time unit keyword (YEAR, MONTH, DAY, HOUR, ...), as used
    /// by EXTRACT, CEIL/FLOOR ... TO, and INTERVAL qualifiers. Dialect hooks
    /// allow custom (non-keyword) units and single-quoted unit strings.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery/Generic accept WEEK(<weekday>), e.g. WEEK(MONDAY).
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Dialects may allow arbitrary identifiers as custom units;
                // rewind so the word is re-read as an identifier.
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            // Dialects may allow a single-quoted unit, e.g. EXTRACT('year' ...).
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3199
3200 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3204 match &self.peek_token_ref().token {
3205 Token::Word(w) => match w.keyword {
3206 Keyword::EXISTS => {
3207 let negated = true;
3208 let _ = self.parse_keyword(Keyword::EXISTS);
3209 self.parse_exists_expr(negated)
3210 }
3211 _ => Ok(Expr::UnaryOp {
3212 op: UnaryOperator::Not,
3213 expr: Box::new(
3214 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3215 ),
3216 }),
3217 },
3218 _ => Ok(Expr::UnaryOp {
3219 op: UnaryOperator::Not,
3220 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3221 }),
3222 }
3223 }
3224
3225 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3235 let token = self.expect_token(&Token::LBrace)?;
3236
3237 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3238 self.expect_token(&Token::RBrace)?;
3239 return Ok(fn_expr);
3240 }
3241
3242 if self.dialect.supports_dictionary_syntax() {
3243 self.prev_token(); return self.parse_dictionary();
3245 }
3246
3247 self.expected("an expression", token)
3248 }
3249
    /// Parse the tail of a MySQL-style full-text search predicate:
    /// `(<columns>) AGAINST (<value> [<search modifier>])`.
    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;

        self.expect_keyword_is(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        // The value to search for.
        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        // Optional search modifier; NATURAL LANGUAGE MODE may itself be
        // followed by WITH QUERY EXPANSION.
        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }
3298
    /// Parse an INTERVAL literal body (the INTERVAL keyword has already been
    /// consumed): a value, an optional unit qualifier such as
    /// `YEAR [TO MONTH]`, and optional precision(s).
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects requiring a qualifier can take a full expression as the
        // value; otherwise only a prefix expression is parsed — presumably so
        // a following unit keyword is left for the qualifier (NOTE(review):
        // confirm against parse_prefix).
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // The leading unit, e.g. the YEAR in `INTERVAL '1' YEAR`.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                // SECOND may carry `(precision [, fractional precision])`
                // directly; no `TO <field>` can follow.
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    // Only `... TO SECOND` takes a fractional precision.
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3381
    /// Peek (without consuming) whether the next token is one of the keyword
    /// units accepted as an INTERVAL qualifier. Note this list is a subset of
    /// the units `parse_date_time_field` accepts.
    pub fn next_token_is_temporal_unit(&mut self) -> bool {
        if let Token::Word(word) = &self.peek_token_ref().token {
            matches!(
                word.keyword,
                Keyword::YEAR
                    | Keyword::YEARS
                    | Keyword::MONTH
                    | Keyword::MONTHS
                    | Keyword::WEEK
                    | Keyword::WEEKS
                    | Keyword::DAY
                    | Keyword::DAYS
                    | Keyword::HOUR
                    | Keyword::HOURS
                    | Keyword::MINUTE
                    | Keyword::MINUTES
                    | Keyword::SECOND
                    | Keyword::SECONDS
                    | Keyword::CENTURY
                    | Keyword::DECADE
                    | Keyword::DOW
                    | Keyword::DOY
                    | Keyword::EPOCH
                    | Keyword::ISODOW
                    | Keyword::ISOYEAR
                    | Keyword::JULIAN
                    | Keyword::MICROSECOND
                    | Keyword::MICROSECONDS
                    | Keyword::MILLENIUM
                    | Keyword::MILLENNIUM
                    | Keyword::MILLISECOND
                    | Keyword::MILLISECONDS
                    | Keyword::NANOSECOND
                    | Keyword::NANOSECONDS
                    | Keyword::QUARTER
                    | Keyword::TIMEZONE
                    | Keyword::TIMEZONE_HOUR
                    | Keyword::TIMEZONE_MINUTE
            )
        } else {
            false
        }
    }
3427
    /// Parse a struct literal such as `STRUCT<a INT, b STRING>(1, 'x')` or
    /// the untyped form `STRUCT(1 AS a, 'x' AS b)`.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // The STRUCT keyword was already consumed; step back so
        // parse_struct_type_def can expect it itself.
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        if trailing_bracket.0 {
            // A `>>` token closed one bracket too many for this nesting level.
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token_ref().span.start
            );
        }

        // The value list. `AS` field names are only permitted when the
        // struct had no typed field list (see parse_struct_field_expr).
        self.expect_token(&Token::LParen)?;
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3455
3456 fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3470 let expr = self.parse_expr()?;
3471 if self.parse_keyword(Keyword::AS) {
3472 if typed_syntax {
3473 return parser_err!("Typed syntax does not allow AS", {
3474 self.prev_token();
3475 self.peek_token_ref().span.start
3476 });
3477 }
3478 let field_name = self.parse_identifier()?;
3479 Ok(Expr::Named {
3480 expr: expr.into(),
3481 name: field_name,
3482 })
3483 } else {
3484 Ok(expr)
3485 }
3486 }
3487
    /// Parse `STRUCT` optionally followed by an angle-bracketed field list
    /// `<field, ...>`. `elem_parser` parses each field and reports whether it
    /// already consumed a `>>` token that also closes this list.
    ///
    /// Returns the fields plus a flag indicating a `>>` was consumed with
    /// one bracket level still owed to an outer list.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // No `<` means a bare STRUCT with no declared fields.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // Stop when the element already consumed the closing bracket,
            // or there is no comma introducing another field.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3530
3531 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3533 self.expect_keyword_is(Keyword::STRUCT)?;
3534 self.expect_token(&Token::LParen)?;
3535 let struct_body = self.parse_comma_separated(|parser| {
3536 let field_name = parser.parse_identifier()?;
3537 let field_type = parser.parse_data_type()?;
3538
3539 Ok(StructField {
3540 field_name: Some(field_name),
3541 field_type,
3542 options: None,
3543 })
3544 });
3545 self.expect_token(&Token::RParen)?;
3546 struct_body
3547 }
3548
    /// Parse one field of a STRUCT type definition: an optional field name
    /// (with an optional `:` separator), a data type, and an optional
    /// `OPTIONS(...)` clause. Also propagates whether the data type consumed
    /// a trailing `>>` that closes this struct's bracket.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // A field is named if it looks like `name type` or `name: type`.
        let is_named_field = matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
        );

        let field_name = if is_named_field {
            let name = self.parse_identifier()?;
            // The separating colon is optional.
            let _ = self.consume_token(&Token::Colon);
            Some(name)
        } else {
            None
        };

        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3590
3591 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3601 self.expect_keyword_is(Keyword::UNION)?;
3602
3603 self.expect_token(&Token::LParen)?;
3604
3605 let fields = self.parse_comma_separated(|p| {
3606 Ok(UnionField {
3607 field_name: p.parse_identifier()?,
3608 field_type: p.parse_data_type()?,
3609 })
3610 })?;
3611
3612 self.expect_token(&Token::RParen)?;
3613
3614 Ok(fields)
3615 }
3616
3617 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3628 self.expect_token(&Token::LBrace)?;
3629
3630 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3631
3632 self.expect_token(&Token::RBrace)?;
3633
3634 Ok(Expr::Dictionary(fields))
3635 }
3636
3637 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3648 let key = self.parse_identifier()?;
3649
3650 self.expect_token(&Token::Colon)?;
3651
3652 let expr = self.parse_expr()?;
3653
3654 Ok(DictionaryField {
3655 key,
3656 value: Box::new(expr),
3657 })
3658 }
3659
3660 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3670 self.expect_token(&Token::LBrace)?;
3671 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3672 self.expect_token(&Token::RBrace)?;
3673 Ok(Expr::Map(Map { entries: fields }))
3674 }
3675
3676 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3686 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3688
3689 self.expect_token(&Token::Colon)?;
3690
3691 let value = self.parse_expr()?;
3692
3693 Ok(MapEntry {
3694 key: Box::new(key),
3695 value: Box::new(value),
3696 })
3697 }
3698
3699 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3709 self.expect_keyword_is(Keyword::MAP)?;
3710 self.expect_token(&Token::LParen)?;
3711 let key_data_type = self.parse_data_type()?;
3712 self.expect_token(&Token::Comma)?;
3713 let value_data_type = self.parse_data_type()?;
3714 self.expect_token(&Token::RParen)?;
3715
3716 Ok((key_data_type, value_data_type))
3717 }
3718
3719 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3729 self.expect_keyword_is(Keyword::TUPLE)?;
3730 self.expect_token(&Token::LParen)?;
3731 let mut field_defs = vec![];
3732 loop {
3733 let (def, _) = self.parse_struct_field_def()?;
3734 field_defs.push(def);
3735 if !self.consume_token(&Token::Comma) {
3736 break;
3737 }
3738 }
3739 self.expect_token(&Token::RParen)?;
3740
3741 Ok(field_defs)
3742 }
3743
3744 fn expect_closing_angle_bracket(
3749 &mut self,
3750 trailing_bracket: MatchedTrailingBracket,
3751 ) -> Result<MatchedTrailingBracket, ParserError> {
3752 let trailing_bracket = if !trailing_bracket.0 {
3753 match &self.peek_token_ref().token {
3754 Token::Gt => {
3755 self.next_token();
3756 false.into()
3757 }
3758 Token::ShiftRight => {
3759 self.next_token();
3760 true.into()
3761 }
3762 _ => return self.expected_ref(">", self.peek_token_ref()),
3763 }
3764 } else {
3765 false.into()
3766 };
3767
3768 Ok(trailing_bracket)
3769 }
3770
3771 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3773 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3775 return infix;
3776 }
3777
3778 let dialect = self.dialect;
3779
3780 self.advance_token();
3781 let tok = self.get_current_token();
3782 debug!("infix: {tok:?}");
3783 let tok_index = self.get_current_index();
3784 let span = tok.span;
3785 let regular_binary_operator = match &tok.token {
3786 Token::Spaceship => Some(BinaryOperator::Spaceship),
3787 Token::DoubleEq => Some(BinaryOperator::Eq),
3788 Token::Assignment => Some(BinaryOperator::Assignment),
3789 Token::Eq => Some(BinaryOperator::Eq),
3790 Token::Neq => Some(BinaryOperator::NotEq),
3791 Token::Gt => Some(BinaryOperator::Gt),
3792 Token::GtEq => Some(BinaryOperator::GtEq),
3793 Token::Lt => Some(BinaryOperator::Lt),
3794 Token::LtEq => Some(BinaryOperator::LtEq),
3795 Token::Plus => Some(BinaryOperator::Plus),
3796 Token::Minus => Some(BinaryOperator::Minus),
3797 Token::Mul => Some(BinaryOperator::Multiply),
3798 Token::Mod => Some(BinaryOperator::Modulo),
3799 Token::StringConcat => Some(BinaryOperator::StringConcat),
3800 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3801 Token::Caret => {
3802 if dialect_is!(dialect is PostgreSqlDialect) {
3805 Some(BinaryOperator::PGExp)
3806 } else {
3807 Some(BinaryOperator::BitwiseXor)
3808 }
3809 }
3810 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3811 Token::Div => Some(BinaryOperator::Divide),
3812 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3813 Some(BinaryOperator::DuckIntegerDivide)
3814 }
3815 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3816 Some(BinaryOperator::PGBitwiseShiftLeft)
3817 }
3818 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3819 Some(BinaryOperator::PGBitwiseShiftRight)
3820 }
3821 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3822 Some(BinaryOperator::PGBitwiseXor)
3823 }
3824 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3825 Some(BinaryOperator::PGOverlap)
3826 }
3827 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3828 Some(BinaryOperator::PGOverlap)
3829 }
3830 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3831 Some(BinaryOperator::And)
3832 }
3833 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3834 Some(BinaryOperator::PGStartsWith)
3835 }
3836 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3837 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3838 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3839 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3840 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3841 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3842 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3843 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3844 Token::Arrow => Some(BinaryOperator::Arrow),
3845 Token::LongArrow => Some(BinaryOperator::LongArrow),
3846 Token::HashArrow => Some(BinaryOperator::HashArrow),
3847 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3848 Token::AtArrow => Some(BinaryOperator::AtArrow),
3849 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3850 Token::HashMinus => Some(BinaryOperator::HashMinus),
3851 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3852 Token::AtAt => Some(BinaryOperator::AtAt),
3853 Token::Question => Some(BinaryOperator::Question),
3854 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3855 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3856 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3857 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3858 Some(BinaryOperator::DoubleHash)
3859 }
3860
3861 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3862 Some(BinaryOperator::AndLt)
3863 }
3864 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3865 Some(BinaryOperator::AndGt)
3866 }
3867 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3868 Some(BinaryOperator::QuestionDash)
3869 }
3870 Token::AmpersandLeftAngleBracketVerticalBar
3871 if self.dialect.supports_geometric_types() =>
3872 {
3873 Some(BinaryOperator::AndLtPipe)
3874 }
3875 Token::VerticalBarAmpersandRightAngleBracket
3876 if self.dialect.supports_geometric_types() =>
3877 {
3878 Some(BinaryOperator::PipeAndGt)
3879 }
3880 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3881 Some(BinaryOperator::LtDashGt)
3882 }
3883 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3884 Some(BinaryOperator::LtCaret)
3885 }
3886 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3887 Some(BinaryOperator::GtCaret)
3888 }
3889 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3890 Some(BinaryOperator::QuestionHash)
3891 }
3892 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3893 Some(BinaryOperator::QuestionDoublePipe)
3894 }
3895 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3896 Some(BinaryOperator::QuestionDashPipe)
3897 }
3898 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3899 Some(BinaryOperator::TildeEq)
3900 }
3901 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3902 Some(BinaryOperator::LtLtPipe)
3903 }
3904 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3905 Some(BinaryOperator::PipeGtGt)
3906 }
3907 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3908
3909 Token::Word(w) => match w.keyword {
3910 Keyword::AND => Some(BinaryOperator::And),
3911 Keyword::OR => Some(BinaryOperator::Or),
3912 Keyword::XOR => Some(BinaryOperator::Xor),
3913 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3914 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3915 self.expect_token(&Token::LParen)?;
3916 let mut idents = vec![];
3921 loop {
3922 self.advance_token();
3923 idents.push(self.get_current_token().to_string());
3924 if !self.consume_token(&Token::Period) {
3925 break;
3926 }
3927 }
3928 self.expect_token(&Token::RParen)?;
3929 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3930 }
3931 _ => None,
3932 },
3933 _ => None,
3934 };
3935
3936 let tok = self.token_at(tok_index);
3937 if let Some(op) = regular_binary_operator {
3938 if let Some(keyword) =
3939 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3940 {
3941 self.expect_token(&Token::LParen)?;
3942 let right = if self.peek_sub_query() {
3943 self.prev_token(); self.parse_subexpr(precedence)?
3947 } else {
3948 let right = self.parse_subexpr(precedence)?;
3950 self.expect_token(&Token::RParen)?;
3951 right
3952 };
3953
3954 if !matches!(
3955 op,
3956 BinaryOperator::Gt
3957 | BinaryOperator::Lt
3958 | BinaryOperator::GtEq
3959 | BinaryOperator::LtEq
3960 | BinaryOperator::Eq
3961 | BinaryOperator::NotEq
3962 | BinaryOperator::PGRegexMatch
3963 | BinaryOperator::PGRegexIMatch
3964 | BinaryOperator::PGRegexNotMatch
3965 | BinaryOperator::PGRegexNotIMatch
3966 | BinaryOperator::PGLikeMatch
3967 | BinaryOperator::PGILikeMatch
3968 | BinaryOperator::PGNotLikeMatch
3969 | BinaryOperator::PGNotILikeMatch
3970 ) {
3971 return parser_err!(
3972 format!(
3973 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3974 ),
3975 span.start
3976 );
3977 };
3978
3979 Ok(match keyword {
3980 Keyword::ALL => Expr::AllOp {
3981 left: Box::new(expr),
3982 compare_op: op,
3983 right: Box::new(right),
3984 },
3985 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3986 left: Box::new(expr),
3987 compare_op: op,
3988 right: Box::new(right),
3989 is_some: keyword == Keyword::SOME,
3990 },
3991 unexpected_keyword => return Err(ParserError::ParserError(
3992 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3993 )),
3994 })
3995 } else {
3996 Ok(Expr::BinaryOp {
3997 left: Box::new(expr),
3998 op,
3999 right: Box::new(self.parse_subexpr(precedence)?),
4000 })
4001 }
4002 } else if let Token::Word(w) = &tok.token {
4003 match w.keyword {
4004 Keyword::IS => {
4005 if self.parse_keyword(Keyword::NULL) {
4006 Ok(Expr::IsNull(Box::new(expr)))
4007 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
4008 Ok(Expr::IsNotNull(Box::new(expr)))
4009 } else if self.parse_keywords(&[Keyword::TRUE]) {
4010 Ok(Expr::IsTrue(Box::new(expr)))
4011 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
4012 Ok(Expr::IsNotTrue(Box::new(expr)))
4013 } else if self.parse_keywords(&[Keyword::FALSE]) {
4014 Ok(Expr::IsFalse(Box::new(expr)))
4015 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
4016 Ok(Expr::IsNotFalse(Box::new(expr)))
4017 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
4018 Ok(Expr::IsUnknown(Box::new(expr)))
4019 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
4020 Ok(Expr::IsNotUnknown(Box::new(expr)))
4021 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
4022 let expr2 = self.parse_expr()?;
4023 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
4024 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
4025 {
4026 let expr2 = self.parse_expr()?;
4027 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
4028 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
4029 Ok(is_normalized)
4030 } else {
4031 self.expected_ref(
4032 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
4033 self.peek_token_ref(),
4034 )
4035 }
4036 }
4037 Keyword::AT => {
4038 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4039 Ok(Expr::AtTimeZone {
4040 timestamp: Box::new(expr),
4041 time_zone: Box::new(self.parse_subexpr(precedence)?),
4042 })
4043 }
4044 Keyword::NOT
4045 | Keyword::IN
4046 | Keyword::BETWEEN
4047 | Keyword::LIKE
4048 | Keyword::ILIKE
4049 | Keyword::SIMILAR
4050 | Keyword::REGEXP
4051 | Keyword::RLIKE => {
4052 self.prev_token();
4053 let negated = self.parse_keyword(Keyword::NOT);
4054 let regexp = self.parse_keyword(Keyword::REGEXP);
4055 let rlike = self.parse_keyword(Keyword::RLIKE);
4056 let null = if !self.in_column_definition_state() {
4057 self.parse_keyword(Keyword::NULL)
4058 } else {
4059 false
4060 };
4061 if regexp || rlike {
4062 Ok(Expr::RLike {
4063 negated,
4064 expr: Box::new(expr),
4065 pattern: Box::new(
4066 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4067 ),
4068 regexp,
4069 })
4070 } else if negated && null {
4071 Ok(Expr::IsNotNull(Box::new(expr)))
4072 } else if self.parse_keyword(Keyword::IN) {
4073 self.parse_in(expr, negated)
4074 } else if self.parse_keyword(Keyword::BETWEEN) {
4075 self.parse_between(expr, negated)
4076 } else if self.parse_keyword(Keyword::LIKE) {
4077 Ok(Expr::Like {
4078 negated,
4079 any: self.parse_keyword(Keyword::ANY),
4080 expr: Box::new(expr),
4081 pattern: Box::new(
4082 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4083 ),
4084 escape_char: self.parse_escape_char()?,
4085 })
4086 } else if self.parse_keyword(Keyword::ILIKE) {
4087 Ok(Expr::ILike {
4088 negated,
4089 any: self.parse_keyword(Keyword::ANY),
4090 expr: Box::new(expr),
4091 pattern: Box::new(
4092 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4093 ),
4094 escape_char: self.parse_escape_char()?,
4095 })
4096 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4097 Ok(Expr::SimilarTo {
4098 negated,
4099 expr: Box::new(expr),
4100 pattern: Box::new(
4101 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4102 ),
4103 escape_char: self.parse_escape_char()?,
4104 })
4105 } else {
4106 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4107 }
4108 }
4109 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4110 Ok(Expr::IsNotNull(Box::new(expr)))
4111 }
4112 Keyword::MEMBER => {
4113 if self.parse_keyword(Keyword::OF) {
4114 self.expect_token(&Token::LParen)?;
4115 let array = self.parse_expr()?;
4116 self.expect_token(&Token::RParen)?;
4117 Ok(Expr::MemberOf(MemberOf {
4118 value: Box::new(expr),
4119 array: Box::new(array),
4120 }))
4121 } else {
4122 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4123 }
4124 }
4125 _ => parser_err!(
4127 format!("No infix parser for token {:?}", tok.token),
4128 tok.span.start
4129 ),
4130 }
4131 } else if Token::DoubleColon == *tok {
4132 Ok(Expr::Cast {
4133 kind: CastKind::DoubleColon,
4134 expr: Box::new(expr),
4135 data_type: self.parse_data_type()?,
4136 array: false,
4137 format: None,
4138 })
4139 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4140 Ok(Expr::UnaryOp {
4141 op: UnaryOperator::PGPostfixFactorial,
4142 expr: Box::new(expr),
4143 })
4144 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4145 || (Token::Colon == *tok)
4146 {
4147 self.prev_token();
4148 self.parse_json_access(expr)
4149 } else {
4150 parser_err!(
4152 format!("No infix parser for token {:?}", tok.token),
4153 tok.span.start
4154 )
4155 }
4156 }
4157
4158 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4160 if self.parse_keyword(Keyword::ESCAPE) {
4161 Ok(Some(self.parse_value()?))
4162 } else {
4163 Ok(None)
4164 }
4165 }
4166
    /// Parses the contents of an array subscript after the opening `[`,
    /// consuming the closing `]`.
    ///
    /// Supports a plain index (`[i]`) and slice syntax with up to three
    /// colon-separated parts (`[lo:hi:stride]`), where any part may be
    /// omitted.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted (e.g. `[:hi]`).
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `]` immediately after the first part: either a plain index (`[i]`)
        // or a fully open slice (`[:]`, when the lower bound was omitted).
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If we parsed a lower bound, the separating `:` has not been
        // consumed yet (the omitted-lower-bound branch already ate it).
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lo:]` — upper bound omitted; early-return the two-part slice.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[lo:hi]` — no stride part.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // Third part: `[lo:hi:]` (empty stride) or `[lo:hi:stride]`.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // The `]` was already consumed when the stride was empty.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4241
4242 pub fn parse_multi_dim_subscript(
4244 &mut self,
4245 chain: &mut Vec<AccessExpr>,
4246 ) -> Result<(), ParserError> {
4247 while self.consume_token(&Token::LBracket) {
4248 self.parse_subscript(chain)?;
4249 }
4250 Ok(())
4251 }
4252
4253 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4257 let subscript = self.parse_subscript_inner()?;
4258 chain.push(AccessExpr::Subscript(subscript));
4259 Ok(())
4260 }
4261
    /// Parses one object-key segment of a JSON path (the part after `.` or a
    /// leading `:`), returning it as a `JsonPathElem::Dot`.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            // A bare or quoted identifier; only `"`- and `` ` ``-quoted (or
            // unquoted) words are accepted as keys here.
            Token::Word(Word {
                value,
                quote_style: quote_style @ (Some('"') | Some('`') | None),
                // Keywords are allowed as object keys, so the keyword tag is
                // deliberately ignored.
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            // A double-quoted string literal key, e.g. `.".a"` — always
            // treated as quoted.
            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
4286
4287 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4288 let path = self.parse_json_path()?;
4289 Ok(Expr::JsonAccess {
4290 value: Box::new(expr),
4291 path,
4292 })
4293 }
4294
    /// Parses a JSON path such as `:a.b[1]` or `:["a"].b`.
    ///
    /// The first segment must start with `:` (colon-dot or colon-bracket
    /// form); subsequent segments start with `.` or `[`. Parsing stops at the
    /// first token that does not continue the path.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // Leading `:[...]` — colon-bracket form, only valid as the
                // first segment.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // Leading `:key` — only valid as the first segment.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.key` — only valid after at least one segment.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `[expr]` — valid in any position.
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; put the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this after seeing a path-introducing token, so
        // at least one segment must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4327
    /// Parses the remainder of an `[NOT] IN ...` expression after `IN` has
    /// been consumed: `IN UNNEST(expr)`, `IN (subquery)`, or `IN (list)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array_expression)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure the position is rewound and the
        // parenthesized content is re-parsed as an expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects (e.g. Snowflake) permit `IN ()`.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4362
4363 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4365 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4368 self.expect_keyword_is(Keyword::AND)?;
4369 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4370 Ok(Expr::Between {
4371 expr: Box::new(expr),
4372 negated,
4373 low: Box::new(low),
4374 high: Box::new(high),
4375 })
4376 }
4377
4378 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4380 Ok(Expr::Cast {
4381 kind: CastKind::DoubleColon,
4382 expr: Box::new(expr),
4383 data_type: self.parse_data_type()?,
4384 array: false,
4385 format: None,
4386 })
4387 }
4388
    /// Returns the precedence of the next operator token, delegating to the
    /// dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4393
    /// Returns the token at `index`, or the shared EOF token when the index
    /// is past the end of the token stream.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4399
    /// Returns a clone of the next non-whitespace token without advancing.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4407
    /// Returns a reference to the next non-whitespace token without
    /// advancing (allocation-free variant of [`Self::peek_token`]).
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4413
    /// Returns the next `N` non-whitespace tokens (without spans), padding
    /// with EOF if the stream ends early; does not advance.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
4440
    /// Returns the next `N` non-whitespace tokens with their spans, padding
    /// with EOF tokens if the stream ends early; does not advance.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
        // Walk a local cursor so `self.index` is left untouched; each array
        // slot takes the next non-whitespace token.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            // Past the end of the stream: synthesize an EOF token.
            break token.cloned().unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            });
        })
    }
4463
    /// Borrowing variant of [`Self::peek_tokens_with_location`]: returns
    /// references to the next `N` non-whitespace tokens, padding with the
    /// shared EOF token; does not advance.
    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
        // Local cursor keeps `self.index` untouched.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.unwrap_or(&EOF_TOKEN);
        })
    }
4483
    /// Returns a clone of the nth (0-based) upcoming non-whitespace token
    /// without advancing.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4488
    /// Returns a reference to the nth (0-based) upcoming non-whitespace
    /// token without advancing; returns the shared EOF token past the end.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        // Scan forward with a local cursor, skipping whitespace tokens and
        // counting down `n` on every real token (or on EOF, which also
        // terminates the countdown since `get` keeps returning `None`).
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4508
    /// Returns a clone of the immediately next token, including whitespace,
    /// without advancing.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4514
4515 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4517 self.tokens
4518 .get(self.index + n)
4519 .cloned()
4520 .unwrap_or(TokenWithSpan {
4521 token: Token::EOF,
4522 span: Span::empty(),
4523 })
4524 }
4525
    /// Borrowing variant of [`Self::peek_nth_token_no_skip`]; returns the
    /// shared EOF token past the end of the stream.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4530
    /// Returns true if the upcoming tokens match `expected` in order,
    /// without consuming them (the index is saved and restored).
    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
        let index = self.index;
        let matched = self.parse_keywords(expected);
        // Rewind: this is a pure lookahead.
        self.index = index;
        matched
    }
4540
    /// Advances past whitespace to the next token and returns a clone of it.
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4549
    /// Returns the index of the current (most recently consumed) token;
    /// `self.index` always points one past it.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4557
    /// Advances exactly one token (whitespace included) and returns it, or
    /// `None` at end of stream.
    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
        self.index += 1;
        self.tokens.get(self.index - 1)
    }
4563
    /// Advances the cursor past whitespace so that the current token is the
    /// next meaningful token (or EOF, once `get` starts returning `None`).
    pub fn advance_token(&mut self) {
        loop {
            self.index += 1;
            match self.tokens.get(self.index - 1) {
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                // A real token — or `None` past the end, which also stops.
                _ => break,
            }
        }
    }
4579
    /// Returns the most recently consumed token (EOF if none has been
    /// consumed yet).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4586
    /// Returns the token before the current one (EOF when out of range).
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4593
    /// Returns the token immediately after the current one, without
    /// skipping whitespace and without advancing.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4600
    /// Moves the cursor back to the previous non-whitespace token, undoing
    /// one [`Self::advance_token`]/[`Self::next_token`].
    ///
    /// Panics (via `assert!`) if called at the start of the stream.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            // Keep stepping back over whitespace so a following
            // advance_token lands on the same token that was un-consumed.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
4621
    /// Builds an "Expected X, found Y" parse error located at `found`'s span.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4629
    /// Borrowing variant of [`Self::expected`] that avoids cloning the
    /// offending token.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4637
    /// Like [`Self::expected`], but reports the token at the given absolute
    /// index (EOF if out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4646
4647 #[must_use]
4650 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4651 if self.peek_keyword(expected) {
4652 self.advance_token();
4653 true
4654 } else {
4655 false
4656 }
4657 }
4658
4659 #[must_use]
4660 pub fn peek_keyword(&self, expected: Keyword) -> bool {
4664 matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4665 }
4666
    /// Consumes `expected` followed by the exact token sequence `tokens`
    /// (e.g. `ARRAY` + `<`), returning `true` only if the whole sequence
    /// matched; otherwise nothing is consumed.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4677
    /// Non-consuming variant of [`Self::parse_keyword_with_tokens`].
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4685
4686 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4687 match &self.peek_token_ref().token {
4688 Token::Word(w) if expected == w.keyword => {
4689 for (idx, token) in tokens.iter().enumerate() {
4690 if self.peek_nth_token_ref(idx + 1).token != *token {
4691 return false;
4692 }
4693 }
4694
4695 if consume {
4696 for _ in 0..(tokens.len() + 1) {
4697 self.advance_token();
4698 }
4699 }
4700
4701 true
4702 }
4703 _ => false,
4704 }
4705 }
4706
    /// Consumes the exact keyword sequence `keywords` and returns `true`;
    /// on any mismatch nothing is consumed and `false` is returned.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4714
    /// Like [`Self::parse_keywords`], but on success returns the token index
    /// of the first keyword (useful for span reconstruction). Returns `None`
    /// — with the position fully rewound — on any mismatch, and also when
    /// `keywords` is empty.
    #[must_use]
    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
        let start_index = self.index;
        let mut first_keyword_index = None;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // Roll back everything consumed so far.
                self.index = start_index;
                return None;
            }
            if first_keyword_index.is_none() {
                // `index` now points one past the keyword just consumed.
                first_keyword_index = Some(self.index.saturating_sub(1));
            }
        }
        first_keyword_index
    }
4732
4733 #[must_use]
4736 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4737 for keyword in keywords {
4738 if self.peek_keyword(*keyword) {
4739 return Some(*keyword);
4740 }
4741 }
4742 None
4743 }
4744
4745 #[must_use]
4749 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4750 match &self.peek_token_ref().token {
4751 Token::Word(w) => {
4752 keywords
4753 .iter()
4754 .find(|keyword| **keyword == w.keyword)
4755 .map(|keyword| {
4756 self.advance_token();
4757 *keyword
4758 })
4759 }
4760 _ => None,
4761 }
4762 }
4763
4764 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4767 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4768 Ok(keyword)
4769 } else {
4770 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4771 self.expected_ref(
4772 &format!("one of {}", keywords.join(" or ")),
4773 self.peek_token_ref(),
4774 )
4775 }
4776 }
4777
4778 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4783 if self.parse_keyword(expected) {
4784 Ok(self.get_current_token().clone())
4785 } else {
4786 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4787 }
4788 }
4789
4790 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4796 if self.parse_keyword(expected) {
4797 Ok(())
4798 } else {
4799 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4800 }
4801 }
4802
4803 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4806 for &kw in expected {
4807 self.expect_keyword_is(kw)?;
4808 }
4809 Ok(())
4810 }
4811
4812 #[must_use]
4816 pub fn consume_token(&mut self, expected: &Token) -> bool {
4817 if self.peek_token_ref() == expected {
4818 self.advance_token();
4819 true
4820 } else {
4821 false
4822 }
4823 }
4824
4825 #[must_use]
4829 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4830 let index = self.index;
4831 for token in tokens {
4832 if !self.consume_token(token) {
4833 self.index = index;
4834 return false;
4835 }
4836 }
4837 true
4838 }
4839
4840 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4842 if self.peek_token_ref() == expected {
4843 Ok(self.next_token())
4844 } else {
4845 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4846 }
4847 }
4848
4849 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4850 where
4851 <T as FromStr>::Err: Display,
4852 {
4853 s.parse::<T>().map_err(|e| {
4854 ParserError::ParserError(format!(
4855 "Could not parse '{s}' as {}: {e}{loc}",
4856 core::any::type_name::<T>()
4857 ))
4858 })
4859 }
4860
    /// Parses a comma-separated `SELECT` projection list.
    ///
    /// Trailing commas are accepted if enabled by parser options or by the
    /// dialect; `|` (non-short-circuiting or) is fine here since both
    /// operands are plain bools.
    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
        let trailing_commas =
            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();

        self.parse_comma_separated_with_trailing_commas(
            |p| p.parse_select_item(),
            trailing_commas,
            // A reserved keyword after a trailing comma ends the list rather
            // than being misread as a column alias.
            Self::is_reserved_for_column_alias,
        )
    }
4877
    /// Parses the comma-separated list of privileges in a `GRANT`/`REVOKE`
    /// statement (e.g. `SELECT, INSERT, UPDATE`).
    ///
    /// With trailing commas enabled, a comma directly followed by `ON` or a
    /// list-terminating token is treated as a trailing comma and ends the
    /// list instead of erroring.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                match &self.peek_token_ref().token {
                    // `GRANT SELECT, ON ...` — the comma was trailing.
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4901
4902 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4904 let trailing_commas = self.dialect.supports_from_trailing_commas();
4905
4906 self.parse_comma_separated_with_trailing_commas(
4907 Parser::parse_table_and_joins,
4908 trailing_commas,
4909 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4910 )
4911 }
4912
    /// Decides whether a comma-separated list has ended.
    ///
    /// Returns `true` when there is no comma, or — when trailing commas are
    /// allowed — when the token after a consumed comma is a reserved keyword
    /// or a list terminator. Note the trailing comma itself stays consumed
    /// in that case; only the lookahead token is pushed back.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Peek (consume then rewind) at the token after the comma.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4945
    /// List-end check with the parser's default trailing-comma option and
    /// the column-alias reserved-keyword rule.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4954
    /// Parses one or more items produced by `f`, separated by commas, using
    /// the parser's default trailing-comma option.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4966
4967 fn parse_comma_separated_with_trailing_commas<T, F, R>(
4972 &mut self,
4973 mut f: F,
4974 trailing_commas: bool,
4975 is_reserved_keyword: R,
4976 ) -> Result<Vec<T>, ParserError>
4977 where
4978 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4979 R: Fn(&Keyword, &mut Parser) -> bool,
4980 {
4981 let mut values = vec![];
4982 loop {
4983 values.push(f(self)?);
4984 if self.is_parse_comma_separated_end_with_trailing_commas(
4985 trailing_commas,
4986 &is_reserved_keyword,
4987 ) {
4988 break;
4989 }
4990 }
4991 Ok(values)
4992 }
4993
4994 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4996 where
4997 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4998 {
4999 let mut values = vec![];
5000 loop {
5001 values.push(f(self)?);
5002 if !self.consume_token(&Token::Period) {
5003 break;
5004 }
5005 }
5006 Ok(values)
5007 }
5008
5009 pub fn parse_keyword_separated<T, F>(
5011 &mut self,
5012 keyword: Keyword,
5013 mut f: F,
5014 ) -> Result<Vec<T>, ParserError>
5015 where
5016 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
5017 {
5018 let mut values = vec![];
5019 loop {
5020 values.push(f(self)?);
5021 if !self.parse_keyword(keyword) {
5022 break;
5023 }
5024 }
5025 Ok(values)
5026 }
5027
    /// Runs `f` between a required `(` and `)` pair and returns its result.
    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.expect_token(&Token::LParen)?;
        let res = f(self)?;
        self.expect_token(&Token::RParen)?;
        Ok(res)
    }
5038
    /// Parses zero or more comma-separated items produced by `f`.
    ///
    /// Unlike [`Self::parse_comma_separated`], an empty list is allowed:
    /// `end_token` is the token (not consumed) that marks the end of the
    /// list, e.g. `)` for `f(a, b)`.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // Empty list: the terminator appears immediately.
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        // `( , )` style: a lone trailing comma before the terminator is also
        // an empty list when trailing commas are enabled.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5060
    /// Parses a sequence of semicolon-terminated statements, stopping at EOF
    /// or at any unquoted keyword listed in `terminal_keywords` (e.g. `END`
    /// for a `BEGIN ... END` block). The terminator itself is not consumed.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                // Only bare (unquoted) words count as terminators; a quoted
                // identifier that happens to spell a keyword does not.
                Token::Word(w)
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) =>
                {
                    break;
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            // Every statement in the list must be terminated by `;`.
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5085
    /// Returns true when `kw` cannot serve as a column alias in the current
    /// dialect (i.e. it is reserved and would terminate a projection list).
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5092
5093 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
5097 where
5098 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5099 {
5100 match self.try_parse(f) {
5101 Ok(t) => Ok(Some(t)),
5102 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
5103 _ => Ok(None),
5104 }
5105 }
5106
5107 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
5109 where
5110 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5111 {
5112 let index = self.index;
5113 match f(self) {
5114 Ok(t) => Ok(t),
5115 Err(e) => {
5116 self.index = index;
5118 Err(e)
5119 }
5120 }
5121 }
5122
    /// Parses an optional `ALL` / `DISTINCT` / `DISTINCT ON (...)` clause
    /// after `SELECT` (or a set operator).
    ///
    /// Returns `None` when neither keyword is present, and rejects the
    /// contradictory combinations `ALL DISTINCT` and `DISTINCT ALL`.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember where the clause started for error locations.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            // Unreachable in practice: parse_one_of_keywords only yields the
            // keywords it was given, but the match must be exhaustive.
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only `DISTINCT` may be followed by an `ON (...)` clause.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        // `DISTINCT ON ()` with an empty column list is permitted; peek for
        // an immediate `)` by consuming and rewinding it.
        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5161
    /// Parses the statement following a consumed `CREATE` keyword and
    /// dispatches to the object-specific sub-parser (`TABLE`, `VIEW`,
    /// `FUNCTION`, `SCHEMA`, ...).
    ///
    /// Common prefix modifiers (`OR REPLACE`, `OR ALTER`, `LOCAL`/`GLOBAL`,
    /// `TRANSIENT`, `TEMP`/`TEMPORARY`, DuckDB's `PERSISTENT`, and view
    /// parameters) are consumed up front and forwarded to whichever
    /// sub-parser accepts them. The branch order matters: multi-keyword
    /// probes such as `SNAPSHOT TABLE` use `peek_keywords` so they do not
    /// consume tokens when they fail to match.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Collapse LOCAL/GLOBAL into a tri-state: GLOBAL => Some(true),
        // LOCAL => Some(false), neither => None.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is only recognized for DuckDB (secrets).
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            if self.parse_keyword(Keyword::MAPPING) {
                self.parse_create_user_mapping().map(Into::into)
            } else {
                self.parse_create_user(or_replace).map(Into::into)
            }
        } else if self.parse_keyword(Keyword::AGGREGATE) {
            self.parse_create_aggregate(or_replace).map(Into::into)
        } else if self.peek_keyword(Keyword::TRUSTED)
            || self.peek_keyword(Keyword::PROCEDURAL)
            || self.peek_keyword(Keyword::LANGUAGE)
        {
            // `CREATE [TRUSTED] [PROCEDURAL] LANGUAGE ...`
            let trusted = self.parse_keyword(Keyword::TRUSTED);
            let procedural = self.parse_keyword(Keyword::PROCEDURAL);
            if self.parse_keyword(Keyword::LANGUAGE) {
                self.parse_create_language(or_replace, trusted, procedural)
                    .map(Into::into)
            } else {
                self.expected_ref(
                    "LANGUAGE after TRUSTED or PROCEDURAL",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keyword(Keyword::TRANSFORM) {
            self.parse_create_transform(or_replace).map(Into::into)
        } else if or_replace {
            // OR REPLACE is only valid for the object kinds handled above.
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::CAST) {
            self.parse_create_cast().map(Into::into)
        } else if self.parse_keyword(Keyword::CONVERSION) {
            self.parse_create_conversion(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CONVERSION]) {
            self.parse_create_conversion(true).map(Into::into)
        } else if self.parse_keyword(Keyword::RULE) {
            self.parse_create_rule().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // `CREATE OPERATOR [FAMILY | CLASS] ...`
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else if self.parse_keyword(Keyword::FOREIGN) {
            if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
                self.parse_create_foreign_data_wrapper().map(Into::into)
            } else if self.parse_keyword(Keyword::TABLE) {
                self.parse_create_foreign_table().map(Into::into)
            } else {
                self.expected_ref(
                    "DATA WRAPPER or TABLE after CREATE FOREIGN",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
            self.parse_create_text_search()
        } else if self.parse_keyword(Keyword::PUBLICATION) {
            self.parse_create_publication().map(Into::into)
        } else if self.parse_keyword(Keyword::SUBSCRIPTION) {
            self.parse_create_subscription().map(Into::into)
        } else if self.parse_keyword(Keyword::STATISTICS) {
            self.parse_create_statistics().map(Into::into)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::METHOD]) {
            self.parse_create_access_method().map(Into::into)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::TRIGGER]) {
            self.parse_create_event_trigger().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLESPACE) {
            self.parse_create_tablespace().map(Into::into)
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5314
    /// Parses a `CREATE USER` statement body (`USER` has already been
    /// consumed): optional `IF NOT EXISTS`, the user name, a
    /// space-delimited option list, and an optional `WITH TAG` clause with
    /// comma-delimited key/value pairs.
    fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        // Stop option scanning at WITH/TAG so the tag clause is not
        // swallowed as an ordinary option.
        let options = self
            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
            .options;
        // WITH is optional before TAG; record whether it appeared so the
        // statement can be round-tripped faithfully.
        let with_tags = self.parse_keyword(Keyword::WITH);
        let tags = if self.parse_keyword(Keyword::TAG) {
            self.parse_key_value_options(true, &[])?.options
        } else {
            vec![]
        };
        Ok(CreateUser {
            or_replace,
            if_not_exists,
            name,
            options: KeyValueOptions {
                options,
                delimiter: KeyValueOptionsDelimiter::Space,
            },
            with_tags,
            tags: KeyValueOptions {
                options: tags,
                delimiter: KeyValueOptionsDelimiter::Comma,
            },
        })
    }
5342
    /// Parses a `CREATE ... SECRET` statement body (`SECRET` has already
    /// been consumed); `persistent` is only ever set for DuckDB (see
    /// `parse_create`).
    ///
    /// Both the secret name and the `IN <storage>` specifier are optional
    /// and may appear in either order before the parenthesized
    /// `(TYPE <t> [, key value ...])` option list.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token_ref().token != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN <storage>`.
            if storage_specifier.is_none()
                && self.peek_token_ref().token != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive; absence of both
        // leaves the scope unspecified (None).
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
5401
5402 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5404 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5405 if self.parse_keyword(Keyword::TABLE) {
5406 let table_name = self.parse_object_name(false)?;
5407 if self.peek_token_ref().token != Token::EOF {
5408 if let Token::Word(word) = &self.peek_token_ref().token {
5409 if word.keyword == Keyword::OPTIONS {
5410 options = self.parse_options(Keyword::OPTIONS)?
5411 }
5412 };
5413
5414 if self.peek_token_ref().token != Token::EOF {
5415 let (a, q) = self.parse_as_query()?;
5416 has_as = a;
5417 query = Some(q);
5418 }
5419
5420 Ok(Statement::Cache {
5421 table_flag,
5422 table_name,
5423 has_as,
5424 options,
5425 query,
5426 })
5427 } else {
5428 Ok(Statement::Cache {
5429 table_flag,
5430 table_name,
5431 has_as,
5432 options,
5433 query,
5434 })
5435 }
5436 } else {
5437 table_flag = Some(self.parse_object_name(false)?);
5438 if self.parse_keyword(Keyword::TABLE) {
5439 let table_name = self.parse_object_name(false)?;
5440 if self.peek_token_ref().token != Token::EOF {
5441 if let Token::Word(word) = &self.peek_token_ref().token {
5442 if word.keyword == Keyword::OPTIONS {
5443 options = self.parse_options(Keyword::OPTIONS)?
5444 }
5445 };
5446
5447 if self.peek_token_ref().token != Token::EOF {
5448 let (a, q) = self.parse_as_query()?;
5449 has_as = a;
5450 query = Some(q);
5451 }
5452
5453 Ok(Statement::Cache {
5454 table_flag,
5455 table_name,
5456 has_as,
5457 options,
5458 query,
5459 })
5460 } else {
5461 Ok(Statement::Cache {
5462 table_flag,
5463 table_name,
5464 has_as,
5465 options,
5466 query,
5467 })
5468 }
5469 } else {
5470 if self.peek_token_ref().token == Token::EOF {
5471 self.prev_token();
5472 }
5473 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5474 }
5475 }
5476 }
5477
5478 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5480 match &self.peek_token_ref().token {
5481 Token::Word(word) => match word.keyword {
5482 Keyword::AS => {
5483 self.next_token();
5484 Ok((true, self.parse_query()?))
5485 }
5486 _ => Ok((false, self.parse_query()?)),
5487 },
5488 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5489 }
5490 }
5491
5492 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5494 self.expect_keyword_is(Keyword::TABLE)?;
5495 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5496 let table_name = self.parse_object_name(false)?;
5497 Ok(Statement::UNCache {
5498 table_name,
5499 if_exists,
5500 })
5501 }
5502
5503 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5505 self.expect_keyword_is(Keyword::TABLE)?;
5506 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5507 let table_name = self.parse_object_name(false)?;
5508 self.expect_keyword_is(Keyword::USING)?;
5509 let module_name = self.parse_identifier()?;
5510 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5515 Ok(Statement::CreateVirtualTable {
5516 name: table_name,
5517 if_not_exists,
5518 module_name,
5519 module_args,
5520 })
5521 }
5522
5523 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5525 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5526
5527 let schema_name = self.parse_schema_name()?;
5528
5529 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5530 Some(self.parse_expr()?)
5531 } else {
5532 None
5533 };
5534
5535 let with = if self.peek_keyword(Keyword::WITH) {
5536 Some(self.parse_options(Keyword::WITH)?)
5537 } else {
5538 None
5539 };
5540
5541 let options = if self.peek_keyword(Keyword::OPTIONS) {
5542 Some(self.parse_options(Keyword::OPTIONS)?)
5543 } else {
5544 None
5545 };
5546
5547 let clone = if self.parse_keyword(Keyword::CLONE) {
5548 Some(self.parse_object_name(false)?)
5549 } else {
5550 None
5551 };
5552
5553 Ok(Statement::CreateSchema {
5554 schema_name,
5555 if_not_exists,
5556 with,
5557 options,
5558 default_collate_spec,
5559 clone,
5560 })
5561 }
5562
5563 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5564 if self.parse_keyword(Keyword::AUTHORIZATION) {
5565 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5566 } else {
5567 let name = self.parse_object_name(false)?;
5568
5569 if self.parse_keyword(Keyword::AUTHORIZATION) {
5570 Ok(SchemaName::NamedAuthorization(
5571 name,
5572 self.parse_identifier()?,
5573 ))
5574 } else {
5575 Ok(SchemaName::Simple(name))
5576 }
5577 }
5578 }
5579
    /// Parses `CREATE DATABASE` after the `DATABASE` keyword, including
    /// Hive's `LOCATION`/`MANAGEDLOCATION`, an optional `CLONE`, and
    /// MySQL-style `[DEFAULT] { CHARACTER SET | CHARSET | COLLATE }`
    /// options. Snowflake-only fields of `Statement::CreateDatabase` are
    /// left at their defaults here.
    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let db_name = self.parse_object_name(false)?;
        let mut location = None;
        let mut managed_location = None;
        // LOCATION and MANAGEDLOCATION may appear in either order; if one
        // is repeated, the last occurrence wins.
        loop {
            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
                Some(Keyword::MANAGEDLOCATION) => {
                    managed_location = Some(self.parse_literal_string()?)
                }
                _ => break,
            }
        }
        let clone = if self.parse_keyword(Keyword::CLONE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let mut default_charset = None;
        let mut default_collation = None;
        loop {
            let has_default = self.parse_keyword(Keyword::DEFAULT);
            // NOTE(review): `&&` binds tighter than `||`, so this condition
            // is `(is_none && CHARACTER SET) || CHARSET` — a bare CHARSET
            // is accepted (and overwrites) even after a charset was already
            // parsed. Confirm this is the intended grammar.
            if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
                || self.parse_keyword(Keyword::CHARSET)
            {
                let _ = self.consume_token(&Token::Eq);
                default_charset = Some(self.parse_identifier()?.value);
            } else if self.parse_keyword(Keyword::COLLATE) {
                let _ = self.consume_token(&Token::Eq);
                default_collation = Some(self.parse_identifier()?.value);
            } else if has_default {
                // A lone DEFAULT that introduced neither clause: push it
                // back onto the stream and stop scanning.
                self.prev_token();
                break;
            } else {
                break;
            }
        }

        Ok(Statement::CreateDatabase {
            db_name,
            if_not_exists: ine,
            location,
            managed_location,
            or_replace: false,
            transient: false,
            clone,
            data_retention_time_in_days: None,
            max_data_extension_time_in_days: None,
            external_volume: None,
            catalog: None,
            replace_invalid_characters: None,
            default_ddl_collation: None,
            storage_serialization_policy: None,
            comment: None,
            default_charset,
            default_collation,
            catalog_sync: None,
            catalog_sync_namespace_mode: None,
            catalog_sync_namespace_flatten_delimiter: None,
            with_tags: None,
            with_contacts: None,
        })
    }
5654
5655 pub fn parse_optional_create_function_using(
5657 &mut self,
5658 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5659 if !self.parse_keyword(Keyword::USING) {
5660 return Ok(None);
5661 };
5662 let keyword =
5663 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5664
5665 let uri = self.parse_literal_string()?;
5666
5667 match keyword {
5668 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5669 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5670 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5671 _ => self.expected(
5672 "JAR, FILE or ARCHIVE, got {:?}",
5673 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5674 ),
5675 }
5676 }
5677
    /// Dispatches `CREATE [OR ALTER] [OR REPLACE] [TEMPORARY] FUNCTION`
    /// to a dialect-specific parser (Hive, PostgreSQL/Generic, DuckDB,
    /// BigQuery, or MsSql).
    ///
    /// Dialects without a recognized `CREATE FUNCTION` grammar rewind the
    /// `FUNCTION` keyword and report a generic "object type" error.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB functions are parsed as macro definitions.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
                .map(Into::into)
        } else {
            // Rewind so the error points at the FUNCTION keyword itself.
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5704
    /// Parses a PostgreSQL (or Generic dialect) `CREATE FUNCTION` body:
    /// name, parenthesized argument list, optional `RETURNS` clause, and
    /// then any number of attribute clauses in arbitrary order (`AS`,
    /// `LANGUAGE`, volatility, null-input handling, `PARALLEL`,
    /// `SECURITY`, `SET`, `RETURN`). Each attribute may appear at most
    /// once except `SET`, which may repeat.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // An empty parameter list `()` yields an empty vec.
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Accumulates the order-independent attribute clauses; `None`
        // means "not seen yet".
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a second occurrence of an attribute clause.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // `SET name FROM CURRENT` or `SET name { = | TO }
                // { DEFAULT | <expr>, ... }`; may appear multiple times.
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No recognized attribute keyword: end of the clause list.
                break;
            }
        }

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5859
5860 fn parse_hive_create_function(
5864 &mut self,
5865 or_replace: bool,
5866 temporary: bool,
5867 ) -> Result<CreateFunction, ParserError> {
5868 let name = self.parse_object_name(false)?;
5869 self.expect_keyword_is(Keyword::AS)?;
5870
5871 let body = self.parse_create_function_body_string()?;
5872 let using = self.parse_optional_create_function_using()?;
5873
5874 Ok(CreateFunction {
5875 or_alter: false,
5876 or_replace,
5877 temporary,
5878 name,
5879 function_body: Some(body),
5880 using,
5881 if_not_exists: false,
5882 args: None,
5883 return_type: None,
5884 behavior: None,
5885 called_on_null: None,
5886 parallel: None,
5887 security: None,
5888 set_params: vec![],
5889 language: None,
5890 determinism_specifier: None,
5891 options: None,
5892 remote_connection: None,
5893 })
5894 }
5895
    /// Parses a BigQuery `CREATE FUNCTION` body: optional `IF NOT EXISTS`,
    /// name and typed parameter list, then the optional `RETURNS`,
    /// determinism, `LANGUAGE`, and `REMOTE WITH CONNECTION` clauses.
    ///
    /// `OPTIONS(...)` may appear either before or after `AS <expr>`; the
    /// position is preserved via the two `CreateFunctionBody` variants. A
    /// local body is required unless the function is remote.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS(...) may precede the AS clause.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS seen before AS: it may still follow the body.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            // Remote functions carry no local body.
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5975
    /// Parses a T-SQL `CREATE [OR ALTER] FUNCTION` body: name, parameter
    /// list, a mandatory `RETURNS` clause (either a scalar type or an
    /// inline `@name TABLE (...)` definition), and a body that is either
    /// `BEGIN ... END` or `RETURN <subquery | SELECT>`.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Speculatively try `RETURNS <ident> TABLE (...)`; on failure the
        // stream is rewound and a plain data type is parsed instead.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            p.expect_keyword_is(Keyword::TABLE)?;
            // Step back so `parse_data_type` sees the TABLE keyword itself.
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // `AS` before the body is optional in T-SQL.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            // Multi-statement body: BEGIN <stmts> END, with the delimiting
            // tokens preserved for round-tripping.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token_ref().token == Token::LParen {
                // Parenthesized subquery expression.
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                // Bare SELECT statement.
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
6066
6067 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
6068 if self.parse_keyword(Keyword::SETOF) {
6069 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
6070 } else {
6071 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
6072 }
6073 }
6074
6075 fn parse_create_function_name_and_params(
6076 &mut self,
6077 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
6078 let name = self.parse_object_name(false)?;
6079 let parse_function_param =
6080 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6081 let name = parser.parse_identifier()?;
6082 let data_type = parser.parse_data_type()?;
6083 let default_expr = if parser.consume_token(&Token::Eq) {
6084 Some(parser.parse_expr()?)
6085 } else {
6086 None
6087 };
6088
6089 Ok(OperateFunctionArg {
6090 mode: None,
6091 name: Some(name),
6092 data_type,
6093 default_expr,
6094 })
6095 };
6096 self.expect_token(&Token::LParen)?;
6097 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6098 self.expect_token(&Token::RParen)?;
6099 Ok((name, args))
6100 }
6101
    /// Parses a single `CREATE FUNCTION` parameter: optional mode
    /// (`IN`/`OUT`/`INOUT`/`VARIADIC`), optional name, data type, and an
    /// optional `DEFAULT`/`=` expression.
    ///
    /// A parameter name cannot be distinguished from a type name up front,
    /// so a data type is parsed first; if a second data type then parses
    /// successfully, the first parse is reinterpreted as the name.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Recorded so the name token can be recovered if the parse above
        // turns out to have consumed the parameter name instead of a type.
        let data_type_idx = self.get_current_index();

        // Parses a data type but refuses to treat the DEFAULT keyword as
        // one, so a trailing DEFAULT clause is left for the code below.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // A second data type parsed: the token at `data_type_idx` is the
        // parameter name and the new parse is the real type.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // Only a bare word can serve as a parameter name.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6162
    /// Parse a single argument in a `CREATE AGGREGATE` signature.
    /// Unlike function arguments, only the `IN` mode (or no mode) is accepted
    /// and `DEFAULT`/`=` default values are rejected.
    fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else {
            // OUT/INOUT/VARIADIC are not valid in an aggregate signature;
            // report a targeted error rather than misparsing them as a type.
            if self
                .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
                .is_some()
            {
                return self.expected_ref(
                    "IN or argument type in aggregate signature",
                    self.peek_token_ref(),
                );
            }
            None
        };

        // The argument name is optional: parse what may be either the name or
        // the type, then speculatively try a second data type below.
        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        // Index of the token that concluded the first parse, used to recover
        // the name token if a second data type follows.
        let data_type_idx = self.get_current_index();

        // Reject tokens that may legally follow an aggregate argument so the
        // speculative parse does not consume them as part of a type name.
        fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT)
                || parser.peek_keyword(Keyword::ORDER)
                || parser.peek_token_ref().token == Token::Comma
                || parser.peek_token_ref().token == Token::RParen
            {
                parser_err!(
                    "The current token cannot start an aggregate argument type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // If a second data type parses, the first parse was the argument name.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
            let token = self.token_at(data_type_idx);
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Aggregate arguments cannot carry default values.
        if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
            return self.expected_ref(
                "',' or ')' or ORDER BY after aggregate argument type",
                self.peek_token_ref(),
            );
        }

        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr: None,
        })
    }
6225
6226 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6232 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6233 {
6234 self.prev_token();
6235 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6236 }
6237 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6238 let trigger_name = self.parse_object_name(false)?;
6239 let table_name = if self.parse_keyword(Keyword::ON) {
6240 Some(self.parse_object_name(false)?)
6241 } else {
6242 None
6243 };
6244 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6245 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6246 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6247 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6248 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6249 )),
6250 None => None,
6251 };
6252 Ok(DropTrigger {
6253 if_exists,
6254 trigger_name,
6255 table_name,
6256 option,
6257 })
6258 }
6259
    /// Parse the remainder of a `CREATE [OR ALTER | OR REPLACE] [TEMPORARY]
    /// [CONSTRAINT] TRIGGER` statement; flags for what the caller already
    /// consumed are passed in.
    pub fn parse_create_trigger(
        &mut self,
        temporary: bool,
        or_alter: bool,
        or_replace: bool,
        is_constraint: bool,
    ) -> Result<CreateTrigger, ParserError> {
        // Only a subset of dialects supports CREATE TRIGGER; back up so the
        // error points at the object-type keyword.
        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
        {
            self.prev_token();
            return self.expected_ref("an object type after CREATE", self.peek_token_ref());
        }

        let name = self.parse_object_name(false)?;
        // The period (e.g. BEFORE/AFTER/INSTEAD OF) is optional here.
        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;

        // One or more events joined by OR, e.g. `INSERT OR UPDATE`.
        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `FROM referenced_table`; a failed name parse is swallowed
        // via `.ok()` rather than propagated.
        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
            self.parse_object_name(true).ok()
        } else {
            None
        };

        let characteristics = self.parse_constraint_characteristics()?;

        // `REFERENCING OLD/NEW TABLE [AS] name` entries, zero or more.
        let mut referencing = vec![];
        if self.parse_keyword(Keyword::REFERENCING) {
            while let Some(refer) = self.parse_trigger_referencing()? {
                referencing.push(refer);
            }
        }

        // `FOR [EACH] { ROW | STATEMENT }`.
        let trigger_object = if self.parse_keyword(Keyword::FOR) {
            let include_each = self.parse_keyword(Keyword::EACH);
            let trigger_object =
                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
                    Keyword::ROW => TriggerObject::Row,
                    Keyword::STATEMENT => TriggerObject::Statement,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
                    )),
                };

            Some(if include_each {
                TriggerObjectKind::ForEach(trigger_object)
            } else {
                TriggerObjectKind::For(trigger_object)
            })
        } else {
            // NOTE(review): FOR cannot match here — the `if` above just failed
            // on the same token — so this call looks like a no-op; confirm
            // intent and consider removing it.
            let _ = self.parse_keyword(Keyword::FOR);

            None
        };

        // Optional `WHEN condition`.
        let condition = self
            .parse_keyword(Keyword::WHEN)
            .then(|| self.parse_expr())
            .transpose()?;

        // Trigger body: either `EXECUTE FUNCTION/PROCEDURE ...` or a list of
        // statements terminated by END.
        let mut exec_body = None;
        let mut statements = None;
        if self.parse_keyword(Keyword::EXECUTE) {
            exec_body = Some(self.parse_trigger_exec_body()?);
        } else {
            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
        }

        Ok(CreateTrigger {
            or_alter,
            temporary,
            or_replace,
            is_constraint,
            name,
            period,
            period_before_table: true,
            events,
            table_name,
            referenced_table_name,
            referencing,
            trigger_object,
            condition,
            exec_body,
            statements_as: false,
            statements,
            characteristics,
        })
    }
6351
6352 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6354 Ok(
6355 match self.expect_one_of_keywords(&[
6356 Keyword::FOR,
6357 Keyword::BEFORE,
6358 Keyword::AFTER,
6359 Keyword::INSTEAD,
6360 ])? {
6361 Keyword::FOR => TriggerPeriod::For,
6362 Keyword::BEFORE => TriggerPeriod::Before,
6363 Keyword::AFTER => TriggerPeriod::After,
6364 Keyword::INSTEAD => self
6365 .expect_keyword_is(Keyword::OF)
6366 .map(|_| TriggerPeriod::InsteadOf)?,
6367 unexpected_keyword => return Err(ParserError::ParserError(
6368 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6369 )),
6370 },
6371 )
6372 }
6373
6374 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6376 Ok(
6377 match self.expect_one_of_keywords(&[
6378 Keyword::INSERT,
6379 Keyword::UPDATE,
6380 Keyword::DELETE,
6381 Keyword::TRUNCATE,
6382 ])? {
6383 Keyword::INSERT => TriggerEvent::Insert,
6384 Keyword::UPDATE => {
6385 if self.parse_keyword(Keyword::OF) {
6386 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6387 TriggerEvent::Update(cols)
6388 } else {
6389 TriggerEvent::Update(vec![])
6390 }
6391 }
6392 Keyword::DELETE => TriggerEvent::Delete,
6393 Keyword::TRUNCATE => TriggerEvent::Truncate,
6394 unexpected_keyword => return Err(ParserError::ParserError(
6395 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6396 )),
6397 },
6398 )
6399 }
6400
6401 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6403 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6404 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6405 TriggerReferencingType::OldTable
6406 }
6407 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6408 TriggerReferencingType::NewTable
6409 }
6410 _ => {
6411 return Ok(None);
6412 }
6413 };
6414
6415 let is_as = self.parse_keyword(Keyword::AS);
6416 let transition_relation_name = self.parse_object_name(false)?;
6417 Ok(Some(TriggerReferencing {
6418 refer_type,
6419 is_as,
6420 transition_relation_name,
6421 }))
6422 }
6423
6424 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6431 let exec_type = match self
6432 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6433 {
6434 Keyword::FUNCTION => TriggerExecBodyType::Function,
6435 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6436 unexpected_keyword => {
6437 return Err(ParserError::ParserError(format!(
6438 "Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"
6439 )))
6440 }
6441 };
6442
6443 let func_name = self.parse_object_name(false)?;
6444
6445 let args = if self.consume_token(&Token::LParen) {
6446 if self.consume_token(&Token::RParen) {
6447 Some(vec![])
6448 } else {
6449 let exprs = self.parse_comma_separated(Parser::parse_expr)?;
6450 self.expect_token(&Token::RParen)?;
6451 Some(exprs)
6452 }
6453 } else {
6454 None
6455 };
6456
6457 Ok(TriggerExecBody {
6458 exec_type,
6459 func_name,
6460 args,
6461 })
6462 }
6463
    /// Parse a DuckDB-style `CREATE [OR REPLACE] [TEMPORARY] MACRO` statement,
    /// starting at the macro name.
    pub fn parse_create_macro(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is DuckDbDialect | GenericDialect) {
            let name = self.parse_object_name(false)?;
            self.expect_token(&Token::LParen)?;
            // Peek for `()`: if RParen follows immediately there are no args.
            // The token is pushed back so the unconditional expect below
            // consumes it.
            let args = if self.consume_token(&Token::RParen) {
                self.prev_token();
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
            };

            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::AS)?;

            Ok(Statement::CreateMacro {
                or_replace,
                temporary,
                name,
                args,
                // A TABLE macro wraps a query; otherwise the body is a scalar
                // expression.
                definition: if self.parse_keyword(Keyword::TABLE) {
                    MacroDefinition::Table(self.parse_query()?)
                } else {
                    MacroDefinition::Expr(self.parse_expr()?)
                },
            })
        } else {
            // Other dialects don't support CREATE MACRO; rewind for the error.
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
6499
6500 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6501 let name = self.parse_identifier()?;
6502
6503 let default_expr =
6504 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6505 Some(self.parse_expr()?)
6506 } else {
6507 None
6508 };
6509 Ok(MacroArg { name, default_expr })
6510 }
6511
    /// Parse a Hive-style `CREATE EXTERNAL TABLE` statement, starting at the
    /// `TABLE` keyword.
    pub fn parse_create_external_table(
        &mut self,
        or_replace: bool,
    ) -> Result<CreateTable, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(false)?;
        let (columns, constraints) = self.parse_columns()?;

        let hive_distribution = self.parse_hive_distribution()?;
        let hive_formats = self.parse_hive_formats()?;

        // Extract a plain file format (if any) from the Hive storage
        // description; other storage kinds yield None.
        let file_format = if let Some(ref hf) = hive_formats {
            if let Some(ref ff) = hf.storage {
                match ff {
                    HiveIOFormat::FileFormat { format } => Some(*format),
                    _ => None,
                }
            } else {
                None
            }
        } else {
            None
        };
        let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
        // A non-empty TBLPROPERTIES list takes precedence over a trailing
        // OPTIONS list.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        let table_options = if !table_properties.is_empty() {
            CreateTableOptions::TableProperties(table_properties)
        } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
            CreateTableOptions::Options(options)
        } else {
            CreateTableOptions::None
        };
        Ok(CreateTableBuilder::new(table_name)
            .columns(columns)
            .constraints(constraints)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .table_options(table_options)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .external(true)
            .file_format(file_format)
            .location(location)
            .build())
    }
6559
6560 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6564 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6565 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6566 let table_name = self.parse_object_name(true)?;
6567
6568 self.expect_keyword_is(Keyword::CLONE)?;
6569 let clone = Some(self.parse_object_name(true)?);
6570
6571 let version =
6572 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6573 {
6574 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6575 } else {
6576 None
6577 };
6578
6579 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6580 CreateTableOptions::Options(options)
6581 } else {
6582 CreateTableOptions::None
6583 };
6584
6585 Ok(CreateTableBuilder::new(table_name)
6586 .snapshot(true)
6587 .if_not_exists(if_not_exists)
6588 .clone_clause(clone)
6589 .version(version)
6590 .table_options(table_options)
6591 .build())
6592 }
6593
6594 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6596 let next_token = self.next_token();
6597 match &next_token.token {
6598 Token::Word(w) => match w.keyword {
6599 Keyword::AVRO => Ok(FileFormat::AVRO),
6600 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6601 Keyword::ORC => Ok(FileFormat::ORC),
6602 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6603 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6604 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6605 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6606 _ => self.expected("fileformat", next_token),
6607 },
6608 _ => self.expected("fileformat", next_token),
6609 }
6610 }
6611
6612 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6613 if self.consume_token(&Token::Eq) {
6614 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6615 } else {
6616 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6617 }
6618 }
6619
6620 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6622 let next_token = self.next_token();
6623 match &next_token.token {
6624 Token::Word(w) => match w.keyword {
6625 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6626 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6627 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6628 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6629 _ => self.expected("fileformat", next_token),
6630 },
6631 _ => self.expected("fileformat", next_token),
6632 }
6633 }
6634
    /// Parse `CREATE [OR ALTER] [OR REPLACE] [TEMPORARY] [SECURE]
    /// [MATERIALIZED] VIEW ...`; flags the caller has already consumed are
    /// passed in. Clause order is dialect-sensitive, so the sequence of
    /// parses below matters.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // IF NOT EXISTS may appear either before or after the view name;
        // record which position was used so the AST can round-trip.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        // `WITH ( ... )` options; may be superseded by `OPTIONS ( ... )` below.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        // `CLUSTER BY ( col, ... )`.
        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style `OPTIONS ( ... )` overrides any WITH options above.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse-style `TO table` target.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '...'` where the dialect supports it.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift `WITH NO SCHEMA BINDING` trails the defining query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        // Materialized views may end with `WITH [NO] DATA`.
        let with_data = if materialized && self.parse_keyword(Keyword::WITH) {
            if self.parse_keyword(Keyword::NO) {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(false)
            } else {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(true)
            }
        } else {
            None
        };

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
            with_data,
        })
    }
6744
    /// Parse MySQL-style view parameters that may precede `VIEW`:
    /// `[ALGORITHM = ...] [DEFINER = ...] [SQL SECURITY ...]`.
    /// Returns `Ok(None)` when none of the clauses is present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    _ => {
                        // Defensive arm (expect_one_of_keywords should not
                        // return anything else): rewind so the error names
                        // the offending token.
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    _ => {
                        // Defensive arm, as above.
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only build the params struct when at least one clause was given.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6802
    /// Parse `CREATE ROLE [IF NOT EXISTS] name [, ...] [WITH] [option ...]`.
    /// The set of recognized option keywords depends on the dialect (MSSQL:
    /// AUTHORIZATION only; Postgres: the full role-attribute list; others:
    /// none). Each option may appear at most once.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // WITH is optional noise before the option list.
        let _ = self.parse_keyword(Keyword::WITH);
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Accumulators for each (at most once) option.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        // Consume options until no recognized keyword follows. Each arm
        // yields a Result so the trailing `?` reports duplicate options with
        // the location of the offending keyword.
        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, for error messages.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    // CONNECTION must be followed by LIMIT <number>.
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    // VALID must be followed by UNTIL <value>.
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // IN ROLE ... or IN GROUP ... membership lists.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Any other keyword ends the option list.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
7027
7028 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
7030 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
7031 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
7032 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
7033 Some(Keyword::SESSION_USER) => Owner::SessionUser,
7034 Some(unexpected_keyword) => return Err(ParserError::ParserError(
7035 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
7036 )),
7037 None => {
7038 match self.parse_identifier() {
7039 Ok(ident) => Owner::Ident(ident),
7040 Err(e) => {
7041 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
7042 }
7043 }
7044 }
7045 };
7046 Ok(owner)
7047 }
7048
7049 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
7051 let name = self.parse_object_name(false)?;
7052 self.expect_keyword_is(Keyword::AS)?;
7053 let data_type = self.parse_data_type()?;
7054 let collation = if self.parse_keyword(Keyword::COLLATE) {
7055 Some(self.parse_identifier()?)
7056 } else {
7057 None
7058 };
7059 let default = if self.parse_keyword(Keyword::DEFAULT) {
7060 Some(self.parse_expr()?)
7061 } else {
7062 None
7063 };
7064 let mut constraints = Vec::new();
7065 while let Some(constraint) = self.parse_optional_table_constraint()? {
7066 constraints.push(constraint);
7067 }
7068
7069 Ok(CreateDomain {
7070 name,
7071 data_type,
7072 collation,
7073 default,
7074 constraints,
7075 })
7076 }
7077
7078 pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
7088 let name = self.parse_identifier()?;
7089 self.expect_keyword_is(Keyword::ON)?;
7090 let table_name = self.parse_object_name(false)?;
7091
7092 let policy_type = if self.parse_keyword(Keyword::AS) {
7093 let keyword =
7094 self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
7095 Some(match keyword {
7096 Keyword::PERMISSIVE => CreatePolicyType::Permissive,
7097 Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
7098 unexpected_keyword => return Err(ParserError::ParserError(
7099 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
7100 )),
7101 })
7102 } else {
7103 None
7104 };
7105
7106 let command = if self.parse_keyword(Keyword::FOR) {
7107 let keyword = self.expect_one_of_keywords(&[
7108 Keyword::ALL,
7109 Keyword::SELECT,
7110 Keyword::INSERT,
7111 Keyword::UPDATE,
7112 Keyword::DELETE,
7113 ])?;
7114 Some(match keyword {
7115 Keyword::ALL => CreatePolicyCommand::All,
7116 Keyword::SELECT => CreatePolicyCommand::Select,
7117 Keyword::INSERT => CreatePolicyCommand::Insert,
7118 Keyword::UPDATE => CreatePolicyCommand::Update,
7119 Keyword::DELETE => CreatePolicyCommand::Delete,
7120 unexpected_keyword => return Err(ParserError::ParserError(
7121 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
7122 )),
7123 })
7124 } else {
7125 None
7126 };
7127
7128 let to = if self.parse_keyword(Keyword::TO) {
7129 Some(self.parse_comma_separated(|p| p.parse_owner())?)
7130 } else {
7131 None
7132 };
7133
7134 let using = if self.parse_keyword(Keyword::USING) {
7135 self.expect_token(&Token::LParen)?;
7136 let expr = self.parse_expr()?;
7137 self.expect_token(&Token::RParen)?;
7138 Some(expr)
7139 } else {
7140 None
7141 };
7142
7143 let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
7144 self.expect_token(&Token::LParen)?;
7145 let expr = self.parse_expr()?;
7146 self.expect_token(&Token::RParen)?;
7147 Some(expr)
7148 } else {
7149 None
7150 };
7151
7152 Ok(CreatePolicy {
7153 name,
7154 table_name,
7155 policy_type,
7156 command,
7157 to,
7158 using,
7159 with_check,
7160 })
7161 }
7162
7163 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7173 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7174 let name = self.parse_identifier()?;
7175
7176 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7177 Some(self.parse_literal_string()?)
7178 } else {
7179 None
7180 };
7181
7182 let url = if self.parse_keyword(Keyword::URL) {
7183 Some(self.parse_literal_string()?)
7184 } else {
7185 None
7186 };
7187
7188 let comment = self.parse_optional_inline_comment()?;
7189
7190 let with_dcproperties =
7191 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7192 properties if !properties.is_empty() => Some(properties),
7193 _ => None,
7194 };
7195
7196 Ok(CreateConnector {
7197 name,
7198 if_not_exists,
7199 connector_type,
7200 url,
7201 comment,
7202 with_dcproperties,
7203 })
7204 }
7205
7206 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7212 let mut parts = vec![];
7213 loop {
7214 parts.push(ObjectNamePart::Identifier(Ident::new(
7215 self.next_token().to_string(),
7216 )));
7217 if !self.consume_token(&Token::Period) {
7218 break;
7219 }
7220 }
7221 Ok(ObjectName(parts))
7222 }
7223
    /// Parse `CREATE OPERATOR name ( option = value, ... )`.
    /// Options may appear in any order; each is accepted at most once (the
    /// match guards below fall through to the duplicate/unexpected error).
    /// FUNCTION (or PROCEDURE) is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Bare flags, no `= value`.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE share one slot; remember which
                // spelling was used.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or
                // the `OPERATOR(name)` wrapper form.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                // RESTRICT/JOIN take an estimator function name.
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a guard above failed, i.e. a duplicate option.
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            // Options are comma-separated; a missing comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION/PROCEDURE is the only mandatory option.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7345
    /// Parse a `CREATE [OR REPLACE] AGGREGATE` statement (PostgreSQL-style),
    /// starting after the `AGGREGATE` keyword: the aggregate name, a
    /// parenthesized argument-type list, and a parenthesized option list.
    pub fn parse_create_aggregate(
        &mut self,
        or_replace: bool,
    ) -> Result<CreateAggregate, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // Argument list: both `*` and an empty `()` yield no argument types.
        let args = if self.consume_token(&Token::Mul) {
            vec![]
        } else if self.consume_token(&Token::RParen) {
            // Empty list: push the `)` back so the shared `expect_token`
            // below consumes it, same as on the other paths.
            self.prev_token();
            vec![]
        } else {
            self.parse_comma_separated(|p| p.parse_data_type())?
        };
        self.expect_token(&Token::RParen)?;

        self.expect_token(&Token::LParen)?;
        // Option list: `KEY [= value]` entries, comma-separated, terminated
        // by `)`; each entry's value parsing is delegated per option name.
        let mut options: Vec<CreateAggregateOption> = Vec::new();
        loop {
            let token = self.next_token();
            match &token.token {
                Token::RParen => break,
                Token::Comma => continue,
                Token::Word(word) => {
                    let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
                    options.push(option);
                }
                other => {
                    return Err(ParserError::ParserError(format!(
                        "Unexpected token in CREATE AGGREGATE options: {other:?}"
                    )));
                }
            }
        }

        Ok(CreateAggregate {
            or_replace,
            name,
            args,
            options,
        })
    }
7395
    /// Parse the value of a single `CREATE AGGREGATE` option whose
    /// (already-consumed, upper-cased) name is `key`.
    ///
    /// `FINALFUNC_EXTRA`, `MFINALFUNC_EXTRA`, and `HYPOTHETICAL` are bare
    /// flags; every other recognized option expects `= <value>`. Unknown
    /// option names are rejected with a `ParserError`.
    fn parse_create_aggregate_option(
        &mut self,
        key: &str,
    ) -> Result<CreateAggregateOption, ParserError> {
        match key {
            "SFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sfunc(self.parse_object_name(false)?))
            }
            "STYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
            }
            "SSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Sspace(size))
            }
            "FINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Finalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` part.
            "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
            "FINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::FinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "COMBINEFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Combinefunc(
                    self.parse_object_name(false)?,
                ))
            }
            "SERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Serialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "DESERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Deserialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "INITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
            }
            // M-prefixed options mirror the plain ones for the moving-
            // aggregate state machine.
            "MSFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Msfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MINVFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minvfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MSTYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
            }
            "MSSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Msspace(size))
            }
            "MFINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mfinalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` part.
            "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
            "MFINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::MfinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "MINITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
            }
            "SORTOP" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sortop(
                    self.parse_object_name(false)?,
                ))
            }
            "PARALLEL" => {
                self.expect_token(&Token::Eq)?;
                let parallel = match self.expect_one_of_keywords(&[
                    Keyword::SAFE,
                    Keyword::RESTRICTED,
                    Keyword::UNSAFE,
                ])? {
                    Keyword::SAFE => FunctionParallel::Safe,
                    Keyword::RESTRICTED => FunctionParallel::Restricted,
                    Keyword::UNSAFE => FunctionParallel::Unsafe,
                    // `expect_one_of_keywords` only returns listed keywords;
                    // this arm exists to keep the match exhaustive.
                    other => {
                        return Err(ParserError::ParserError(format!(
                            "Internal parser error: unexpected keyword `{other}` for PARALLEL"
                        )))
                    }
                };
                Ok(CreateAggregateOption::Parallel(parallel))
            }
            // Bare flag: no `= value` part.
            "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
            other => Err(ParserError::ParserError(format!(
                "Unknown CREATE AGGREGATE option: {other}"
            ))),
        }
    }
7517
7518 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7519 let token = self.next_token();
7520 match &token.token {
7521 Token::Word(word) => match word.value.to_uppercase().as_str() {
7522 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7523 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7524 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7525 other => Err(ParserError::ParserError(format!(
7526 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7527 ))),
7528 },
7529 other => Err(ParserError::ParserError(format!(
7530 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7531 ))),
7532 }
7533 }
7534
7535 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7539 let name = self.parse_object_name(false)?;
7540 self.expect_keyword(Keyword::USING)?;
7541 let using = self.parse_identifier()?;
7542
7543 Ok(CreateOperatorFamily { name, using })
7544 }
7545
7546 pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
7550 let name = self.parse_object_name(false)?;
7551 let default = self.parse_keyword(Keyword::DEFAULT);
7552 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
7553 let for_type = self.parse_data_type()?;
7554 self.expect_keyword(Keyword::USING)?;
7555 let using = self.parse_identifier()?;
7556
7557 let family = if self.parse_keyword(Keyword::FAMILY) {
7558 Some(self.parse_object_name(false)?)
7559 } else {
7560 None
7561 };
7562
7563 self.expect_keyword(Keyword::AS)?;
7564
7565 let mut items = vec![];
7566 loop {
7567 if self.parse_keyword(Keyword::OPERATOR) {
7568 let strategy_number = self.parse_literal_uint()?;
7569 let operator_name = self.parse_operator_name()?;
7570
7571 let op_types = if self.consume_token(&Token::LParen) {
7573 let left = self.parse_data_type()?;
7574 self.expect_token(&Token::Comma)?;
7575 let right = self.parse_data_type()?;
7576 self.expect_token(&Token::RParen)?;
7577 Some(OperatorArgTypes { left, right })
7578 } else {
7579 None
7580 };
7581
7582 let purpose = if self.parse_keyword(Keyword::FOR) {
7584 if self.parse_keyword(Keyword::SEARCH) {
7585 Some(OperatorPurpose::ForSearch)
7586 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7587 let sort_family = self.parse_object_name(false)?;
7588 Some(OperatorPurpose::ForOrderBy { sort_family })
7589 } else {
7590 return self
7591 .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
7592 }
7593 } else {
7594 None
7595 };
7596
7597 items.push(OperatorClassItem::Operator {
7598 strategy_number,
7599 operator_name,
7600 op_types,
7601 purpose,
7602 });
7603 } else if self.parse_keyword(Keyword::FUNCTION) {
7604 let support_number = self.parse_literal_uint()?;
7605
7606 let op_types = if self.consume_token(&Token::LParen)
7608 && self.peek_token_ref().token != Token::RParen
7609 {
7610 let mut types = vec![];
7611 loop {
7612 types.push(self.parse_data_type()?);
7613 if !self.consume_token(&Token::Comma) {
7614 break;
7615 }
7616 }
7617 self.expect_token(&Token::RParen)?;
7618 Some(types)
7619 } else if self.consume_token(&Token::LParen) {
7620 self.expect_token(&Token::RParen)?;
7621 Some(vec![])
7622 } else {
7623 None
7624 };
7625
7626 let function_name = self.parse_object_name(false)?;
7627
7628 let argument_types = if self.consume_token(&Token::LParen) {
7630 let mut types = vec![];
7631 loop {
7632 if self.peek_token_ref().token == Token::RParen {
7633 break;
7634 }
7635 types.push(self.parse_data_type()?);
7636 if !self.consume_token(&Token::Comma) {
7637 break;
7638 }
7639 }
7640 self.expect_token(&Token::RParen)?;
7641 types
7642 } else {
7643 vec![]
7644 };
7645
7646 items.push(OperatorClassItem::Function {
7647 support_number,
7648 op_types,
7649 function_name,
7650 argument_types,
7651 });
7652 } else if self.parse_keyword(Keyword::STORAGE) {
7653 let storage_type = self.parse_data_type()?;
7654 items.push(OperatorClassItem::Storage { storage_type });
7655 } else {
7656 break;
7657 }
7658
7659 if !self.consume_token(&Token::Comma) {
7661 break;
7662 }
7663 }
7664
7665 Ok(CreateOperatorClass {
7666 name,
7667 default,
7668 for_type,
7669 using,
7670 family,
7671 items,
7672 })
7673 }
7674
    /// Parse a `DROP` statement, starting after the `DROP` keyword.
    ///
    /// Object kinds covered by the generic [`Statement::Drop`] form fall
    /// through to the shared tail below (IF EXISTS, name list,
    /// CASCADE/RESTRICT/PURGE, optional `ON <table>`). Kinds with their own
    /// grammar (FUNCTION, POLICY, CONNECTOR, DOMAIN, PROCEDURE, SECRET,
    /// TRIGGER, EXTENSION, OPERATOR [FAMILY|CLASS]) return early via a
    /// dedicated sub-parser.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // Dialect-specific modifiers that may precede the object kind.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::COLLATION) {
            ObjectType::Collation
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        // Safe to test after VIEW: MATERIALIZED VIEW starts with a
        // different keyword, so the VIEW branch above cannot shadow it.
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        // Object kinds below have dedicated grammars and return early.
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function().map(Into::into);
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy().map(Into::into);
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain().map(Into::into);
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger().map(Into::into);
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR alone, OPERATOR FAMILY, and OPERATOR CLASS are three
            // distinct statements.
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected_ref(
                "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token_ref(),
            );
        };
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Location captured up front so both error messages below point at
        // the start of the modifier clause.
        let loc = self.peek_token_ref().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        // CASCADE and RESTRICT are contradictory; DROP ROLE accepts no
        // drop-behavior modifiers at all.
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // Optional trailing `ON <table>` clause.
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
7774
7775 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7776 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7777 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7778 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7779 _ => None,
7780 }
7781 }
7782
7783 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7788 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7789 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7790 let drop_behavior = self.parse_optional_drop_behavior();
7791 Ok(DropFunction {
7792 if_exists,
7793 func_desc,
7794 drop_behavior,
7795 })
7796 }
7797
7798 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7804 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7805 let name = self.parse_identifier()?;
7806 self.expect_keyword_is(Keyword::ON)?;
7807 let table_name = self.parse_object_name(false)?;
7808 let drop_behavior = self.parse_optional_drop_behavior();
7809 Ok(DropPolicy {
7810 if_exists,
7811 name,
7812 table_name,
7813 drop_behavior,
7814 })
7815 }
7816 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7822 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7823 let name = self.parse_identifier()?;
7824 Ok(Statement::DropConnector { if_exists, name })
7825 }
7826
7827 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7831 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7832 let name = self.parse_object_name(false)?;
7833 let drop_behavior = self.parse_optional_drop_behavior();
7834 Ok(DropDomain {
7835 if_exists,
7836 name,
7837 drop_behavior,
7838 })
7839 }
7840
7841 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7846 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7847 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7848 let drop_behavior = self.parse_optional_drop_behavior();
7849 Ok(Statement::DropProcedure {
7850 if_exists,
7851 proc_desc,
7852 drop_behavior,
7853 })
7854 }
7855
7856 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7857 let name = self.parse_object_name(false)?;
7858
7859 let args = if self.consume_token(&Token::LParen) {
7860 if self.consume_token(&Token::RParen) {
7861 Some(vec![])
7862 } else {
7863 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7864 self.expect_token(&Token::RParen)?;
7865 Some(args)
7866 }
7867 } else {
7868 None
7869 };
7870
7871 Ok(FunctionDesc { name, args })
7872 }
7873
    /// Parse a DuckDb `DROP [TEMPORARY | PERSISTENT] SECRET` statement,
    /// starting after the `SECRET` keyword. The `temporary`/`persistent`
    /// flags were consumed by the caller ([`Parser::parse_drop`]); supplying
    /// both is rejected here.
    fn parse_drop_secret(
        &mut self,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        // Optional `FROM <storage>` clause.
        // NOTE(review): `.ok()` silently discards a failed identifier parse
        // after `FROM`, leaving the offending token in the stream — confirm
        // this best-effort behavior is intended.
        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
            self.parse_identifier().ok()
        } else {
            None
        };
        // Fold the two flags into `Option<bool>`: TEMPORARY => Some(true),
        // PERSISTENT => Some(false), neither => None, both => error.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::DropSecret {
            if_exists,
            temporary: temp,
            name,
            storage_specifier,
        })
    }
7901
    /// Parse a `DECLARE` statement, starting after the `DECLARE` keyword.
    ///
    /// BigQuery, Snowflake, and MsSql each have their own `DECLARE` grammar
    /// and are dispatched to dedicated parsers. All other dialects use the
    /// cursor form parsed below:
    /// `DECLARE name [BINARY] [INSENSITIVE | ASENSITIVE] [[NO] SCROLL]
    ///  CURSOR [WITH | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // `binary` is always `Some` for the cursor form; the inner bool
        // records whether BINARY was actually present.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD` clause.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // `parse_one_of_keywords` only returns listed keywords;
                    // this arm keeps the match exhaustive.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7976
    /// Parse a BigQuery `DECLARE` statement:
    /// `DECLARE name [, ...] [data_type] [DEFAULT expr]`.
    ///
    /// When the data type is omitted, the `DEFAULT <expr>` clause becomes
    /// mandatory.
    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
        let names = self.parse_comma_separated(Parser::parse_identifier)?;

        // A bare `DEFAULT` keyword right after the names means the data type
        // was omitted.
        let data_type = match &self.peek_token_ref().token {
            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
            _ => Some(self.parse_data_type()?),
        };

        let expr = if data_type.is_some() {
            // With a data type the DEFAULT clause is optional.
            if self.parse_keyword(Keyword::DEFAULT) {
                Some(self.parse_expr()?)
            } else {
                None
            }
        } else {
            // Without a data type the DEFAULT clause is required.
            self.expect_keyword_is(Keyword::DEFAULT)?;
            Some(self.parse_expr()?)
        };

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names,
                data_type,
                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
                declare_type: None,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query: None,
            }],
        })
    }
8019
    /// Parse a Snowflake `DECLARE` statement: one or more semicolon-separated
    /// declarations, each either a `CURSOR FOR ...`, a `RESULTSET [...]`, an
    /// `EXCEPTION [(...)]`, or a plain variable with an optional data type
    /// and/or `DEFAULT`/`:=` initializer.
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` binds a query; any other
                    // expression is stored as a `For` assignment instead.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // A RESULTSET may carry an initializer unless the
                    // declaration ends right here with `;`.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // An EXCEPTION may carry a parenthesized definition.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: initializer alone, or data type
                    // optionally followed by an initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            // After a `;`, a word that is not a reserved keyword starts the
            // next declaration; otherwise the `;` is pushed back so the
            // caller sees it as the statement terminator.
            if self.consume_token(&Token::SemiColon) {
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8135
8136 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8148 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8149
8150 Ok(Statement::Declare { stmts })
8151 }
8152
    /// Parse a single declaration of an MsSql `DECLARE` statement: either
    /// `@name [AS] data_type [= expr]` or `name CURSOR [FOR query]`.
    ///
    /// The identifier must start with `@` unless it is immediately followed
    /// by the `CURSOR` keyword.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): this reports a parse failure via the
                // `TokenizerError` variant — confirm whether `ParserError`
                // was intended; callers matching on the variant will see
                // this as a tokenizer failure.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // `CURSOR` marks a cursor declaration; an optional `AS` may precede
        // the data type of a variable declaration.
        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // `FOR query` binds a query; otherwise an optional `= expr`
        // initializer may follow.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
8216
8217 pub fn parse_snowflake_variable_declaration_expression(
8225 &mut self,
8226 ) -> Result<Option<DeclareAssignment>, ParserError> {
8227 Ok(match &self.peek_token_ref().token {
8228 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8229 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8231 }
8232 Token::Assignment => {
8233 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8235 self.parse_expr()?,
8236 )))
8237 }
8238 _ => None,
8239 })
8240 }
8241
8242 pub fn parse_mssql_variable_declaration_expression(
8249 &mut self,
8250 ) -> Result<Option<DeclareAssignment>, ParserError> {
8251 Ok(match &self.peek_token_ref().token {
8252 Token::Eq => {
8253 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8255 self.parse_expr()?,
8256 )))
8257 }
8258 _ => None,
8259 })
8260 }
8261
    /// Parse a `FETCH` cursor statement:
    /// `FETCH [direction] FROM | IN cursor_name [INTO target]`.
    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
        // Direction keyword; when none matches, the final branch expects a
        // bare row count (`FETCH 5 FROM c`).
        let direction = if self.parse_keyword(Keyword::NEXT) {
            FetchDirection::Next
        } else if self.parse_keyword(Keyword::PRIOR) {
            FetchDirection::Prior
        } else if self.parse_keyword(Keyword::FIRST) {
            FetchDirection::First
        } else if self.parse_keyword(Keyword::LAST) {
            FetchDirection::Last
        } else if self.parse_keyword(Keyword::ABSOLUTE) {
            FetchDirection::Absolute {
                limit: self.parse_number_value()?,
            }
        } else if self.parse_keyword(Keyword::RELATIVE) {
            FetchDirection::Relative {
                limit: self.parse_number_value()?,
            }
        } else if self.parse_keyword(Keyword::FORWARD) {
            // FORWARD/BACKWARD take either `ALL` or a count.
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::ForwardAll
            } else {
                FetchDirection::Forward {
                    limit: Some(self.parse_number_value()?),
                }
            }
        } else if self.parse_keyword(Keyword::BACKWARD) {
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::BackwardAll
            } else {
                FetchDirection::Backward {
                    limit: Some(self.parse_number_value()?),
                }
            }
        } else if self.parse_keyword(Keyword::ALL) {
            FetchDirection::All
        } else {
            FetchDirection::Count {
                limit: self.parse_number_value()?,
            }
        };

        // `FROM` and `IN` are both accepted; which one was used is recorded.
        let position = if self.peek_keyword(Keyword::FROM) {
            self.expect_keyword(Keyword::FROM)?;
            FetchPosition::From
        } else if self.peek_keyword(Keyword::IN) {
            self.expect_keyword(Keyword::IN)?;
            FetchPosition::In
        } else {
            return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
        };

        let name = self.parse_identifier()?;

        // Optional `INTO <target>` clause.
        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(Statement::Fetch {
            name,
            direction,
            position,
            into,
        })
    }
8331
8332 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8334 let object_type = if self.parse_keyword(Keyword::ALL) {
8335 DiscardObject::ALL
8336 } else if self.parse_keyword(Keyword::PLANS) {
8337 DiscardObject::PLANS
8338 } else if self.parse_keyword(Keyword::SEQUENCES) {
8339 DiscardObject::SEQUENCES
8340 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8341 DiscardObject::TEMP
8342 } else {
8343 return self.expected_ref(
8344 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8345 self.peek_token_ref(),
8346 );
8347 };
8348 Ok(Statement::Discard { object_type })
8349 }
8350
    /// Parse a `CREATE [UNIQUE] INDEX` statement, starting after the `INDEX`
    /// keyword (`unique` was consumed by the caller).
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional (`CREATE INDEX ON tbl (...)`); with
        // IF NOT EXISTS a name is always parsed.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // `USING` may also appear after the table name; a later occurrence
        // takes precedence over one parsed before `ON`.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Optional `INCLUDE (col, ...)` covering columns.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // Optional `NULLS [NOT] DISTINCT`; Some(true) means NULLS DISTINCT.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Optional dialect-gated `WITH (param, ...)` clause.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate: `WHERE expr`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing ALGORITHM/LOCK clauses are parsed as alter-table
        // operations and collected separately.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8442
8443 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8445 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8446 let name = self.parse_identifier()?;
8447
8448 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8449 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8450 Some(self.parse_identifier()?)
8451 } else {
8452 None
8453 };
8454
8455 let version = if self.parse_keyword(Keyword::VERSION) {
8456 Some(self.parse_identifier()?)
8457 } else {
8458 None
8459 };
8460
8461 let cascade = self.parse_keyword(Keyword::CASCADE);
8462
8463 (schema, version, cascade)
8464 } else {
8465 (None, None, false)
8466 };
8467
8468 Ok(CreateExtension {
8469 name,
8470 if_not_exists,
8471 schema,
8472 version,
8473 cascade,
8474 })
8475 }
8476
8477 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8479 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8480 let name = self.parse_object_name(false)?;
8481
8482 let definition = if self.parse_keyword(Keyword::FROM) {
8483 CreateCollationDefinition::From(self.parse_object_name(false)?)
8484 } else if self.consume_token(&Token::LParen) {
8485 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8486 self.expect_token(&Token::RParen)?;
8487 CreateCollationDefinition::Options(options)
8488 } else {
8489 return self.expected_ref(
8490 "FROM or parenthesized option list after CREATE COLLATION name",
8491 self.peek_token_ref(),
8492 );
8493 };
8494
8495 Ok(CreateCollation {
8496 if_not_exists,
8497 name,
8498 definition,
8499 })
8500 }
8501
    /// Parse a `CREATE TEXT SEARCH ...` statement, starting after the
    /// `TEXT SEARCH` keywords. The four variants (CONFIGURATION, DICTIONARY,
    /// PARSER, TEMPLATE) share the same shape: a name followed by a
    /// parenthesized, comma-separated option list.
    pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::CONFIGURATION) {
            let name = self.parse_object_name(false)?;
            self.expect_token(&Token::LParen)?;
            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
            self.expect_token(&Token::RParen)?;
            Ok(Statement::CreateTextSearchConfiguration(
                CreateTextSearchConfiguration { name, options },
            ))
        } else if self.parse_keyword(Keyword::DICTIONARY) {
            let name = self.parse_object_name(false)?;
            self.expect_token(&Token::LParen)?;
            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
            self.expect_token(&Token::RParen)?;
            Ok(Statement::CreateTextSearchDictionary(
                CreateTextSearchDictionary { name, options },
            ))
        } else if self.parse_keyword(Keyword::PARSER) {
            let name = self.parse_object_name(false)?;
            self.expect_token(&Token::LParen)?;
            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
            self.expect_token(&Token::RParen)?;
            Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
                name,
                options,
            }))
        } else if self.parse_keyword(Keyword::TEMPLATE) {
            let name = self.parse_object_name(false)?;
            self.expect_token(&Token::LParen)?;
            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
            self.expect_token(&Token::RParen)?;
            Ok(Statement::CreateTextSearchTemplate(
                CreateTextSearchTemplate { name, options },
            ))
        } else {
            self.expected_ref(
                "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
                self.peek_token_ref(),
            )
        }
    }
8544
8545 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8547 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8548 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8549 let cascade_or_restrict =
8550 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8551 Ok(Statement::DropExtension(DropExtension {
8552 names,
8553 if_exists,
8554 cascade_or_restrict: cascade_or_restrict
8555 .map(|k| match k {
8556 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8557 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8558 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8559 })
8560 .transpose()?,
8561 }))
8562 }
8563
8564 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8567 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8568 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8569 let drop_behavior = self.parse_optional_drop_behavior();
8570 Ok(Statement::DropOperator(DropOperator {
8571 if_exists,
8572 operators,
8573 drop_behavior,
8574 }))
8575 }
8576
8577 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8580 let name = self.parse_operator_name()?;
8581 self.expect_token(&Token::LParen)?;
8582
8583 let left_type = if self.parse_keyword(Keyword::NONE) {
8585 None
8586 } else {
8587 Some(self.parse_data_type()?)
8588 };
8589
8590 self.expect_token(&Token::Comma)?;
8591
8592 let right_type = self.parse_data_type()?;
8594
8595 self.expect_token(&Token::RParen)?;
8596
8597 Ok(DropOperatorSignature {
8598 name,
8599 left_type,
8600 right_type,
8601 })
8602 }
8603
8604 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8608 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8609 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8610 self.expect_keyword(Keyword::USING)?;
8611 let using = self.parse_identifier()?;
8612 let drop_behavior = self.parse_optional_drop_behavior();
8613 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8614 if_exists,
8615 names,
8616 using,
8617 drop_behavior,
8618 }))
8619 }
8620
8621 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8625 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8626 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8627 self.expect_keyword(Keyword::USING)?;
8628 let using = self.parse_identifier()?;
8629 let drop_behavior = self.parse_optional_drop_behavior();
8630 Ok(Statement::DropOperatorClass(DropOperatorClass {
8631 if_exists,
8632 names,
8633 using,
8634 drop_behavior,
8635 }))
8636 }
8637
8638 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8642 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8643 self.expect_token(&Token::LParen)?;
8644 let columns =
8645 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8646 self.expect_token(&Token::RParen)?;
8647 Ok(HiveDistributionStyle::PARTITIONED { columns })
8648 } else {
8649 Ok(HiveDistributionStyle::NONE)
8650 }
8651 }
8652
8653 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8657 let token = self.next_token();
8658 match &token.token {
8659 Token::Word(w) => match w.keyword {
8660 Keyword::AUTO => Ok(DistStyle::Auto),
8661 Keyword::EVEN => Ok(DistStyle::Even),
8662 Keyword::KEY => Ok(DistStyle::Key),
8663 Keyword::ALL => Ok(DistStyle::All),
8664 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8665 },
8666 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8667 }
8668 }
8669
    /// Parses the optional Hive storage clauses of a `CREATE TABLE`:
    /// `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'`, and
    /// `WITH SERDEPROPERTIES (...)`, in any order and any combination.
    ///
    /// Returns `None` when none of the clauses are present; otherwise a
    /// `HiveFormat` is created lazily (via `get_or_insert_with`) the first
    /// time a clause is seen. Later occurrences of the same clause overwrite
    /// earlier ones.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                // ROW FORMAT SERDE '...' | ROW FORMAT DELIMITED ...
                Some(Keyword::ROW) => {
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                // STORED AS INPUTFORMAT ... OUTPUTFORMAT ... | STORED AS <format>
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // WITH was consumed by parse_one_of_keywords; push it back
                    // so the helper can match the full WITH SERDEPROPERTIES
                    // keyword sequence itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // A bare WITH that isn't SERDEPROPERTIES belongs to a
                        // later clause; stop here and let the caller handle it.
                        break;
                    }
                }
                None => break,
                // Unreachable in practice: only the four listed keywords can
                // be returned, and all have explicit arms above.
                _ => break,
            }
        }

        Ok(hive_format)
    }
8725
    /// Parses a Hive `ROW FORMAT` clause; the `ROW` keyword has already been
    /// consumed by the caller (`parse_hive_formats`).
    ///
    /// Two forms exist:
    /// - `FORMAT SERDE '<class>'`
    /// - `FORMAT DELIMITED [FIELDS TERMINATED BY ... [ESCAPED BY ...]]
    ///   [COLLECTION ITEMS TERMINATED BY ...] [MAP KEYS TERMINATED BY ...]
    ///   [LINES TERMINATED BY ...] [NULL DEFINED AS ...]`
    ///
    /// NOTE: the match arms below use pattern guards with side effects —
    /// `parse_keywords` in a guard consumes tokens only when the whole
    /// sequence matches. If a leading keyword (e.g. FIELDS) matches but its
    /// guard fails, that keyword has already been consumed when the loop
    /// breaks; keep that in mind before restructuring.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            // DELIMITED (or, implicitly, anything else): collect zero or more
            // delimiter specifications until no further clause matches.
            _ => {
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS)
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::FieldsTerminatedBy,
                                char: self.parse_identifier()?,
                            });

                            // ESCAPED BY is only valid directly after
                            // FIELDS TERMINATED BY.
                            if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsEscapedBy,
                                    char: self.parse_identifier()?,
                                });
                            }
                        }
                        Some(Keyword::COLLECTION)
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::MAP)
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::LINES)
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::LinesTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::NULL)
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::NullDefinedAs,
                                char: self.parse_identifier()?,
                            });
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8812
8813 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8814 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8815 Ok(Some(self.parse_identifier()?))
8816 } else {
8817 Ok(None)
8818 }
8819 }
8820
    /// Parses the body of a `CREATE TABLE` statement; the caller has already
    /// consumed `CREATE [OR REPLACE] [TEMPORARY|GLOBAL|TRANSIENT] TABLE` and
    /// passes those modifiers in as flags.
    ///
    /// The clause sequence below is strictly ordered — each `parse_*` call
    /// consumes tokens, so reordering these steps changes which SQL is
    /// accepted. Dialect-specific clauses (Hive, ClickHouse, BigQuery,
    /// Redshift, SQLite, Snowflake) are interleaved where each dialect
    /// expects them.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<CreateTable, ParserError> {
        // BigQuery permits unquoted hyphenated names like `my-project.t`.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // PostgreSQL: `CREATE TABLE t PARTITION OF parent ...` — when present,
        // a FOR VALUES / DEFAULT bound clause becomes mandatory (checked below).
        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // Snowflake-style `CLONE <source>`.
        // NOTE(review): `.ok()` silently discards a parse error after the
        // CLONE keyword was consumed, leaving `clone = None` — presumably
        // deliberate best-effort behavior, but worth confirming.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        let (columns, constraints) = self.parse_columns()?;
        // Hive allows a table COMMENT directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // PARTITION OF requires a partition bound specification.
        let for_values = if partition_of.is_some() {
            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
                Some(self.parse_partition_for_values()?)
            } else {
                return self.expected_ref(
                    "FOR VALUES or DEFAULT after PARTITION OF",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        // SQLite: `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive clauses: PARTITIONED BY, CLUSTERED BY, ROW FORMAT/STORED AS/etc.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        // INHERITS / WITH / TBLPROPERTIES / PARTITION BY / CLUSTER BY /
        // OPTIONS / plain options — see parse_optional_create_table_config.
        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse: table-level PRIMARY KEY expression.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // ClickHouse: ORDER BY, either one bare expression or a
        // (possibly empty) parenthesized list.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token_ref().token != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite: STRICT tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // Redshift: BACKUP { YES | NO }.
        let backup = if self.parse_keyword(Keyword::BACKUP) {
            let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
            Some(keyword == Keyword::YES)
        } else {
            None
        };

        // Redshift distribution/sort clauses.
        let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
            Some(self.parse_dist_style()?)
        } else {
            None
        };
        let distkey = if self.parse_keyword(Keyword::DISTKEY) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };
        let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        // CTAS: `AS <query>`, or (MySQL-style) a bare SELECT with no AS.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            // Push SELECT back so parse_query sees the full statement.
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .backup(backup)
            .diststyle(diststyle)
            .distkey(distkey)
            .sortkey(sortkey)
            .build())
    }
9005
9006 fn maybe_parse_create_table_like(
9007 &mut self,
9008 allow_unquoted_hyphen: bool,
9009 ) -> Result<Option<CreateTableLikeKind>, ParserError> {
9010 let like = if self.dialect.supports_create_table_like_parenthesized()
9011 && self.consume_token(&Token::LParen)
9012 {
9013 if self.parse_keyword(Keyword::LIKE) {
9014 let name = self.parse_object_name(allow_unquoted_hyphen)?;
9015 let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
9016 Some(CreateTableLikeDefaults::Including)
9017 } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
9018 Some(CreateTableLikeDefaults::Excluding)
9019 } else {
9020 None
9021 };
9022 self.expect_token(&Token::RParen)?;
9023 Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
9024 name,
9025 defaults,
9026 }))
9027 } else {
9028 self.prev_token();
9030 None
9031 }
9032 } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
9033 let name = self.parse_object_name(allow_unquoted_hyphen)?;
9034 Some(CreateTableLikeKind::Plain(CreateTableLike {
9035 name,
9036 defaults: None,
9037 }))
9038 } else {
9039 None
9040 };
9041 Ok(like)
9042 }
9043
9044 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
9045 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
9046 Ok(OnCommit::DeleteRows)
9047 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
9048 Ok(OnCommit::PreserveRows)
9049 } else if self.parse_keywords(&[Keyword::DROP]) {
9050 Ok(OnCommit::Drop)
9051 } else {
9052 parser_err!(
9053 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
9054 self.peek_token_ref()
9055 )
9056 }
9057 }
9058
    /// Parses a PostgreSQL partition bound specification after
    /// `CREATE TABLE ... PARTITION OF parent`:
    ///
    /// - `DEFAULT`
    /// - `FOR VALUES IN ( expr [, ...] )`            (list partitioning)
    /// - `FOR VALUES FROM ( b [, ...] ) TO ( b [, ...] )`  (range partitioning,
    ///   where each bound may be an expression, MINVALUE, or MAXVALUE)
    /// - `FOR VALUES WITH ( MODULUS m, REMAINDER r )`      (hash partitioning)
    ///
    /// Empty bound lists are rejected explicitly so the error points at the
    /// offending `)` rather than failing inside `parse_comma_separated`.
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            self.expect_token(&Token::LParen)?;
            // Reject `IN ()` with a targeted error.
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            self.expect_token(&Token::LParen)?;
            // Reject an empty FROM bound list.
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            // Reject an empty TO bound list.
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
        }
    }
9110
9111 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9113 if self.parse_keyword(Keyword::MINVALUE) {
9114 Ok(PartitionBoundValue::MinValue)
9115 } else if self.parse_keyword(Keyword::MAXVALUE) {
9116 Ok(PartitionBoundValue::MaxValue)
9117 } else {
9118 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9119 }
9120 }
9121
    /// Parses the optional configuration clauses that can follow a
    /// `CREATE TABLE` column list: `INHERITS`, `WITH (...)`,
    /// `TBLPROPERTIES (...)`, `PARTITION BY`, `CLUSTER BY`, BigQuery
    /// `OPTIONS (...)`, and finally MySQL-style plain options.
    ///
    /// Only ONE `table_options` representation survives: each successive
    /// group that matches overwrites the previous assignment (WITH is
    /// overridden by TBLPROPERTIES, which is overridden by OPTIONS), and the
    /// plain-option fallback only runs when nothing else set `table_options`.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // PostgreSQL: INHERITS (parent [, ...]) — parentheses are mandatory.
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // WITH (...) options; empty when the keyword is absent.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive: TBLPROPERTIES (...) — overrides any WITH options above.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // BigQuery: OPTIONS (...) — peeked first so a missing clause
            // consumes nothing; overrides any earlier table_options.
            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fallback: MySQL-style plain options (ENGINE=..., CHARSET=..., ...),
        // only when no other option group matched and the dialect isn't Hive.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
9186
    /// Parses a single MySQL-style table option, returning `Ok(None)` when
    /// the upcoming tokens do not begin a recognized option (the caller,
    /// `parse_plain_options`, stops there).
    ///
    /// Structured options (COMMENT, ENGINE, TABLESPACE, UNION, and the
    /// `START TRANSACTION` marker) are handled first with dedicated shapes;
    /// everything else falls through to a generic `KEY [=] value` form where
    /// the `=` is always optional.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // `CREATE TABLE ... START TRANSACTION` (MySQL atomic DDL marker).
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // COMMENT [=] '<text>' — whether `=` appeared is preserved in the AST
        // so the statement round-trips exactly.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // ENGINE [=] name [( ident [, ...] )]
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    // Optional parenthesized engine parameters.
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // TABLESPACE [=] name [STORAGE [=] {DISK | MEMORY}]
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                // The tablespace name may be a bare word or a quoted string.
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // UNION [=] ( tbl [, ...] ) — MERGE storage engine member list.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Generic `KEY [=] value` options. Multi-word keys must be tested
        // before their single-word prefixes (e.g. DEFAULT CHARSET before
        // CHARSET) so the longest match wins.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option: consume nothing and let the caller stop.
            return Ok(None);
        };

        let _ = self.consume_token(&Token::Eq);

        // The value is preferably a literal; otherwise fall back to a bare
        // identifier (e.g. `ROW_FORMAT=DYNAMIC`).
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9379
9380 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9382 let mut options = Vec::new();
9383
9384 while let Some(option) = self.parse_plain_option()? {
9385 options.push(option);
9386 let _ = self.consume_token(&Token::Comma);
9389 }
9390
9391 Ok(options)
9392 }
9393
9394 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9396 let comment = if self.parse_keyword(Keyword::COMMENT) {
9397 let has_eq = self.consume_token(&Token::Eq);
9398 let comment = self.parse_comment_value()?;
9399 Some(if has_eq {
9400 CommentDef::WithEq(comment)
9401 } else {
9402 CommentDef::WithoutEq(comment)
9403 })
9404 } else {
9405 None
9406 };
9407 Ok(comment)
9408 }
9409
9410 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9412 let next_token = self.next_token();
9413 let value = match next_token.token {
9414 Token::SingleQuotedString(str) => str,
9415 Token::DollarQuotedString(str) => str.value,
9416 _ => self.expected("string literal", next_token)?,
9417 };
9418 Ok(value)
9419 }
9420
9421 pub fn parse_optional_procedure_parameters(
9423 &mut self,
9424 ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
9425 let mut params = vec![];
9426 if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
9427 return Ok(Some(params));
9428 }
9429 loop {
9430 if let Token::Word(_) = &self.peek_token_ref().token {
9431 params.push(self.parse_procedure_param()?)
9432 }
9433 let comma = self.consume_token(&Token::Comma);
9434 if self.consume_token(&Token::RParen) {
9435 break;
9437 } else if !comma {
9438 return self.expected_ref(
9439 "',' or ')' after parameter definition",
9440 self.peek_token_ref(),
9441 );
9442 }
9443 }
9444 Ok(Some(params))
9445 }
9446
    /// Parses the parenthesized body of a `CREATE TABLE` statement: column
    /// definitions and table constraints, in any interleaved order.
    ///
    /// Returns empty vectors both when there is no `(` at all and for an
    /// empty `()` body. A trailing comma before the closing `)` is accepted
    /// only when the dialect or the parser options allow it.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Constraints are tried first; anything starting with a word is
            // otherwise assumed to be a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Close the list when we see `)` — either with no preceding comma,
            // or with one when trailing commas are permitted. Otherwise a
            // comma followed by `)` falls through to the next iteration,
            // which reports the error above.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9487
9488 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9490 let mode = if self.parse_keyword(Keyword::IN) {
9491 Some(ArgMode::In)
9492 } else if self.parse_keyword(Keyword::OUT) {
9493 Some(ArgMode::Out)
9494 } else if self.parse_keyword(Keyword::INOUT) {
9495 Some(ArgMode::InOut)
9496 } else {
9497 None
9498 };
9499 let name = self.parse_identifier()?;
9500 let data_type = self.parse_data_type()?;
9501 let default = if self.consume_token(&Token::Eq) {
9502 Some(self.parse_expr()?)
9503 } else {
9504 None
9505 };
9506
9507 Ok(ProcedureParam {
9508 name,
9509 data_type,
9510 mode,
9511 default,
9512 })
9513 }
9514
    /// Parses one column definition in a `CREATE TABLE` column list.
    ///
    /// The data type is required here; `parse_column_def_inner(true)` is the
    /// variant that allows omitting it (used e.g. for Hive partition columns).
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9519
9520 fn parse_column_def_inner(
9521 &mut self,
9522 optional_data_type: bool,
9523 ) -> Result<ColumnDef, ParserError> {
9524 let col_name = self.parse_identifier()?;
9525 let data_type = if self.is_column_type_sqlite_unspecified() {
9526 DataType::Unspecified
9527 } else if optional_data_type {
9528 self.maybe_parse(|parser| parser.parse_data_type())?
9529 .unwrap_or(DataType::Unspecified)
9530 } else {
9531 self.parse_data_type()?
9532 };
9533 let mut options = vec![];
9534 loop {
9535 if self.parse_keyword(Keyword::CONSTRAINT) {
9536 let name = Some(self.parse_identifier()?);
9537 if let Some(option) = self.parse_optional_column_option()? {
9538 options.push(ColumnOptionDef { name, option });
9539 } else {
9540 return self.expected_ref(
9541 "constraint details after CONSTRAINT <name>",
9542 self.peek_token_ref(),
9543 );
9544 }
9545 } else if let Some(option) = self.parse_optional_column_option()? {
9546 options.push(ColumnOptionDef { name: None, option });
9547 } else {
9548 break;
9549 };
9550 }
9551 Ok(ColumnDef {
9552 name: col_name,
9553 data_type,
9554 options,
9555 })
9556 }
9557
9558 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9559 if dialect_of!(self is SQLiteDialect) {
9560 match &self.peek_token_ref().token {
9561 Token::Word(word) => matches!(
9562 word.keyword,
9563 Keyword::CONSTRAINT
9564 | Keyword::PRIMARY
9565 | Keyword::NOT
9566 | Keyword::UNIQUE
9567 | Keyword::CHECK
9568 | Keyword::DEFAULT
9569 | Keyword::COLLATE
9570 | Keyword::REFERENCES
9571 | Keyword::GENERATED
9572 | Keyword::AS
9573 ),
9574 _ => true, }
9576 } else {
9577 false
9578 }
9579 }
9580
    /// Parses the next column option in a column definition, if any.
    ///
    /// The active dialect gets first refusal via `parse_column_option`; when
    /// it declines (returns `None`), the generic parsing runs with the parser
    /// state set to `ColumnDefinition` so state-sensitive token handling
    /// applies inside the option body.
    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if let Some(option) = self.dialect.parse_column_option(self)? {
            return option;
        }

        self.with_state(
            ColumnDefinition,
            |parser| -> Result<Option<ColumnOption>, ParserError> {
                parser.parse_optional_column_option_inner()
            },
        )
    }
9594
    /// Parses a single generic (non-dialect-hook) column option, returning
    /// `Ok(None)` when the next tokens do not start any recognized option.
    ///
    /// NOTE(review): branches written `parse_keyword(..) && dialect_of!(..)`
    /// consume the keyword even when the dialect check then fails, while
    /// branches written `dialect_of!(..) && parse_keyword(..)` do not. That
    /// ordering difference is preserved as-is — confirm it is intentional.
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // EPHEMERAL's default expression is optional: a following `,` or
            // `)` means none was given.
            if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            // Some dialects allow `UNIQUE KEY` as a column option.
            let index_type_display =
                if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
                    KeyOrIndexDisplay::Key
                } else {
                    KeyOrIndexDisplay::None
                };
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
            // A bare `KEY` column option is treated as a primary key here.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            // Inline foreign-key reference: REFERENCES <table> [(<cols>)]
            // [MATCH ...] [ON DELETE ...] [ON UPDATE ...]
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            // The three clauses may appear in any order, each at most once.
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None,
                    index_name: None,
                    columns: vec![],
                    foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The check expression is parsed back in the Normal state, not the
            // column-definition state.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;

            // Optional [NOT] ENFORCED flag; None when unspecified.
            let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                Some(true)
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                Some(false)
            } else {
                None
            };

            Ok(Some(
                CheckConstraint {
                    name: None,
                    expr: Box::new(expr),
                    enforced,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // Step back so `parse_options` can re-consume the OPTIONS keyword.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // IDENTITY[(<seed>, <increment>)]
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            // SQLite: ON CONFLICT <resolution>
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
9830
9831 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9832 let name = self.parse_object_name(false)?;
9833 self.expect_token(&Token::Eq)?;
9834 let value = self.parse_literal_string()?;
9835
9836 Ok(Tag::new(name, value))
9837 }
9838
    /// Parses the tail of a `GENERATED ...` column option; the `GENERATED`
    /// keyword itself was already consumed by the caller.
    ///
    /// Handles `ALWAYS AS IDENTITY [(<seq options>)]`,
    /// `BY DEFAULT AS IDENTITY [(<seq options>)]`, and
    /// `ALWAYS AS (<expr>) [STORED | VIRTUAL]`.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            // Parenthesized sequence options are optional; `expect_token(..)
            // .is_ok()` is used as a conditional consume (assumes expect_token
            // does not advance on failure — matches its use below).
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // The generation expression is parsed in the Normal state, not
                // the column-definition state.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // Postgres path: STORED is required after the expression.
                    self.expected_ref("STORED", self.peek_token_ref())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // NOTE(review): `ALWAYS AS` was consumed but no `(` followed;
                // None is returned without restoring those tokens — confirm
                // callers tolerate this.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
9905
9906 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9907 self.expect_token(&Token::LParen)?;
9909 let expr = self.parse_expr()?;
9910 self.expect_token(&Token::RParen)?;
9911
9912 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9913 (
9914 GeneratedAs::ExpStored,
9915 Some(GeneratedExpressionMode::Stored),
9916 )
9917 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9918 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9919 } else {
9920 (GeneratedAs::Always, None)
9921 };
9922
9923 Ok(Some(ColumnOption::Generated {
9924 generated_as: gen_as,
9925 sequence_options: None,
9926 generation_expr: Some(expr),
9927 generation_expr_mode: expr_mode,
9928 generated_keyword: false,
9929 }))
9930 }
9931
9932 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9934 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9935 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9936 {
9937 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9938
9939 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9940 self.expect_token(&Token::LParen)?;
9941 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9942 self.expect_token(&Token::RParen)?;
9943 Some(sorted_by_columns)
9944 } else {
9945 None
9946 };
9947
9948 self.expect_keyword_is(Keyword::INTO)?;
9949 let num_buckets = self.parse_number_value()?.value;
9950 self.expect_keyword_is(Keyword::BUCKETS)?;
9951 Some(ClusteredBy {
9952 columns,
9953 sorted_by,
9954 num_buckets,
9955 })
9956 } else {
9957 None
9958 };
9959 Ok(clustered_by)
9960 }
9961
9962 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9966 if self.parse_keyword(Keyword::RESTRICT) {
9967 Ok(ReferentialAction::Restrict)
9968 } else if self.parse_keyword(Keyword::CASCADE) {
9969 Ok(ReferentialAction::Cascade)
9970 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9971 Ok(ReferentialAction::SetNull)
9972 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9973 Ok(ReferentialAction::NoAction)
9974 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9975 Ok(ReferentialAction::SetDefault)
9976 } else {
9977 self.expected_ref(
9978 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9979 self.peek_token_ref(),
9980 )
9981 }
9982 }
9983
9984 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9986 if self.parse_keyword(Keyword::FULL) {
9987 Ok(ConstraintReferenceMatchKind::Full)
9988 } else if self.parse_keyword(Keyword::PARTIAL) {
9989 Ok(ConstraintReferenceMatchKind::Partial)
9990 } else if self.parse_keyword(Keyword::SIMPLE) {
9991 Ok(ConstraintReferenceMatchKind::Simple)
9992 } else {
9993 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9994 }
9995 }
9996
9997 fn parse_constraint_using_index(
10000 &mut self,
10001 name: Option<Ident>,
10002 ) -> Result<ConstraintUsingIndex, ParserError> {
10003 let index_name = self.parse_identifier()?;
10004 let characteristics = self.parse_constraint_characteristics()?;
10005 Ok(ConstraintUsingIndex {
10006 name,
10007 index_name,
10008 characteristics,
10009 })
10010 }
10011
10012 pub fn parse_constraint_characteristics(
10014 &mut self,
10015 ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
10016 let mut cc = ConstraintCharacteristics::default();
10017
10018 loop {
10019 if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
10020 {
10021 cc.deferrable = Some(false);
10022 } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
10023 cc.deferrable = Some(true);
10024 } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
10025 if self.parse_keyword(Keyword::DEFERRED) {
10026 cc.initially = Some(DeferrableInitial::Deferred);
10027 } else if self.parse_keyword(Keyword::IMMEDIATE) {
10028 cc.initially = Some(DeferrableInitial::Immediate);
10029 } else {
10030 self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
10031 }
10032 } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
10033 cc.enforced = Some(true);
10034 } else if cc.enforced.is_none()
10035 && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
10036 {
10037 cc.enforced = Some(false);
10038 } else {
10039 break;
10040 }
10041 }
10042
10043 if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
10044 Ok(Some(cc))
10045 } else {
10046 Ok(None)
10047 }
10048 }
10049
    /// Parses a table-level constraint (UNIQUE, PRIMARY KEY, FOREIGN KEY,
    /// CHECK, INDEX/KEY, FULLTEXT/SPATIAL, EXCLUDE), optionally prefixed by
    /// `CONSTRAINT [<name>]`. Returns `Ok(None)` — with the lookahead token
    /// pushed back — when no constraint starts here.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT [<name>]` prefix. Some dialects accept the
        // keyword with no name when a constraint keyword follows directly.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // FULLTEXT/SPATIAL are only treated as constraints in MySQL-like
        // dialects; elsewhere, bail out without consuming them.
        if name.is_none()
            && self
                .peek_one_of_keywords(&[Keyword::FULLTEXT, Keyword::SPATIAL])
                .is_some()
            && !dialect_of!(self is GenericDialect | MySqlDialect)
        {
            return Ok(None);
        }

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX <name>` references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // `UNIQUE KEY` / `UNIQUE INDEX` spellings are only accepted by
                // MySQL-compatible dialects.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX <name>` references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order, each
                // at most once; stop at the first clause that doesn't match.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // Optional [NOT] ENFORCED flag; None when unspecified.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // A bare `USING` here starts the index type, not the name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // A preceding `CONSTRAINT <name>` is rejected for these.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // Postgres-style: EXCLUDE [USING <method>]
                // (<element> WITH <op>, ...) [INCLUDE (...)] [WHERE (...)]
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements = self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // `CONSTRAINT <name>` must be followed by a constraint kind.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint: push the token back for the caller.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10338
10339 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10340 let expr = self.parse_expr()?;
10341 self.expect_keyword_is(Keyword::WITH)?;
10342 let operator_token = self.next_token();
10343 let operator = operator_token.token.to_string();
10344 Ok(ExclusionElement { expr, operator })
10345 }
10346
10347 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10348 Ok(if self.parse_keyword(Keyword::NULLS) {
10349 let not = self.parse_keyword(Keyword::NOT);
10350 self.expect_keyword_is(Keyword::DISTINCT)?;
10351 if not {
10352 NullsDistinctOption::NotDistinct
10353 } else {
10354 NullsDistinctOption::Distinct
10355 }
10356 } else {
10357 NullsDistinctOption::None
10358 })
10359 }
10360
10361 pub fn maybe_parse_options(
10363 &mut self,
10364 keyword: Keyword,
10365 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10366 if let Token::Word(word) = &self.peek_token_ref().token {
10367 if word.keyword == keyword {
10368 return Ok(Some(self.parse_options(keyword)?));
10369 }
10370 };
10371 Ok(None)
10372 }
10373
10374 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10376 if self.parse_keyword(keyword) {
10377 self.expect_token(&Token::LParen)?;
10378 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10379 self.expect_token(&Token::RParen)?;
10380 Ok(options)
10381 } else {
10382 Ok(vec![])
10383 }
10384 }
10385
10386 pub fn parse_options_with_keywords(
10388 &mut self,
10389 keywords: &[Keyword],
10390 ) -> Result<Vec<SqlOption>, ParserError> {
10391 if self.parse_keywords(keywords) {
10392 self.expect_token(&Token::LParen)?;
10393 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10394 self.expect_token(&Token::RParen)?;
10395 Ok(options)
10396 } else {
10397 Ok(vec![])
10398 }
10399 }
10400
10401 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10403 Ok(if self.parse_keyword(Keyword::BTREE) {
10404 IndexType::BTree
10405 } else if self.parse_keyword(Keyword::HASH) {
10406 IndexType::Hash
10407 } else if self.parse_keyword(Keyword::GIN) {
10408 IndexType::GIN
10409 } else if self.parse_keyword(Keyword::GIST) {
10410 IndexType::GiST
10411 } else if self.parse_keyword(Keyword::SPGIST) {
10412 IndexType::SPGiST
10413 } else if self.parse_keyword(Keyword::BRIN) {
10414 IndexType::BRIN
10415 } else if self.parse_keyword(Keyword::BLOOM) {
10416 IndexType::Bloom
10417 } else {
10418 IndexType::Custom(self.parse_identifier()?)
10419 })
10420 }
10421
10422 pub fn parse_optional_using_then_index_type(
10429 &mut self,
10430 ) -> Result<Option<IndexType>, ParserError> {
10431 if self.parse_keyword(Keyword::USING) {
10432 Ok(Some(self.parse_index_type()?))
10433 } else {
10434 Ok(None)
10435 }
10436 }
10437
10438 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10442 self.maybe_parse(|parser| parser.parse_identifier())
10443 }
10444
10445 #[must_use]
10446 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10448 if self.parse_keyword(Keyword::KEY) {
10449 KeyOrIndexDisplay::Key
10450 } else if self.parse_keyword(Keyword::INDEX) {
10451 KeyOrIndexDisplay::Index
10452 } else {
10453 KeyOrIndexDisplay::None
10454 }
10455 }
10456
10457 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10459 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10460 Ok(Some(IndexOption::Using(index_type)))
10461 } else if self.parse_keyword(Keyword::COMMENT) {
10462 let s = self.parse_literal_string()?;
10463 Ok(Some(IndexOption::Comment(s)))
10464 } else {
10465 Ok(None)
10466 }
10467 }
10468
10469 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10471 let mut options = Vec::new();
10472
10473 loop {
10474 match self.parse_optional_index_option()? {
10475 Some(index_option) => options.push(index_option),
10476 None => return Ok(options),
10477 }
10478 }
10479 }
10480
10481 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10483 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10484
10485 match &self.peek_token_ref().token {
10486 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10487 Ok(SqlOption::Ident(self.parse_identifier()?))
10488 }
10489 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10490 self.parse_option_partition()
10491 }
10492 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10493 self.parse_option_clustered()
10494 }
10495 _ => {
10496 let name = self.parse_identifier()?;
10497 self.expect_token(&Token::Eq)?;
10498 let value = self.parse_expr()?;
10499
10500 Ok(SqlOption::KeyValue { key: name, value })
10501 }
10502 }
10503 }
10504
10505 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
10507 if self.parse_keywords(&[
10508 Keyword::CLUSTERED,
10509 Keyword::COLUMNSTORE,
10510 Keyword::INDEX,
10511 Keyword::ORDER,
10512 ]) {
10513 Ok(SqlOption::Clustered(
10514 TableOptionsClustered::ColumnstoreIndexOrder(
10515 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10516 ),
10517 ))
10518 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
10519 Ok(SqlOption::Clustered(
10520 TableOptionsClustered::ColumnstoreIndex,
10521 ))
10522 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
10523 self.expect_token(&Token::LParen)?;
10524
10525 let columns = self.parse_comma_separated(|p| {
10526 let name = p.parse_identifier()?;
10527 let asc = p.parse_asc_desc();
10528
10529 Ok(ClusteredIndex { name, asc })
10530 })?;
10531
10532 self.expect_token(&Token::RParen)?;
10533
10534 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
10535 } else {
10536 Err(ParserError::ParserError(
10537 "invalid CLUSTERED sequence".to_string(),
10538 ))
10539 }
10540 }
10541
10542 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10544 self.expect_keyword_is(Keyword::PARTITION)?;
10545 self.expect_token(&Token::LParen)?;
10546 let column_name = self.parse_identifier()?;
10547
10548 self.expect_keyword_is(Keyword::RANGE)?;
10549 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10550 Some(PartitionRangeDirection::Left)
10551 } else if self.parse_keyword(Keyword::RIGHT) {
10552 Some(PartitionRangeDirection::Right)
10553 } else {
10554 None
10555 };
10556
10557 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10558 self.expect_token(&Token::LParen)?;
10559
10560 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10561
10562 self.expect_token(&Token::RParen)?;
10563 self.expect_token(&Token::RParen)?;
10564
10565 Ok(SqlOption::Partition {
10566 column_name,
10567 range_direction,
10568 for_values,
10569 })
10570 }
10571
10572 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10574 self.expect_token(&Token::LParen)?;
10575 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10576 self.expect_token(&Token::RParen)?;
10577 Ok(Partition::Partitions(partitions))
10578 }
10579
10580 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10582 self.expect_token(&Token::LParen)?;
10583 self.expect_keyword_is(Keyword::SELECT)?;
10584 let projection = self.parse_projection()?;
10585 let group_by = self.parse_optional_group_by()?;
10586 let order_by = self.parse_optional_order_by()?;
10587 self.expect_token(&Token::RParen)?;
10588 Ok(ProjectionSelect {
10589 projection,
10590 group_by,
10591 order_by,
10592 })
10593 }
10594 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10596 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10597 let name = self.parse_identifier()?;
10598 let query = self.parse_projection_select()?;
10599 Ok(AlterTableOperation::AddProjection {
10600 if_not_exists,
10601 name,
10602 select: query,
10603 })
10604 }
10605
10606 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10610 self.expect_keyword_is(Keyword::ALTER)?;
10611 self.expect_keyword_is(Keyword::SORTKEY)?;
10612 self.expect_token(&Token::LParen)?;
10613 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10614 self.expect_token(&Token::RParen)?;
10615 Ok(AlterTableOperation::AlterSortKey { columns })
10616 }
10617
    /// Parses a single operation of an `ALTER TABLE <name> <operation>`
    /// statement and returns the corresponding [`AlterTableOperation`].
    ///
    /// The leading `ALTER TABLE <name>` has already been consumed by the
    /// caller. Dispatch is on the next keyword(s); the ORDER of the probes
    /// below is significant, because every successful `parse_keyword*` call
    /// consumes tokens from the stream.
    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <table constraint> [NOT VALID]
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterTableOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
                && self.parse_keyword(Keyword::PROJECTION)
            {
                // ClickHouse: ADD PROJECTION [IF NOT EXISTS] <name> (SELECT ...)
                return self.parse_alter_table_add_projection();
            } else {
                // ADD [IF NOT EXISTS] PARTITION ... or ADD [COLUMN] <def>.
                let if_not_exists =
                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
                // Collect any number of consecutive PARTITION specs.
                let mut new_partitions = vec![];
                loop {
                    if self.parse_keyword(Keyword::PARTITION) {
                        new_partitions.push(self.parse_partition()?);
                    } else {
                        break;
                    }
                }
                if !new_partitions.is_empty() {
                    AlterTableOperation::AddPartitions {
                        if_not_exists,
                        new_partitions,
                    }
                } else {
                    // No partitions followed, so this is ADD [COLUMN] <def>.
                    let column_keyword = self.parse_keyword(Keyword::COLUMN);

                    // For these dialects IF NOT EXISTS may appear either
                    // before or after the COLUMN keyword; accept both.
                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
                    {
                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
                            || if_not_exists
                    } else {
                        false
                    };

                    let column_def = self.parse_column_def()?;

                    // MySQL-style FIRST / AFTER <column> placement, if any.
                    let column_position = self.parse_column_position()?;

                    AlterTableOperation::AddColumn {
                        column_keyword,
                        if_not_exists,
                        column_def,
                        column_position,
                    }
                }
            }
        } else if self.parse_keyword(Keyword::RENAME) {
            // RENAME CONSTRAINT (Postgres), RENAME TO/AS <table>,
            // or RENAME [COLUMN] <old> TO <new>.
            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
                let old_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_name = self.parse_identifier()?;
                AlterTableOperation::RenameConstraint { old_name, new_name }
            } else if self.parse_keyword(Keyword::TO) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::To(table_name),
                }
            } else if self.parse_keyword(Keyword::AS) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::As(table_name),
                }
            } else {
                // The COLUMN keyword is optional here.
                let _ = self.parse_keyword(Keyword::COLUMN);
                let old_column_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_column_name = self.parse_identifier()?;
                AlterTableOperation::RenameColumn {
                    old_column_name,
                    new_column_name,
                }
            }
        } else if self.parse_keyword(Keyword::DISABLE) {
            // DISABLE ROW LEVEL SECURITY | RULE <name> | TRIGGER <name>
            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::DisableRowLevelSecurity {}
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableTrigger { name }
            } else {
                return self.expected_ref(
                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
                    self.peek_token_ref(),
                );
            }
        } else if self.parse_keyword(Keyword::ENABLE) {
            // ENABLE [ALWAYS|REPLICA] RULE/TRIGGER, or ROW LEVEL SECURITY.
            // Multi-keyword probes must come before their single-keyword
            // fallbacks (e.g. ALWAYS RULE before RULE).
            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysRule { name }
            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysTrigger { name }
            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::EnableRowLevelSecurity {}
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaRule { name }
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaTrigger { name }
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableTrigger { name }
            } else {
                return self.expected_ref(
                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
                    self.peek_token_ref(),
                );
            }
        } else if self.parse_keywords(&[
            Keyword::FORCE,
            Keyword::ROW,
            Keyword::LEVEL,
            Keyword::SECURITY,
        ]) {
            AlterTableOperation::ForceRowLevelSecurity
        } else if self.parse_keywords(&[
            Keyword::NO,
            Keyword::FORCE,
            Keyword::ROW,
            Keyword::LEVEL,
            Keyword::SECURITY,
        ]) {
            AlterTableOperation::NoForceRowLevelSecurity
        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse: CLEAR PROJECTION [IF EXISTS] <name> [IN PARTITION <id>]
            // NOTE(review): `parse_keywords` consumes CLEAR PROJECTION before
            // the dialect check; if the dialect test fails those tokens are
            // already consumed — confirm this ordering is intended.
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::ClearProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse: MATERIALIZE PROJECTION [IF EXISTS] <name> [IN PARTITION <id>]
            // NOTE(review): same keyword-before-dialect-check ordering as the
            // CLEAR PROJECTION branch above.
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::MaterializeProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keyword(Keyword::DROP) {
            // DROP PARTITION / CONSTRAINT / PRIMARY KEY / FOREIGN KEY /
            // INDEX / PROJECTION / CLUSTERING KEY / [COLUMN] <names>.
            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: true,
                }
            } else if self.parse_keyword(Keyword::PARTITION) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: false,
                }
            } else if self.parse_keyword(Keyword::CONSTRAINT) {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropConstraint {
                    if_exists,
                    name,
                    drop_behavior,
                }
            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropPrimaryKey { drop_behavior }
            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropForeignKey {
                    name,
                    drop_behavior,
                }
            } else if self.parse_keyword(Keyword::INDEX) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DropIndex { name }
            } else if self.parse_keyword(Keyword::PROJECTION)
                && dialect_of!(self is ClickHouseDialect|GenericDialect)
            {
                // ClickHouse: DROP PROJECTION [IF EXISTS] <name>
                // NOTE(review): PROJECTION is consumed before the dialect
                // check here as well — see CLEAR PROJECTION above.
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                AlterTableOperation::DropProjection { if_exists, name }
            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
                AlterTableOperation::DropClusteringKey
            } else {
                // DROP [COLUMN] [IF EXISTS] <name>[, ...] [CASCADE|RESTRICT]
                let has_column_keyword = self.parse_keyword(Keyword::COLUMN);
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                // Some dialects allow dropping several columns at once.
                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
                    self.parse_comma_separated(Parser::parse_identifier)?
                } else {
                    vec![self.parse_identifier()?]
                };
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropColumn {
                    has_column_keyword,
                    column_names,
                    if_exists,
                    drop_behavior,
                }
            }
        } else if self.parse_keyword(Keyword::PARTITION) {
            // PARTITION (<exprs>) RENAME TO PARTITION (<exprs>)
            self.expect_token(&Token::LParen)?;
            let before = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::RENAME)?;
            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
            self.expect_token(&Token::LParen)?;
            let renames = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::RenamePartitions {
                old_partitions: before,
                new_partitions: renames,
            }
        } else if self.parse_keyword(Keyword::CHANGE) {
            // MySQL: CHANGE [COLUMN] <old> <new> <type> [options] [position]
            let _ = self.parse_keyword(Keyword::COLUMN);
            let old_name = self.parse_identifier()?;
            let new_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ChangeColumn {
                old_name,
                new_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::MODIFY) {
            // MySQL: MODIFY [COLUMN] <name> <type> [options] [position]
            let _ = self.parse_keyword(Keyword::COLUMN);
            let col_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ModifyColumn {
                col_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::ALTER) {
            // ALTER SORTKEY is handled by a dedicated routine; rewind the
            // ALTER keyword so that routine can re-consume it.
            if self.peek_keyword(Keyword::SORTKEY) {
                self.prev_token();
                return self.parse_alter_sort_key();
            }

            // ALTER [COLUMN] <name> <column operation>
            let _ = self.parse_keyword(Keyword::COLUMN);
            let column_name = self.parse_identifier()?;
            let is_postgresql = dialect_of!(self is PostgreSqlDialect);

            let op: AlterColumnOperation = if self.parse_keywords(&[
                Keyword::SET,
                Keyword::NOT,
                Keyword::NULL,
            ]) {
                AlterColumnOperation::SetNotNull {}
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
                AlterColumnOperation::DropNotNull {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
                AlterColumnOperation::SetDefault {
                    value: self.parse_expr()?,
                }
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
                AlterColumnOperation::DropDefault {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
                // `true`/`false` records whether SET DATA preceded TYPE so
                // the statement round-trips exactly.
                self.parse_set_data_type(true)?
            } else if self.parse_keyword(Keyword::TYPE) {
                self.parse_set_data_type(false)?
            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
                // ADD GENERATED [ALWAYS | BY DEFAULT] AS IDENTITY [(seq opts)]
                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
                    Some(GeneratedAs::Always)
                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
                    Some(GeneratedAs::ByDefault)
                } else {
                    None
                };

                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;

                let mut sequence_options: Option<Vec<SequenceOptions>> = None;

                if self.peek_token_ref().token == Token::LParen {
                    self.expect_token(&Token::LParen)?;
                    sequence_options = Some(self.parse_create_sequence_options()?);
                    self.expect_token(&Token::RParen)?;
                }

                AlterColumnOperation::AddGenerated {
                    generated_as,
                    sequence_options,
                }
            } else {
                // Tailor the error message to the dialect's capabilities.
                let message = if is_postgresql {
                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
                } else {
                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
                };

                return self.expected_ref(message, self.peek_token_ref());
            };
            AlterTableOperation::AlterColumn { column_name, op }
        } else if self.parse_keyword(Keyword::SWAP) {
            // Snowflake: SWAP WITH <table>
            self.expect_keyword_is(Keyword::WITH)?;
            let table_name = self.parse_object_name(false)?;
            AlterTableOperation::SwapWith { table_name }
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
        {
            let new_owner = self.parse_owner()?;
            AlterTableOperation::OwnerTo { new_owner }
        } else if dialect_of!(self is PostgreSqlDialect)
            && self.parse_keywords(&[Keyword::ATTACH, Keyword::PARTITION])
        {
            // Postgres: ATTACH PARTITION <name> FOR VALUES ...
            let partition_name = self.parse_object_name(false)?;
            let partition_bound = self.parse_partition_for_values()?;
            AlterTableOperation::AttachPartitionOf {
                partition_name,
                partition_bound,
            }
        } else if dialect_of!(self is PostgreSqlDialect)
            && self.parse_keywords(&[Keyword::DETACH, Keyword::PARTITION])
        {
            // Postgres: DETACH PARTITION <name> [CONCURRENTLY] [FINALIZE]
            let partition_name = self.parse_object_name(false)?;
            let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
            let finalize = self.parse_keyword(Keyword::FINALIZE);
            AlterTableOperation::DetachPartitionOf {
                partition_name,
                concurrently,
                finalize,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::ATTACH)
        {
            // ClickHouse: ATTACH PART|PARTITION <expr>
            AlterTableOperation::AttachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::DETACH)
        {
            // ClickHouse: DETACH PART|PARTITION <expr>
            AlterTableOperation::DetachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::FREEZE)
        {
            // ClickHouse: FREEZE PART|PARTITION <expr> [WITH NAME <id>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::FreezePartition {
                partition,
                with_name,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::UNFREEZE)
        {
            // ClickHouse: UNFREEZE PART|PARTITION <expr> [WITH NAME <id>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::UnfreezePartition {
                partition,
                with_name,
            }
        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            // CLUSTER BY (<exprs>)
            self.expect_token(&Token::LParen)?;
            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::ClusterBy { exprs }
        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
            AlterTableOperation::SuspendRecluster
        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
            AlterTableOperation::ResumeRecluster
        } else if self.parse_keyword(Keyword::LOCK) {
            // MySQL: LOCK [=] DEFAULT|EXCLUSIVE|NONE|SHARED
            let equals = self.consume_token(&Token::Eq);
            let lock = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::EXCLUSIVE,
                Keyword::NONE,
                Keyword::SHARED,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableLock::Default,
                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
                Some(Keyword::NONE) => AlterTableLock::None,
                Some(Keyword::SHARED) => AlterTableLock::Shared,
                _ => self.expected_ref(
                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
                    self.peek_token_ref(),
                )?,
            };
            AlterTableOperation::Lock { equals, lock }
        } else if self.parse_keyword(Keyword::ALGORITHM) {
            // MySQL: ALGORITHM [=] DEFAULT|INSTANT|INPLACE|COPY
            let equals = self.consume_token(&Token::Eq);
            let algorithm = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::INSTANT,
                Keyword::INPLACE,
                Keyword::COPY,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
                _ => self.expected_ref(
                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
                    self.peek_token_ref(),
                )?,
            };
            AlterTableOperation::Algorithm { equals, algorithm }
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            // MySQL: AUTO_INCREMENT [=] <number>
            let equals = self.consume_token(&Token::Eq);
            let value = self.parse_number_value()?;
            AlterTableOperation::AutoIncrement { equals, value }
        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
            // Postgres: REPLICA IDENTITY NOTHING|FULL|DEFAULT|USING INDEX <name>
            let identity = if self.parse_keyword(Keyword::NOTHING) {
                ReplicaIdentity::Nothing
            } else if self.parse_keyword(Keyword::FULL) {
                ReplicaIdentity::Full
            } else if self.parse_keyword(Keyword::DEFAULT) {
                ReplicaIdentity::Default
            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                ReplicaIdentity::Index(self.parse_identifier()?)
            } else {
                return self.expected_ref(
                    "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
                    self.peek_token_ref(),
                );
            };

            AlterTableOperation::ReplicaIdentity { identity }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterTableOperation::ValidateConstraint { name }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
            let tablespace_name = self.parse_identifier()?;
            AlterTableOperation::SetTablespace { tablespace_name }
        } else {
            // Fallback: SET TBLPROPERTIES (...) or SET (...); anything else
            // is a syntax error listing the accepted alternatives.
            let mut options =
                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
            if !options.is_empty() {
                AlterTableOperation::SetTblProperties {
                    table_properties: options,
                }
            } else {
                options = self.parse_options(Keyword::SET)?;
                if !options.is_empty() {
                    AlterTableOperation::SetOptionsParens { options }
                } else {
                    return self.expected_ref(
                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
                        self.peek_token_ref(),
                    );
                }
            }
        };
        Ok(operation)
    }
11118
11119 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
11120 let data_type = self.parse_data_type()?;
11121 let using = if self.dialect.supports_alter_column_type_using()
11122 && self.parse_keyword(Keyword::USING)
11123 {
11124 Some(self.parse_expr()?)
11125 } else {
11126 None
11127 };
11128 Ok(AlterColumnOperation::SetDataType {
11129 data_type,
11130 using,
11131 had_set,
11132 })
11133 }
11134
11135 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
11136 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
11137 match keyword {
11138 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
11139 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
11140 unexpected_keyword => Err(ParserError::ParserError(
11142 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
11143 )),
11144 }
11145 }
11146
    /// Parses an `ALTER <object type> ...` statement, dispatching to the
    /// routine for the given object type.
    ///
    /// The `ALTER` keyword has already been consumed by the caller; this
    /// reads the object-type keyword next.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
            Keyword::DOMAIN,
            Keyword::TRIGGER,
            Keyword::EXTENSION,
            Keyword::PROCEDURE,
            Keyword::DEFAULT,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // Rewind two tokens so `parse_alter_schema` can re-parse the
                // statement from the keyword preceding SCHEMA.
                // NOTE(review): assumes exactly two tokens (SCHEMA and the
                // one before it) need rewinding — confirm against
                // `parse_alter_schema`'s expectations.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // ALTER ICEBERG TABLE ... (Snowflake-style Iceberg tables).
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            // ALTER DEFAULT PRIVILEGES ...
            Keyword::DEFAULT => self.parse_alter_default_privileges().map(Into::into),
            Keyword::INDEX => {
                // ALTER INDEX <name> RENAME TO <new> | SET TABLESPACE <ts>
                let index_name = self.parse_object_name(false)?;
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
                    let tablespace_name = self.parse_identifier()?;
                    AlterIndexOperation::SetTablespace { tablespace_name }
                } else {
                    return self.expected_ref(
                        "RENAME or SET TABLESPACE after ALTER INDEX",
                        self.peek_token_ref(),
                    );
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            // FUNCTION / AGGREGATE / PROCEDURE share one routine,
            // parameterized by kind.
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::PROCEDURE => self.parse_alter_function(AlterFunctionKind::Procedure),
            Keyword::OPERATOR => {
                // OPERATOR FAMILY / OPERATOR CLASS / bare OPERATOR.
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            Keyword::DOMAIN => self.parse_alter_domain(),
            Keyword::TRIGGER => self.parse_alter_trigger(),
            Keyword::EXTENSION => self.parse_alter_extension(),
            // `expect_one_of_keywords` only returns listed keywords; this
            // arm guards against an internal inconsistency.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR, DOMAIN, TRIGGER, EXTENSION, PROCEDURE, DEFAULT}}, got {unexpected_keyword:?}"),
            )),
        }
    }
11234
11235 fn parse_alter_aggregate_signature(
11236 &mut self,
11237 ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
11238 let name = self.parse_object_name(false)?;
11239 self.expect_token(&Token::LParen)?;
11240
11241 if self.consume_token(&Token::Mul) {
11242 self.expect_token(&Token::RParen)?;
11243 return Ok((
11244 FunctionDesc {
11245 name,
11246 args: Some(vec![]),
11247 },
11248 true,
11249 None,
11250 ));
11251 }
11252
11253 let args =
11254 if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
11255 vec![]
11256 } else {
11257 self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
11258 };
11259
11260 let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11261 Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
11262 } else {
11263 None
11264 };
11265
11266 self.expect_token(&Token::RParen)?;
11267 Ok((
11268 FunctionDesc {
11269 name,
11270 args: Some(args),
11271 },
11272 false,
11273 aggregate_order_by,
11274 ))
11275 }
11276
    /// Attempts to parse one ALTER FUNCTION action (volatility, security,
    /// parallel mode, COST/ROWS, SET/RESET, ...).
    ///
    /// Returns `Ok(None)` when the next tokens do not start any known
    /// action, leaving the stream untouched. The probe order below matters:
    /// multi-keyword forms must be tried before their single-keyword
    /// prefixes (e.g. `NOT LEAKPROOF` before `LEAKPROOF`, `EXTERNAL
    /// SECURITY` before `SECURITY`).
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            // STRICT is shorthand for RETURNS NULL ON NULL INPUT.
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // NOT LEAKPROOF
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // [EXTERNAL] SECURITY DEFINER|INVOKER
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            // PARALLEL UNSAFE|RESTRICTED|SAFE
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // SET <param> FROM CURRENT | SET <param> {= | TO} DEFAULT|<values>
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // Either `=` or `TO` must separate the parameter and value.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // RESET ALL | RESET <param>
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // No action recognized; caller decides whether that is an error.
            None
        };

        Ok(action)
    }
11386
11387 fn parse_alter_function_actions(
11388 &mut self,
11389 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11390 let mut actions = vec![];
11391 while let Some(action) = self.parse_alter_function_action()? {
11392 actions.push(action);
11393 }
11394 if actions.is_empty() {
11395 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11396 }
11397 let restrict = self.parse_keyword(Keyword::RESTRICT);
11398 Ok((actions, restrict))
11399 }
11400
    /// Parses the body of `ALTER FUNCTION | AGGREGATE | PROCEDURE ...`.
    ///
    /// `kind` selects how the signature is parsed (aggregates allow the
    /// wildcard `(*)` form and `ORDER BY` args) and which operations are
    /// permitted afterwards (`DEPENDS ON EXTENSION` and action lists are
    /// only valid for functions/procedures).
    pub fn parse_alter_function(
        &mut self,
        kind: AlterFunctionKind,
    ) -> Result<Statement, ParserError> {
        // Signature differs per kind; aggregates also yield the star flag
        // and ORDER BY arguments.
        let (function, aggregate_star, aggregate_order_by) = match kind {
            AlterFunctionKind::Function | AlterFunctionKind::Procedure => {
                (self.parse_function_desc()?, false, None)
            }
            AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
        };

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterFunctionOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterFunctionOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterFunctionOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) && self.parse_keyword(Keyword::NO)
        {
            // NO DEPENDS ON EXTENSION <name> (functions/procedures only).
            if !self.parse_keyword(Keyword::DEPENDS) {
                return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
            }
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: true,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) && self.parse_keyword(Keyword::DEPENDS)
        {
            // DEPENDS ON EXTENSION <name> (functions/procedures only).
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: false,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) {
            // A bare action list (IMMUTABLE, SECURITY ..., SET ..., etc.).
            let (actions, restrict) = self.parse_alter_function_actions()?;
            AlterFunctionOperation::Actions { actions, restrict }
        } else {
            // Aggregates reach here: only the first three operations apply.
            return self.expected_ref(
                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterFunction(AlterFunction {
            kind,
            function,
            aggregate_order_by,
            aggregate_star,
            operation,
        }))
    }
11466
    /// Parses the body of `ALTER DOMAIN <name> <operation>`.
    ///
    /// The `ALTER DOMAIN` keywords have already been consumed. The probe
    /// order matters because each successful `parse_keyword*` call consumes
    /// tokens (e.g. `DROP CONSTRAINT` before `DROP DEFAULT`).
    pub fn parse_alter_domain(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <constraint> [NOT VALID]
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterDomainOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else {
                return self.expected_ref("constraint after ADD", self.peek_token_ref());
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::CONSTRAINT]) {
            // DROP CONSTRAINT [IF EXISTS] <name> [CASCADE|RESTRICT]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterDomainOperation::DropConstraint {
                if_exists,
                name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
            AlterDomainOperation::DropDefault
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::CONSTRAINT]) {
            let old_name = self.parse_identifier()?;
            self.expect_keyword_is(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameConstraint { old_name, new_name }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterDomainOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterDomainOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
            AlterDomainOperation::SetDefault {
                default: self.parse_expr()?,
            }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterDomainOperation::ValidateConstraint { name }
        } else {
            return self.expected_ref(
                "ADD, DROP, RENAME, OWNER TO, SET, VALIDATE after ALTER DOMAIN",
                self.peek_token_ref(),
            );
        };

        Ok(AlterDomain { name, operation }.into())
    }
11522
11523 pub fn parse_alter_trigger(&mut self) -> Result<Statement, ParserError> {
11525 let name = self.parse_identifier()?;
11526 self.expect_keyword_is(Keyword::ON)?;
11527 let table_name = self.parse_object_name(false)?;
11528
11529 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11530 let new_name = self.parse_identifier()?;
11531 AlterTriggerOperation::RenameTo { new_name }
11532 } else {
11533 return self.expected_ref(
11534 "RENAME TO after ALTER TRIGGER ... ON ...",
11535 self.peek_token_ref(),
11536 );
11537 };
11538
11539 Ok(AlterTrigger {
11540 name,
11541 table_name,
11542 operation,
11543 }
11544 .into())
11545 }
11546
11547 pub fn parse_alter_extension(&mut self) -> Result<Statement, ParserError> {
11549 let name = self.parse_identifier()?;
11550
11551 let operation = if self.parse_keyword(Keyword::UPDATE) {
11552 let version = if self.parse_keyword(Keyword::TO) {
11553 Some(self.parse_identifier()?)
11554 } else {
11555 None
11556 };
11557 AlterExtensionOperation::UpdateTo { version }
11558 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11559 AlterExtensionOperation::SetSchema {
11560 schema_name: self.parse_object_name(false)?,
11561 }
11562 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11563 AlterExtensionOperation::OwnerTo(self.parse_owner()?)
11564 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11565 let new_name = self.parse_identifier()?;
11566 AlterExtensionOperation::RenameTo { new_name }
11567 } else {
11568 return self.expected_ref(
11569 "UPDATE, SET SCHEMA, OWNER TO, or RENAME TO after ALTER EXTENSION",
11570 self.peek_token_ref(),
11571 );
11572 };
11573
11574 Ok(AlterExtension { name, operation }.into())
11575 }
11576
11577 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11579 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11580 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11582 let on_cluster = self.parse_optional_on_cluster()?;
11583 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11584
11585 let mut location = None;
11586 if self.parse_keyword(Keyword::LOCATION) {
11587 location = Some(HiveSetLocation {
11588 has_set: false,
11589 location: self.parse_identifier()?,
11590 });
11591 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11592 location = Some(HiveSetLocation {
11593 has_set: true,
11594 location: self.parse_identifier()?,
11595 });
11596 }
11597
11598 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11599 self.peek_token_ref().clone()
11600 } else {
11601 self.get_current_token().clone()
11602 };
11603
11604 Ok(AlterTable {
11605 name: table_name,
11606 if_exists,
11607 only,
11608 operations,
11609 location,
11610 on_cluster,
11611 table_type: if iceberg {
11612 Some(AlterTableType::Iceberg)
11613 } else {
11614 None
11615 },
11616 end_token: AttachedToken(end_token),
11617 }
11618 .into())
11619 }
11620
    /// Parses the tail of `ALTER VIEW <name> [(columns)] [WITH (options)]
    /// AS <query>`; the `ALTER VIEW` keywords have already been consumed.
    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        // Optional parenthesized column list.
        let columns = self.parse_parenthesized_column_list(Optional, false)?;

        // Optional `WITH (...)` options list.
        let with_options = self.parse_options(Keyword::WITH)?;

        // The defining query is mandatory.
        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;

        Ok(Statement::AlterView {
            name,
            columns,
            query,
            with_options,
        })
    }
11638
    /// Parses the tail of an `ALTER TYPE <name> ...` statement (PostgreSQL
    /// syntax); the `ALTER TYPE` keywords have already been consumed.
    ///
    /// Supported operations: `RENAME TO`, `RENAME VALUE`, `RENAME ATTRIBUTE`,
    /// `ADD VALUE`, `ADD ATTRIBUTE`, `DROP ATTRIBUTE`, `ALTER ATTRIBUTE`,
    /// `OWNER TO`, and `SET SCHEMA`.
    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        // Probes consume keywords on success, so two-keyword prefixes that
        // share a first word (RENAME ..., ADD ...) must be tried in order.
        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterTypeOperation::Rename(AlterTypeRename { new_name })
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
            // RENAME VALUE <existing enum label> TO <new enum label>
            let existing_enum_value = self.parse_identifier()?;
            self.expect_keyword(Keyword::TO)?;
            let new_enum_value = self.parse_identifier()?;
            AlterTypeOperation::RenameValue(AlterTypeRenameValue {
                from: existing_enum_value,
                to: new_enum_value,
            })
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::ATTRIBUTE]) {
            // RENAME ATTRIBUTE <old> TO <new> [CASCADE | RESTRICT]
            let old_name = self.parse_identifier()?;
            self.expect_keyword(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::RenameAttribute {
                old_name,
                new_name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
            // ADD VALUE [IF NOT EXISTS] <label> [BEFORE <label> | AFTER <label>]
            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
            let new_enum_value = self.parse_identifier()?;
            let position = if self.parse_keyword(Keyword::BEFORE) {
                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
            } else if self.parse_keyword(Keyword::AFTER) {
                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
            } else {
                None
            };
            AlterTypeOperation::AddValue(AlterTypeAddValue {
                if_not_exists,
                value: new_enum_value,
                position,
            })
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::ATTRIBUTE]) {
            // ADD ATTRIBUTE <name> <type> [COLLATE <collation>] [CASCADE | RESTRICT]
            let attr_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::AddAttribute {
                name: attr_name,
                data_type,
                collation,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::ATTRIBUTE]) {
            // DROP ATTRIBUTE [IF EXISTS] <name> [CASCADE | RESTRICT]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let attr_name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::DropAttribute {
                if_exists,
                name: attr_name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::ALTER, Keyword::ATTRIBUTE]) {
            // ALTER ATTRIBUTE <name> [SET DATA] TYPE <type> [COLLATE ...]
            let attr_name = self.parse_identifier()?;
            // The `SET DATA` prefix before TYPE is optional noise.
            let _ = self.parse_keywords(&[Keyword::SET, Keyword::DATA]);
            self.expect_keyword(Keyword::TYPE)?;
            let data_type = self.parse_data_type()?;
            let collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::AlterAttribute {
                name: attr_name,
                data_type,
                collation,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let new_owner = self.parse_owner()?;
            AlterTypeOperation::OwnerTo { new_owner }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let new_schema = self.parse_object_name(false)?;
            AlterTypeOperation::SetSchema { new_schema }
        } else {
            return self.expected_ref(
                "{RENAME TO | RENAME VALUE | RENAME ATTRIBUTE | ADD VALUE | \
                ADD ATTRIBUTE | DROP ATTRIBUTE | ALTER ATTRIBUTE | OWNER TO | SET SCHEMA}",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterType(AlterType { name, operation }))
    }
11736
11737 pub fn parse_alter_default_privileges(
11744 &mut self,
11745 ) -> Result<AlterDefaultPrivileges, ParserError> {
11746 self.expect_keyword(Keyword::PRIVILEGES)?;
11747
11748 let for_roles = if self.parse_keyword(Keyword::FOR) {
11749 self.expect_one_of_keywords(&[Keyword::ROLE, Keyword::USER])?;
11751 self.parse_comma_separated(Parser::parse_identifier)?
11752 } else {
11753 Vec::new()
11754 };
11755
11756 let in_schemas = if self.parse_keywords(&[Keyword::IN, Keyword::SCHEMA]) {
11757 self.parse_comma_separated(Parser::parse_identifier)?
11758 } else {
11759 Vec::new()
11760 };
11761
11762 let action = self.parse_alter_default_privileges_action()?;
11763
11764 Ok(AlterDefaultPrivileges {
11765 for_roles,
11766 in_schemas,
11767 action,
11768 })
11769 }
11770
    /// Parses the GRANT/REVOKE action clause of an `ALTER DEFAULT PRIVILEGES`
    /// statement.
    fn parse_alter_default_privileges_action(
        &mut self,
    ) -> Result<AlterDefaultPrivilegesAction, ParserError> {
        let kw = self.expect_one_of_keywords(&[Keyword::GRANT, Keyword::REVOKE])?;
        match kw {
            Keyword::GRANT => {
                // GRANT <privileges> ON <object type> TO <grantees>
                //   [WITH GRANT OPTION]
                let privileges = self.parse_alter_default_privileges_privileges()?;
                self.expect_keyword(Keyword::ON)?;
                let object_type = self.parse_alter_default_privileges_object_type()?;
                self.expect_keyword(Keyword::TO)?;
                let grantees = self.parse_grantees()?;
                let with_grant_option =
                    self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
                Ok(AlterDefaultPrivilegesAction::Grant {
                    privileges,
                    object_type,
                    grantees,
                    with_grant_option,
                })
            }
            Keyword::REVOKE => {
                // REVOKE [GRANT OPTION FOR] <privileges> ON <object type>
                //   FROM <grantees> [CASCADE | RESTRICT]
                let grant_option_for =
                    self.parse_keywords(&[Keyword::GRANT, Keyword::OPTION, Keyword::FOR]);
                let privileges = self.parse_alter_default_privileges_privileges()?;
                self.expect_keyword(Keyword::ON)?;
                let object_type = self.parse_alter_default_privileges_object_type()?;
                self.expect_keyword(Keyword::FROM)?;
                let grantees = self.parse_grantees()?;
                let cascade = self.parse_cascade_option();
                Ok(AlterDefaultPrivilegesAction::Revoke {
                    grant_option_for,
                    privileges,
                    object_type,
                    grantees,
                    cascade,
                })
            }
            // expect_one_of_keywords only returns keywords from its argument
            // list, so this arm should be unreachable.
            unexpected_keyword => Err(ParserError::ParserError(format!(
                "Internal parser error: expected GRANT or REVOKE, got {unexpected_keyword:?}"
            ))),
        }
    }
11813
11814 fn parse_alter_default_privileges_privileges(&mut self) -> Result<Privileges, ParserError> {
11815 if self.parse_keyword(Keyword::ALL) {
11816 Ok(Privileges::All {
11817 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
11818 })
11819 } else {
11820 Ok(Privileges::Actions(self.parse_actions_list()?))
11821 }
11822 }
11823
11824 fn parse_alter_default_privileges_object_type(
11825 &mut self,
11826 ) -> Result<AlterDefaultPrivilegesObjectType, ParserError> {
11827 let kw = self.expect_one_of_keywords(&[
11828 Keyword::TABLES,
11829 Keyword::SEQUENCES,
11830 Keyword::FUNCTIONS,
11831 Keyword::ROUTINES,
11832 Keyword::TYPES,
11833 Keyword::SCHEMAS,
11834 ])?;
11835 match kw {
11836 Keyword::TABLES => Ok(AlterDefaultPrivilegesObjectType::Tables),
11837 Keyword::SEQUENCES => Ok(AlterDefaultPrivilegesObjectType::Sequences),
11838 Keyword::FUNCTIONS => Ok(AlterDefaultPrivilegesObjectType::Functions),
11839 Keyword::ROUTINES => Ok(AlterDefaultPrivilegesObjectType::Routines),
11840 Keyword::TYPES => Ok(AlterDefaultPrivilegesObjectType::Types),
11841 Keyword::SCHEMAS => Ok(AlterDefaultPrivilegesObjectType::Schemas),
11842 unexpected_keyword => Err(ParserError::ParserError(format!(
11843 "Internal parser error: expected one of {{TABLES, SEQUENCES, FUNCTIONS, ROUTINES, TYPES, SCHEMAS}}, got {unexpected_keyword:?}"
11844 ))),
11845 }
11846 }
11847
11848 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11852 let name = self.parse_object_name(false)?;
11853 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11854 AlterCollationOperation::RenameTo {
11855 new_name: self.parse_identifier()?,
11856 }
11857 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11858 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11859 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11860 AlterCollationOperation::SetSchema {
11861 schema_name: self.parse_object_name(false)?,
11862 }
11863 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11864 AlterCollationOperation::RefreshVersion
11865 } else {
11866 return self.expected_ref(
11867 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11868 self.peek_token_ref(),
11869 );
11870 };
11871
11872 Ok(AlterCollation { name, operation })
11873 }
11874
    /// Parses the tail of `ALTER OPERATOR <name> ( {<left_type> | NONE} ,
    /// <right_type> ) ...` (PostgreSQL syntax); the `ALTER OPERATOR` keywords
    /// have already been consumed.
    ///
    /// Supported operations: `OWNER TO <owner>`, `SET SCHEMA <schema>`, and
    /// `SET ( <option> [, ...] )` with the RESTRICT / JOIN / COMMUTATOR /
    /// NEGATOR / HASHES / MERGES options.
    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
        let name = self.parse_operator_name()?;

        // Operand signature: only the left operand may be NONE (prefix
        // operators); the right operand type is always required here.
        self.expect_token(&Token::LParen)?;

        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // Probe order matters: `SET SCHEMA` must be tried before the bare
        // `SET ( ... )` form, since both start with SET.
        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            self.expect_token(&Token::LParen)?;

            // Comma-separated option list; loops until no trailing comma.
            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    Keyword::RESTRICT => {
                        // RESTRICT = { <procedure name> | NONE }
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        // JOIN = { <procedure name> | NONE }
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    Keyword::COMMUTATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    // HASHES and MERGES are bare flags with no value.
                    Keyword::HASHES => {
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // expect_one_of_keywords only returns keywords from the
                    // list above, so this arm should be unreachable.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        })
    }
11984
11985 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11987 let strategy_number = self.parse_literal_uint()?;
11988 let operator_name = self.parse_operator_name()?;
11989
11990 self.expect_token(&Token::LParen)?;
11992 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11993 self.expect_token(&Token::RParen)?;
11994
11995 let purpose = if self.parse_keyword(Keyword::FOR) {
11997 if self.parse_keyword(Keyword::SEARCH) {
11998 Some(OperatorPurpose::ForSearch)
11999 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12000 let sort_family = self.parse_object_name(false)?;
12001 Some(OperatorPurpose::ForOrderBy { sort_family })
12002 } else {
12003 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
12004 }
12005 } else {
12006 None
12007 };
12008
12009 Ok(OperatorFamilyItem::Operator {
12010 strategy_number,
12011 operator_name,
12012 op_types,
12013 purpose,
12014 })
12015 }
12016
12017 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
12019 let support_number = self.parse_literal_uint()?;
12020
12021 let op_types =
12023 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
12024 let types = self.parse_comma_separated(Parser::parse_data_type)?;
12025 self.expect_token(&Token::RParen)?;
12026 Some(types)
12027 } else if self.consume_token(&Token::LParen) {
12028 self.expect_token(&Token::RParen)?;
12029 Some(vec![])
12030 } else {
12031 None
12032 };
12033
12034 let function_name = self.parse_object_name(false)?;
12035
12036 let argument_types = if self.consume_token(&Token::LParen) {
12038 if self.peek_token_ref().token == Token::RParen {
12039 self.expect_token(&Token::RParen)?;
12040 vec![]
12041 } else {
12042 let types = self.parse_comma_separated(Parser::parse_data_type)?;
12043 self.expect_token(&Token::RParen)?;
12044 types
12045 }
12046 } else {
12047 vec![]
12048 };
12049
12050 Ok(OperatorFamilyItem::Function {
12051 support_number,
12052 op_types,
12053 function_name,
12054 argument_types,
12055 })
12056 }
12057
12058 fn parse_operator_family_drop_operator(
12060 &mut self,
12061 ) -> Result<OperatorFamilyDropItem, ParserError> {
12062 let strategy_number = self.parse_literal_uint()?;
12063
12064 self.expect_token(&Token::LParen)?;
12066 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
12067 self.expect_token(&Token::RParen)?;
12068
12069 Ok(OperatorFamilyDropItem::Operator {
12070 strategy_number,
12071 op_types,
12072 })
12073 }
12074
12075 fn parse_operator_family_drop_function(
12077 &mut self,
12078 ) -> Result<OperatorFamilyDropItem, ParserError> {
12079 let support_number = self.parse_literal_uint()?;
12080
12081 self.expect_token(&Token::LParen)?;
12083 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
12084 self.expect_token(&Token::RParen)?;
12085
12086 Ok(OperatorFamilyDropItem::Function {
12087 support_number,
12088 op_types,
12089 })
12090 }
12091
12092 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
12094 if self.parse_keyword(Keyword::OPERATOR) {
12095 self.parse_operator_family_add_operator()
12096 } else if self.parse_keyword(Keyword::FUNCTION) {
12097 self.parse_operator_family_add_function()
12098 } else {
12099 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
12100 }
12101 }
12102
12103 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
12105 if self.parse_keyword(Keyword::OPERATOR) {
12106 self.parse_operator_family_drop_operator()
12107 } else if self.parse_keyword(Keyword::FUNCTION) {
12108 self.parse_operator_family_drop_function()
12109 } else {
12110 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
12111 }
12112 }
12113
    /// Parses the tail of `ALTER OPERATOR FAMILY <name> USING <index_method>
    /// ...`; the `ALTER OPERATOR FAMILY` keywords have already been consumed.
    ///
    /// Supported operations: `ADD <items>`, `DROP <items>`, `RENAME TO`,
    /// `OWNER TO`, and `SET SCHEMA`.
    pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
        let name = self.parse_object_name(false)?;
        // The USING <index method> clause is mandatory.
        self.expect_keyword(Keyword::USING)?;
        let using = self.parse_identifier()?;

        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD { OPERATOR ... | FUNCTION ... } [, ...]
            let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
            AlterOperatorFamilyOperation::Add { items }
        } else if self.parse_keyword(Keyword::DROP) {
            // DROP { OPERATOR ... | FUNCTION ... } [, ...]
            let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
            AlterOperatorFamilyOperation::Drop { items }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterOperatorFamilyOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterOperatorFamilyOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorFamilyOperation::SetSchema { schema_name }
        } else {
            return self.expected_ref(
                "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
                self.peek_token_ref(),
            );
        };

        Ok(AlterOperatorFamily {
            name,
            using,
            operation,
        })
    }
12149
12150 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
12154 let name = self.parse_object_name(false)?;
12155 self.expect_keyword(Keyword::USING)?;
12156 let using = self.parse_identifier()?;
12157
12158 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12159 let new_name = self.parse_object_name(false)?;
12160 AlterOperatorClassOperation::RenameTo { new_name }
12161 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12162 let owner = self.parse_owner()?;
12163 AlterOperatorClassOperation::OwnerTo(owner)
12164 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
12165 let schema_name = self.parse_object_name(false)?;
12166 AlterOperatorClassOperation::SetSchema { schema_name }
12167 } else {
12168 return self.expected_ref(
12169 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
12170 self.peek_token_ref(),
12171 );
12172 };
12173
12174 Ok(AlterOperatorClass {
12175 name,
12176 using,
12177 operation,
12178 })
12179 }
12180
    /// Parses a complete `ALTER SCHEMA` statement. Unlike most `parse_alter_*`
    /// helpers, this one consumes the leading `ALTER SCHEMA` keywords itself.
    ///
    /// Supported operations: `SET OPTIONS (...)`, `SET DEFAULT COLLATE <expr>`,
    /// `ADD REPLICA <name> [OPTIONS (...)]`, `DROP REPLICA <name>`,
    /// `RENAME TO <name>`, and `OWNER TO <owner>`.
    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
            // Push OPTIONS back so parse_options can consume it together with
            // its parenthesized list.
            self.prev_token();
            let options = self.parse_options(Keyword::OPTIONS)?;
            AlterSchemaOperation::SetOptionsParens { options }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
            let collate = self.parse_expr()?;
            AlterSchemaOperation::SetDefaultCollate { collate }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            // The replica's OPTIONS clause is optional.
            let options = if self.peek_keyword(Keyword::OPTIONS) {
                Some(self.parse_options(Keyword::OPTIONS)?)
            } else {
                None
            };
            AlterSchemaOperation::AddReplica { replica, options }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            AlterSchemaOperation::DropReplica { replica }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterSchemaOperation::Rename { name: new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterSchemaOperation::OwnerTo { owner }
        } else {
            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
        };
        // The AST supports multiple operations; this parser currently
        // produces exactly one.
        Ok(Statement::AlterSchema(AlterSchema {
            name,
            if_exists,
            operations: vec![operation],
        }))
    }
12221
12222 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
12225 let object_name = self.parse_object_name(false)?;
12226 if self.peek_token_ref().token == Token::LParen {
12227 match self.parse_function(object_name)? {
12228 Expr::Function(f) => Ok(Statement::Call(f)),
12229 other => parser_err!(
12230 format!("Expected a simple procedure call but found: {other}"),
12231 self.peek_token_ref().span.start
12232 ),
12233 }
12234 } else {
12235 Ok(Statement::Call(Function {
12236 name: object_name,
12237 uses_odbc_syntax: false,
12238 parameters: FunctionArguments::None,
12239 args: FunctionArguments::None,
12240 over: None,
12241 filter: None,
12242 null_treatment: None,
12243 within_group: vec![],
12244 }))
12245 }
12246 }
12247
    /// Parses the tail of a PostgreSQL-style `COPY` statement; the `COPY`
    /// keyword has already been consumed. Grammar:
    /// `COPY { <table> [(cols)] | ( <query> ) } { FROM | TO }
    /// { STDIN | STDOUT | PROGRAM '<cmd>' | '<file>' }
    /// [[WITH] ( <options> )] [<legacy options> ...]`,
    /// optionally followed by inline data for `COPY ... FROM STDIN`.
    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
        let source;
        if self.consume_token(&Token::LParen) {
            // Parenthesized form: COPY ( <query> ) ...
            source = CopySource::Query(self.parse_query()?);
            self.expect_token(&Token::RParen)?;
        } else {
            // Table form with an optional column list.
            let table_name = self.parse_object_name(false)?;
            let columns = self.parse_parenthesized_column_list(Optional, false)?;
            source = CopySource::Table {
                table_name,
                columns,
            };
        }
        // Direction: FROM => reading into the source, TO => writing out of it.
        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
            Some(Keyword::FROM) => false,
            Some(Keyword::TO) => true,
            _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
        };
        if !to {
            // A query source only makes sense when writing out (COPY ... TO).
            if let CopySource::Query(_) = source {
                return Err(ParserError::ParserError(
                    "COPY ... FROM does not support query as a source".to_string(),
                ));
            }
        }
        let target = if self.parse_keyword(Keyword::STDIN) {
            CopyTarget::Stdin
        } else if self.parse_keyword(Keyword::STDOUT) {
            CopyTarget::Stdout
        } else if self.parse_keyword(Keyword::PROGRAM) {
            CopyTarget::Program {
                command: self.parse_literal_string()?,
            }
        } else {
            CopyTarget::File {
                filename: self.parse_literal_string()?,
            }
        };
        // The WITH keyword before the option list is optional and not
        // recorded in the AST.
        let _ = self.parse_keyword(Keyword::WITH);
        let mut options = vec![];
        if self.consume_token(&Token::LParen) {
            options = self.parse_comma_separated(Parser::parse_copy_option)?;
            self.expect_token(&Token::RParen)?;
        }
        // Legacy (unparenthesized) options may follow; keep consuming for as
        // long as one parses successfully.
        let mut legacy_options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            legacy_options.push(opt);
        }
        // For COPY ... FROM STDIN with more input remaining, the inline data
        // begins after the statement's semicolon and is read as TSV rows.
        let values =
            if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
                self.expect_token(&Token::SemiColon)?;
                self.parse_tsv()
            } else {
                vec![]
            };
        Ok(Statement::Copy {
            source,
            to,
            target,
            options,
            legacy_options,
            values,
        })
    }
12315
12316 fn parse_open(&mut self) -> Result<Statement, ParserError> {
12318 self.expect_keyword(Keyword::OPEN)?;
12319 Ok(Statement::Open(OpenStatement {
12320 cursor_name: self.parse_identifier()?,
12321 }))
12322 }
12323
12324 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
12326 let cursor = if self.parse_keyword(Keyword::ALL) {
12327 CloseCursor::All
12328 } else {
12329 let name = self.parse_identifier()?;
12330
12331 CloseCursor::Specific { name }
12332 };
12333
12334 Ok(Statement::Close { cursor })
12335 }
12336
    /// Parses a single modern (parenthesized-list) COPY option, e.g.
    /// `FORMAT csv`, `HEADER`, `FORCE_QUOTE (a, b)`.
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
            // FREEZE takes an optional boolean: a bare keyword or TRUE means
            // enabled, only an explicit FALSE disables it.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            // HEADER uses the same optional-boolean convention as FREEZE.
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            // The FORCE_* options each take a mandatory parenthesized
            // column list.
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected_ref("option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12378
12379 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
12380 if self.parse_keyword(Keyword::FORMAT) {
12382 let _ = self.parse_keyword(Keyword::AS);
12383 }
12384
12385 let ret = match self.parse_one_of_keywords(&[
12386 Keyword::ACCEPTANYDATE,
12387 Keyword::ACCEPTINVCHARS,
12388 Keyword::ADDQUOTES,
12389 Keyword::ALLOWOVERWRITE,
12390 Keyword::BINARY,
12391 Keyword::BLANKSASNULL,
12392 Keyword::BZIP2,
12393 Keyword::CLEANPATH,
12394 Keyword::COMPUPDATE,
12395 Keyword::CREDENTIALS,
12396 Keyword::CSV,
12397 Keyword::DATEFORMAT,
12398 Keyword::DELIMITER,
12399 Keyword::EMPTYASNULL,
12400 Keyword::ENCRYPTED,
12401 Keyword::ESCAPE,
12402 Keyword::EXTENSION,
12403 Keyword::FIXEDWIDTH,
12404 Keyword::GZIP,
12405 Keyword::HEADER,
12406 Keyword::IAM_ROLE,
12407 Keyword::IGNOREHEADER,
12408 Keyword::JSON,
12409 Keyword::MANIFEST,
12410 Keyword::MAXFILESIZE,
12411 Keyword::NULL,
12412 Keyword::PARALLEL,
12413 Keyword::PARQUET,
12414 Keyword::PARTITION,
12415 Keyword::REGION,
12416 Keyword::REMOVEQUOTES,
12417 Keyword::ROWGROUPSIZE,
12418 Keyword::STATUPDATE,
12419 Keyword::TIMEFORMAT,
12420 Keyword::TRUNCATECOLUMNS,
12421 Keyword::ZSTD,
12422 ]) {
12423 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
12424 Some(Keyword::ACCEPTINVCHARS) => {
12425 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12427 Some(self.parse_literal_string()?)
12428 } else {
12429 None
12430 };
12431 CopyLegacyOption::AcceptInvChars(ch)
12432 }
12433 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
12434 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
12435 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
12436 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
12437 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
12438 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
12439 Some(Keyword::COMPUPDATE) => {
12440 let preset = self.parse_keyword(Keyword::PRESET);
12441 let enabled = match self.parse_one_of_keywords(&[
12442 Keyword::TRUE,
12443 Keyword::FALSE,
12444 Keyword::ON,
12445 Keyword::OFF,
12446 ]) {
12447 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12448 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12449 _ => None,
12450 };
12451 CopyLegacyOption::CompUpdate { preset, enabled }
12452 }
12453 Some(Keyword::CREDENTIALS) => {
12454 CopyLegacyOption::Credentials(self.parse_literal_string()?)
12455 }
12456 Some(Keyword::CSV) => CopyLegacyOption::Csv({
12457 let mut opts = vec![];
12458 while let Some(opt) =
12459 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
12460 {
12461 opts.push(opt);
12462 }
12463 opts
12464 }),
12465 Some(Keyword::DATEFORMAT) => {
12466 let _ = self.parse_keyword(Keyword::AS);
12467 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12468 Some(self.parse_literal_string()?)
12469 } else {
12470 None
12471 };
12472 CopyLegacyOption::DateFormat(fmt)
12473 }
12474 Some(Keyword::DELIMITER) => {
12475 let _ = self.parse_keyword(Keyword::AS);
12476 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
12477 }
12478 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
12479 Some(Keyword::ENCRYPTED) => {
12480 let auto = self.parse_keyword(Keyword::AUTO);
12481 CopyLegacyOption::Encrypted { auto }
12482 }
12483 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
12484 Some(Keyword::EXTENSION) => {
12485 let ext = self.parse_literal_string()?;
12486 CopyLegacyOption::Extension(ext)
12487 }
12488 Some(Keyword::FIXEDWIDTH) => {
12489 let spec = self.parse_literal_string()?;
12490 CopyLegacyOption::FixedWidth(spec)
12491 }
12492 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
12493 Some(Keyword::HEADER) => CopyLegacyOption::Header,
12494 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
12495 Some(Keyword::IGNOREHEADER) => {
12496 let _ = self.parse_keyword(Keyword::AS);
12497 let num_rows = self.parse_literal_uint()?;
12498 CopyLegacyOption::IgnoreHeader(num_rows)
12499 }
12500 Some(Keyword::JSON) => {
12501 let _ = self.parse_keyword(Keyword::AS);
12502 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12503 Some(self.parse_literal_string()?)
12504 } else {
12505 None
12506 };
12507 CopyLegacyOption::Json(fmt)
12508 }
12509 Some(Keyword::MANIFEST) => {
12510 let verbose = self.parse_keyword(Keyword::VERBOSE);
12511 CopyLegacyOption::Manifest { verbose }
12512 }
12513 Some(Keyword::MAXFILESIZE) => {
12514 let _ = self.parse_keyword(Keyword::AS);
12515 let size = self.parse_number_value()?;
12516 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12517 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12518 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12519 _ => None,
12520 };
12521 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
12522 }
12523 Some(Keyword::NULL) => {
12524 let _ = self.parse_keyword(Keyword::AS);
12525 CopyLegacyOption::Null(self.parse_literal_string()?)
12526 }
12527 Some(Keyword::PARALLEL) => {
12528 let enabled = match self.parse_one_of_keywords(&[
12529 Keyword::TRUE,
12530 Keyword::FALSE,
12531 Keyword::ON,
12532 Keyword::OFF,
12533 ]) {
12534 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12535 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12536 _ => None,
12537 };
12538 CopyLegacyOption::Parallel(enabled)
12539 }
12540 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
12541 Some(Keyword::PARTITION) => {
12542 self.expect_keyword(Keyword::BY)?;
12543 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
12544 let include = self.parse_keyword(Keyword::INCLUDE);
12545 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
12546 }
12547 Some(Keyword::REGION) => {
12548 let _ = self.parse_keyword(Keyword::AS);
12549 let region = self.parse_literal_string()?;
12550 CopyLegacyOption::Region(region)
12551 }
12552 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
12553 Some(Keyword::ROWGROUPSIZE) => {
12554 let _ = self.parse_keyword(Keyword::AS);
12555 let file_size = self.parse_file_size()?;
12556 CopyLegacyOption::RowGroupSize(file_size)
12557 }
12558 Some(Keyword::STATUPDATE) => {
12559 let enabled = match self.parse_one_of_keywords(&[
12560 Keyword::TRUE,
12561 Keyword::FALSE,
12562 Keyword::ON,
12563 Keyword::OFF,
12564 ]) {
12565 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12566 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12567 _ => None,
12568 };
12569 CopyLegacyOption::StatUpdate(enabled)
12570 }
12571 Some(Keyword::TIMEFORMAT) => {
12572 let _ = self.parse_keyword(Keyword::AS);
12573 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12574 Some(self.parse_literal_string()?)
12575 } else {
12576 None
12577 };
12578 CopyLegacyOption::TimeFormat(fmt)
12579 }
12580 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
12581 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
12582 _ => self.expected_ref("option", self.peek_token_ref())?,
12583 };
12584 Ok(ret)
12585 }
12586
12587 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12588 let size = self.parse_number_value()?;
12589 let unit = self.maybe_parse_file_size_unit();
12590 Ok(FileSize { size, unit })
12591 }
12592
12593 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12594 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12595 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12596 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12597 _ => None,
12598 }
12599 }
12600
12601 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12602 if self.parse_keyword(Keyword::DEFAULT) {
12603 Ok(IamRoleKind::Default)
12604 } else {
12605 let arn = self.parse_literal_string()?;
12606 Ok(IamRoleKind::Arn(arn))
12607 }
12608 }
12609
    /// Parse one option of the legacy `COPY ... CSV` syntax:
    /// `HEADER`, `QUOTE [AS] 'c'`, `ESCAPE [AS] 'c'`, `FORCE NOT NULL cols`
    /// or `FORCE QUOTE cols`.
    ///
    /// NOTE(review): the two `FORCE` arms use match guards that consume
    /// tokens when they succeed, so their order (NOT NULL tried before
    /// QUOTE) is behavior-significant — do not reorder.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                // Optional `AS` before the single-character quote literal.
                let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                // Optional `AS` before the single-character escape literal.
                let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            // Reached when no option keyword matched, or when `FORCE` was
            // consumed but followed by neither `NOT NULL` nor `QUOTE`.
            _ => self.expected_ref("csv option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12640
12641 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12642 let s = self.parse_literal_string()?;
12643 if s.len() != 1 {
12644 let loc = self
12645 .tokens
12646 .get(self.index - 1)
12647 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12648 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12649 }
12650 Ok(s.chars().next().unwrap())
12651 }
12652
    /// Parse a tab-separated-values data section; thin alias for
    /// [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
12658
12659 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
12661 let mut values = vec![];
12662 let mut content = String::new();
12663 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
12664 match t {
12665 Token::Whitespace(Whitespace::Tab) => {
12666 values.push(Some(core::mem::take(&mut content)));
12667 }
12668 Token::Whitespace(Whitespace::Newline) => {
12669 values.push(Some(core::mem::take(&mut content)));
12670 }
12671 Token::Backslash => {
12672 if self.consume_token(&Token::Period) {
12673 return values;
12674 }
12675 if let Token::Word(w) = self.next_token().token {
12676 if w.value == "N" {
12677 values.push(None);
12678 }
12679 }
12680 }
12681 _ => {
12682 content.push_str(&t.to_string());
12683 }
12684 }
12685 }
12686 values
12687 }
12688
    /// Parse a literal value: number, string (in its many quoting variants),
    /// boolean, `NULL`, or a placeholder such as `:name` / `@name`.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper that attaches the consumed token's span to a parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are only literals in dialects that support them.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted word that is not a keyword is treated as a string
                // literal; only single- and double-quote styles are accepted.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Adjacent string literals may be implicitly concatenated,
            // depending on the dialect (see `maybe_concat_string_literal`).
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            tok @ Token::Colon | tok @ Token::AtSign => {
                // Placeholder such as `:name` or `@1`: the identifier (or
                // integer) must follow immediately — whitespace is NOT
                // skipped here, hence `next_token_no_skip`.
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    // `false` = the number has no trailing `L` suffix.
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The placeholder's span covers the sigil and the identifier.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12806
    /// Append any immediately-following string literals to `str` when the
    /// dialect supports implicit string-literal concatenation; otherwise
    /// return `str` unchanged.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            // Unconditional concatenation: `'a' 'b'` => `'ab'`. Consume
            // adjacent string literals greedily.
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // Variant where concatenation only happens when the literals are
            // separated by at least one newline.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    // Other whitespace is consumed but does not satisfy the
                    // newline requirement.
                    Token::Whitespace(_) => {
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        // Clone because `s` borrows the peeked token, which
                        // conflicts with advancing the parser below.
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        // Require another newline before the next literal.
                        after_newline = false;
                    }
                    // Anything else ends the concatenation scan.
                    _ => break,
                }
            }
        }

        str
    }
12845
12846 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12848 let value_wrapper = self.parse_value()?;
12849 match &value_wrapper.value {
12850 Value::Number(_, _) => Ok(value_wrapper),
12851 Value::Placeholder(_) => Ok(value_wrapper),
12852 _ => {
12853 self.prev_token();
12854 self.expected_ref("literal number", self.peek_token_ref())
12855 }
12856 }
12857 }
12858
12859 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12862 let next_token = self.next_token();
12863 match next_token.token {
12864 Token::Plus => Ok(Expr::UnaryOp {
12865 op: UnaryOperator::Plus,
12866 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12867 }),
12868 Token::Minus => Ok(Expr::UnaryOp {
12869 op: UnaryOperator::Minus,
12870 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12871 }),
12872 _ => {
12873 self.prev_token();
12874 Ok(Expr::Value(self.parse_number_value()?))
12875 }
12876 }
12877 }
12878
12879 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12880 let next_token = self.next_token();
12881 let span = next_token.span;
12882 match next_token.token {
12883 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12884 Value::SingleQuotedString(s.to_string()).with_span(span),
12885 )),
12886 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12887 Value::DoubleQuotedString(s.to_string()).with_span(span),
12888 )),
12889 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12890 Value::HexStringLiteral(s.to_string()).with_span(span),
12891 )),
12892 unexpected => self.expected(
12893 "a string value",
12894 TokenWithSpan {
12895 token: unexpected,
12896 span,
12897 },
12898 ),
12899 }
12900 }
12901
12902 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12904 let next_token = self.next_token();
12905 match next_token.token {
12906 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12907 _ => self.expected("literal int", next_token),
12908 }
12909 }
12910
12911 fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
12914 let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
12915 let peek_token = parser.peek_token();
12916 let span = peek_token.span;
12917 match peek_token.token {
12918 Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
12919 {
12920 parser.next_token();
12921 Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
12922 }
12923 _ => Ok(Expr::Value(
12924 Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
12925 )),
12926 }
12927 };
12928
12929 Ok(CreateFunctionBody::AsBeforeOptions {
12930 body: parse_string_expr(self)?,
12931 link_symbol: if self.consume_token(&Token::Comma) {
12932 Some(parse_string_expr(self)?)
12933 } else {
12934 None
12935 },
12936 })
12937 }
12938
    /// Parse a string literal (or a bare non-keyword word) and return its
    /// contents without the surrounding quotes.
    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            // A bare word is accepted as long as it is not a keyword.
            Token::Word(Word {
                value,
                keyword: Keyword::NoKeyword,
                ..
            }) => Ok(value),
            Token::SingleQuotedString(s) => Ok(s),
            Token::DoubleQuotedString(s) => Ok(s),
            // `E'...'` escaped strings: Postgres/generic dialects only.
            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s)
            }
            Token::UnicodeStringLiteral(s) => Ok(s),
            // `$tag$...$tag$` dollar-quoted strings: Postgres/generic only.
            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s.value)
            }
            _ => self.expected("literal string", next_token),
        }
    }
12960
12961 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12963 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12964 Some(Keyword::TRUE) => Ok(true),
12965 Some(Keyword::FALSE) => Ok(false),
12966 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12967 }
12968 }
12969
12970 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
12972 let neg = self.parse_keyword(Keyword::NOT);
12973 let normalized_form = self.maybe_parse(|parser| {
12974 match parser.parse_one_of_keywords(&[
12975 Keyword::NFC,
12976 Keyword::NFD,
12977 Keyword::NFKC,
12978 Keyword::NFKD,
12979 ]) {
12980 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
12981 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
12982 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
12983 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
12984 _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
12985 }
12986 })?;
12987 if self.parse_keyword(Keyword::NORMALIZED) {
12988 return Ok(Expr::IsNormalized {
12989 expr: Box::new(expr),
12990 form: normalized_form,
12991 negated: neg,
12992 });
12993 }
12994 self.expected_ref("unicode normalization form", self.peek_token_ref())
12995 }
12996
12997 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12999 self.expect_token(&Token::LParen)?;
13000 let values = self.parse_comma_separated(|parser| {
13001 let name = parser.parse_literal_string()?;
13002 let e = if parser.consume_token(&Token::Eq) {
13003 let value = parser.parse_number()?;
13004 EnumMember::NamedValue(name, value)
13005 } else {
13006 EnumMember::Name(name)
13007 };
13008 Ok(e)
13009 })?;
13010 self.expect_token(&Token::RParen)?;
13011
13012 Ok(values)
13013 }
13014
13015 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
13017 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
13018 if trailing_bracket.0 {
13019 return parser_err!(
13020 format!("unmatched > after parsing data type {ty}"),
13021 self.peek_token_ref()
13022 );
13023 }
13024
13025 Ok(ty)
13026 }
13027
13028 fn parse_data_type_helper(
13029 &mut self,
13030 ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
13031 let dialect = self.dialect;
13032 self.advance_token();
13033 let next_token = self.get_current_token();
13034 let next_token_index = self.get_current_index();
13035
13036 let mut trailing_bracket: MatchedTrailingBracket = false.into();
13037 let mut data = match &next_token.token {
13038 Token::Word(w) => match w.keyword {
13039 Keyword::BOOLEAN => Ok(DataType::Boolean),
13040 Keyword::BOOL => Ok(DataType::Bool),
13041 Keyword::FLOAT => {
13042 let precision = self.parse_exact_number_optional_precision_scale()?;
13043
13044 if self.parse_keyword(Keyword::UNSIGNED) {
13045 Ok(DataType::FloatUnsigned(precision))
13046 } else {
13047 Ok(DataType::Float(precision))
13048 }
13049 }
13050 Keyword::REAL => {
13051 if self.parse_keyword(Keyword::UNSIGNED) {
13052 Ok(DataType::RealUnsigned)
13053 } else {
13054 Ok(DataType::Real)
13055 }
13056 }
13057 Keyword::FLOAT4 => Ok(DataType::Float4),
13058 Keyword::FLOAT32 => Ok(DataType::Float32),
13059 Keyword::FLOAT64 => Ok(DataType::Float64),
13060 Keyword::FLOAT8 => Ok(DataType::Float8),
13061 Keyword::DOUBLE => {
13062 if self.parse_keyword(Keyword::PRECISION) {
13063 if self.parse_keyword(Keyword::UNSIGNED) {
13064 Ok(DataType::DoublePrecisionUnsigned)
13065 } else {
13066 Ok(DataType::DoublePrecision)
13067 }
13068 } else {
13069 let precision = self.parse_exact_number_optional_precision_scale()?;
13070
13071 if self.parse_keyword(Keyword::UNSIGNED) {
13072 Ok(DataType::DoubleUnsigned(precision))
13073 } else {
13074 Ok(DataType::Double(precision))
13075 }
13076 }
13077 }
13078 Keyword::TINYINT => {
13079 let optional_precision = self.parse_optional_precision();
13080 if self.parse_keyword(Keyword::UNSIGNED) {
13081 Ok(DataType::TinyIntUnsigned(optional_precision?))
13082 } else {
13083 if dialect.supports_data_type_signed_suffix() {
13084 let _ = self.parse_keyword(Keyword::SIGNED);
13085 }
13086 Ok(DataType::TinyInt(optional_precision?))
13087 }
13088 }
13089 Keyword::INT2 => {
13090 let optional_precision = self.parse_optional_precision();
13091 if self.parse_keyword(Keyword::UNSIGNED) {
13092 Ok(DataType::Int2Unsigned(optional_precision?))
13093 } else {
13094 Ok(DataType::Int2(optional_precision?))
13095 }
13096 }
13097 Keyword::SMALLINT => {
13098 let optional_precision = self.parse_optional_precision();
13099 if self.parse_keyword(Keyword::UNSIGNED) {
13100 Ok(DataType::SmallIntUnsigned(optional_precision?))
13101 } else {
13102 if dialect.supports_data_type_signed_suffix() {
13103 let _ = self.parse_keyword(Keyword::SIGNED);
13104 }
13105 Ok(DataType::SmallInt(optional_precision?))
13106 }
13107 }
13108 Keyword::MEDIUMINT => {
13109 let optional_precision = self.parse_optional_precision();
13110 if self.parse_keyword(Keyword::UNSIGNED) {
13111 Ok(DataType::MediumIntUnsigned(optional_precision?))
13112 } else {
13113 if dialect.supports_data_type_signed_suffix() {
13114 let _ = self.parse_keyword(Keyword::SIGNED);
13115 }
13116 Ok(DataType::MediumInt(optional_precision?))
13117 }
13118 }
13119 Keyword::INT => {
13120 let optional_precision = self.parse_optional_precision();
13121 if self.parse_keyword(Keyword::UNSIGNED) {
13122 Ok(DataType::IntUnsigned(optional_precision?))
13123 } else {
13124 if dialect.supports_data_type_signed_suffix() {
13125 let _ = self.parse_keyword(Keyword::SIGNED);
13126 }
13127 Ok(DataType::Int(optional_precision?))
13128 }
13129 }
13130 Keyword::INT4 => {
13131 let optional_precision = self.parse_optional_precision();
13132 if self.parse_keyword(Keyword::UNSIGNED) {
13133 Ok(DataType::Int4Unsigned(optional_precision?))
13134 } else {
13135 Ok(DataType::Int4(optional_precision?))
13136 }
13137 }
13138 Keyword::INT8 => {
13139 let optional_precision = self.parse_optional_precision();
13140 if self.parse_keyword(Keyword::UNSIGNED) {
13141 Ok(DataType::Int8Unsigned(optional_precision?))
13142 } else {
13143 Ok(DataType::Int8(optional_precision?))
13144 }
13145 }
13146 Keyword::INT16 => Ok(DataType::Int16),
13147 Keyword::INT32 => Ok(DataType::Int32),
13148 Keyword::INT64 => Ok(DataType::Int64),
13149 Keyword::INT128 => Ok(DataType::Int128),
13150 Keyword::INT256 => Ok(DataType::Int256),
13151 Keyword::INTEGER => {
13152 let optional_precision = self.parse_optional_precision();
13153 if self.parse_keyword(Keyword::UNSIGNED) {
13154 Ok(DataType::IntegerUnsigned(optional_precision?))
13155 } else {
13156 if dialect.supports_data_type_signed_suffix() {
13157 let _ = self.parse_keyword(Keyword::SIGNED);
13158 }
13159 Ok(DataType::Integer(optional_precision?))
13160 }
13161 }
13162 Keyword::BIGINT => {
13163 let optional_precision = self.parse_optional_precision();
13164 if self.parse_keyword(Keyword::UNSIGNED) {
13165 Ok(DataType::BigIntUnsigned(optional_precision?))
13166 } else {
13167 if dialect.supports_data_type_signed_suffix() {
13168 let _ = self.parse_keyword(Keyword::SIGNED);
13169 }
13170 Ok(DataType::BigInt(optional_precision?))
13171 }
13172 }
13173 Keyword::HUGEINT => Ok(DataType::HugeInt),
13174 Keyword::UBIGINT => Ok(DataType::UBigInt),
13175 Keyword::UHUGEINT => Ok(DataType::UHugeInt),
13176 Keyword::USMALLINT => Ok(DataType::USmallInt),
13177 Keyword::UTINYINT => Ok(DataType::UTinyInt),
13178 Keyword::UINT8 => Ok(DataType::UInt8),
13179 Keyword::UINT16 => Ok(DataType::UInt16),
13180 Keyword::UINT32 => Ok(DataType::UInt32),
13181 Keyword::UINT64 => Ok(DataType::UInt64),
13182 Keyword::UINT128 => Ok(DataType::UInt128),
13183 Keyword::UINT256 => Ok(DataType::UInt256),
13184 Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
13185 Keyword::NVARCHAR => {
13186 Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
13187 }
13188 Keyword::CHARACTER => {
13189 if self.parse_keyword(Keyword::VARYING) {
13190 Ok(DataType::CharacterVarying(
13191 self.parse_optional_character_length()?,
13192 ))
13193 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
13194 Ok(DataType::CharacterLargeObject(
13195 self.parse_optional_precision()?,
13196 ))
13197 } else {
13198 Ok(DataType::Character(self.parse_optional_character_length()?))
13199 }
13200 }
13201 Keyword::CHAR => {
13202 if self.parse_keyword(Keyword::VARYING) {
13203 Ok(DataType::CharVarying(
13204 self.parse_optional_character_length()?,
13205 ))
13206 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
13207 Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
13208 } else {
13209 Ok(DataType::Char(self.parse_optional_character_length()?))
13210 }
13211 }
13212 Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
13213 Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
13214 Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
13215 Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
13216 Keyword::TINYBLOB => Ok(DataType::TinyBlob),
13217 Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
13218 Keyword::LONGBLOB => Ok(DataType::LongBlob),
13219 Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
13220 Keyword::BIT => {
13221 if self.parse_keyword(Keyword::VARYING) {
13222 Ok(DataType::BitVarying(self.parse_optional_precision()?))
13223 } else {
13224 Ok(DataType::Bit(self.parse_optional_precision()?))
13225 }
13226 }
13227 Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
13228 Keyword::UUID => Ok(DataType::Uuid),
13229 Keyword::DATE => Ok(DataType::Date),
13230 Keyword::DATE32 => Ok(DataType::Date32),
13231 Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
13232 Keyword::DATETIME64 => {
13233 self.prev_token();
13234 let (precision, time_zone) = self.parse_datetime_64()?;
13235 Ok(DataType::Datetime64(precision, time_zone))
13236 }
13237 Keyword::TIMESTAMP => {
13238 let precision = self.parse_optional_precision()?;
13239 let tz = if self.parse_keyword(Keyword::WITH) {
13240 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
13241 TimezoneInfo::WithTimeZone
13242 } else if self.parse_keyword(Keyword::WITHOUT) {
13243 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
13244 TimezoneInfo::WithoutTimeZone
13245 } else {
13246 TimezoneInfo::None
13247 };
13248 Ok(DataType::Timestamp(precision, tz))
13249 }
13250 Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
13251 self.parse_optional_precision()?,
13252 TimezoneInfo::Tz,
13253 )),
13254 Keyword::TIMESTAMP_NTZ => {
13255 Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
13256 }
13257 Keyword::TIME => {
13258 let precision = self.parse_optional_precision()?;
13259 let tz = if self.parse_keyword(Keyword::WITH) {
13260 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
13261 TimezoneInfo::WithTimeZone
13262 } else if self.parse_keyword(Keyword::WITHOUT) {
13263 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
13264 TimezoneInfo::WithoutTimeZone
13265 } else {
13266 TimezoneInfo::None
13267 };
13268 Ok(DataType::Time(precision, tz))
13269 }
13270 Keyword::TIMETZ => Ok(DataType::Time(
13271 self.parse_optional_precision()?,
13272 TimezoneInfo::Tz,
13273 )),
13274 Keyword::INTERVAL => {
13275 if self.dialect.supports_interval_options() {
13276 let fields = self.maybe_parse_optional_interval_fields()?;
13277 let precision = self.parse_optional_precision()?;
13278 Ok(DataType::Interval { fields, precision })
13279 } else {
13280 Ok(DataType::Interval {
13281 fields: None,
13282 precision: None,
13283 })
13284 }
13285 }
13286 Keyword::JSON => Ok(DataType::JSON),
13287 Keyword::JSONB => Ok(DataType::JSONB),
13288 Keyword::REGCLASS => Ok(DataType::Regclass),
13289 Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
13290 Keyword::FIXEDSTRING => {
13291 self.expect_token(&Token::LParen)?;
13292 let character_length = self.parse_literal_uint()?;
13293 self.expect_token(&Token::RParen)?;
13294 Ok(DataType::FixedString(character_length))
13295 }
13296 Keyword::TEXT => Ok(DataType::Text),
13297 Keyword::TINYTEXT => Ok(DataType::TinyText),
13298 Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
13299 Keyword::LONGTEXT => Ok(DataType::LongText),
13300 Keyword::BYTEA => Ok(DataType::Bytea),
13301 Keyword::NUMERIC => Ok(DataType::Numeric(
13302 self.parse_exact_number_optional_precision_scale()?,
13303 )),
13304 Keyword::DECIMAL => {
13305 let precision = self.parse_exact_number_optional_precision_scale()?;
13306
13307 if self.parse_keyword(Keyword::UNSIGNED) {
13308 Ok(DataType::DecimalUnsigned(precision))
13309 } else {
13310 Ok(DataType::Decimal(precision))
13311 }
13312 }
13313 Keyword::DEC => {
13314 let precision = self.parse_exact_number_optional_precision_scale()?;
13315
13316 if self.parse_keyword(Keyword::UNSIGNED) {
13317 Ok(DataType::DecUnsigned(precision))
13318 } else {
13319 Ok(DataType::Dec(precision))
13320 }
13321 }
13322 Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
13323 self.parse_exact_number_optional_precision_scale()?,
13324 )),
13325 Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
13326 self.parse_exact_number_optional_precision_scale()?,
13327 )),
13328 Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
13329 Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
13330 Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
13331 Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
13332 Keyword::ARRAY => {
13333 if self.dialect.supports_array_typedef_without_element_type() {
13334 Ok(DataType::Array(ArrayElemTypeDef::None))
13335 } else if dialect_of!(self is ClickHouseDialect) {
13336 Ok(self.parse_sub_type(|internal_type| {
13337 DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
13338 })?)
13339 } else {
13340 self.expect_token(&Token::Lt)?;
13341 let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
13342 trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
13343 Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
13344 inside_type,
13345 ))))
13346 }
13347 }
13348 Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
13349 self.prev_token();
13350 let field_defs = self.parse_duckdb_struct_type_def()?;
13351 Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
13352 }
13353 Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
13354 {
13355 self.prev_token();
13356 let (field_defs, _trailing_bracket) =
13357 self.parse_struct_type_def(Self::parse_struct_field_def)?;
13358 trailing_bracket = _trailing_bracket;
13359 Ok(DataType::Struct(
13360 field_defs,
13361 StructBracketKind::AngleBrackets,
13362 ))
13363 }
13364 Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
13365 self.prev_token();
13366 let fields = self.parse_union_type_def()?;
13367 Ok(DataType::Union(fields))
13368 }
13369 Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13370 Ok(self.parse_sub_type(DataType::Nullable)?)
13371 }
13372 Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13373 Ok(self.parse_sub_type(DataType::LowCardinality)?)
13374 }
13375 Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13376 self.prev_token();
13377 let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
13378 Ok(DataType::Map(
13379 Box::new(key_data_type),
13380 Box::new(value_data_type),
13381 ))
13382 }
13383 Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13384 self.expect_token(&Token::LParen)?;
13385 let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
13386 self.expect_token(&Token::RParen)?;
13387 Ok(DataType::Nested(field_defs))
13388 }
13389 Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13390 self.prev_token();
13391 let field_defs = self.parse_click_house_tuple_def()?;
13392 Ok(DataType::Tuple(field_defs))
13393 }
13394 Keyword::TRIGGER => Ok(DataType::Trigger),
13395 Keyword::SETOF => {
13396 let inner = self.parse_data_type()?;
13397 Ok(DataType::SetOf(Box::new(inner)))
13398 }
13399 Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
13400 let _ = self.parse_keyword(Keyword::TYPE);
13401 Ok(DataType::AnyType)
13402 }
13403 Keyword::TABLE => {
13404 if self.peek_token_ref().token == Token::LParen {
13407 let columns = self.parse_returns_table_columns()?;
13408 Ok(DataType::Table(Some(columns)))
13409 } else {
13410 Ok(DataType::Table(None))
13411 }
13412 }
13413 Keyword::SIGNED => {
13414 if self.parse_keyword(Keyword::INTEGER) {
13415 Ok(DataType::SignedInteger)
13416 } else {
13417 Ok(DataType::Signed)
13418 }
13419 }
13420 Keyword::UNSIGNED => {
13421 if self.parse_keyword(Keyword::INTEGER) {
13422 Ok(DataType::UnsignedInteger)
13423 } else {
13424 Ok(DataType::Unsigned)
13425 }
13426 }
13427 Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
13428 Ok(DataType::TsVector)
13429 }
13430 Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
13431 Ok(DataType::TsQuery)
13432 }
13433 _ => {
13434 self.prev_token();
13435 let type_name = self.parse_object_name(false)?;
13436 if let Some(modifiers) = self.parse_optional_type_modifiers()? {
13437 Ok(DataType::Custom(type_name, modifiers))
13438 } else {
13439 Ok(DataType::Custom(type_name, vec![]))
13440 }
13441 }
13442 },
13443 _ => self.expected_at("a data type name", next_token_index),
13444 }?;
13445
13446 if self.dialect.supports_array_typedef_with_brackets() {
13447 while self.consume_token(&Token::LBracket) {
13448 let size = self.maybe_parse(|p| p.parse_literal_uint())?;
13450 self.expect_token(&Token::RBracket)?;
13451 data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
13452 }
13453 }
13454 Ok((data, trailing_bracket))
13455 }
13456
    /// Parses a single column definition inside a `RETURNS TABLE (...)` clause.
    ///
    /// Currently identical to a regular column definition; kept as a separate
    /// entry point so `RETURNS TABLE` columns can diverge from ordinary
    /// column definitions later without touching callers.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
13460
13461 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13462 self.expect_token(&Token::LParen)?;
13463 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13464 self.expect_token(&Token::RParen)?;
13465 Ok(columns)
13466 }
13467
13468 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13470 self.expect_token(&Token::LParen)?;
13471 let mut values = Vec::new();
13472 loop {
13473 let next_token = self.next_token();
13474 match next_token.token {
13475 Token::SingleQuotedString(value) => values.push(value),
13476 _ => self.expected("a string", next_token)?,
13477 }
13478 let next_token = self.next_token();
13479 match next_token.token {
13480 Token::Comma => (),
13481 Token::RParen => break,
13482 _ => self.expected(", or }", next_token)?,
13483 }
13484 }
13485 Ok(values)
13486 }
13487
13488 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13490 let ident = self.parse_identifier()?;
13491 self.expect_keyword_is(Keyword::AS)?;
13492 let alias = self.parse_identifier()?;
13493 Ok(IdentWithAlias { ident, alias })
13494 }
13495
13496 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13498 let ident = self.parse_identifier()?;
13499 let _after_as = self.parse_keyword(Keyword::AS);
13500 let alias = self.parse_identifier()?;
13501 Ok(IdentWithAlias { ident, alias })
13502 }
13503
13504 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13506 self.parse_comma_separated(|parser| {
13507 parser.expect_token(&Token::LParen)?;
13508 let query = parser.parse_query()?;
13509 parser.expect_token(&Token::RParen)?;
13510 Ok(*query)
13511 })
13512 }
13513
    /// Parses the set quantifier following a pipe set operator and requires
    /// it to be `DISTINCT` (or `DISTINCT BY NAME`); anything else is an error.
    ///
    /// `operator_name` is used only to build the error message.
    fn parse_distinct_required_set_quantifier(
        &mut self,
        operator_name: &str,
    ) -> Result<SetQuantifier, ParserError> {
        // NOTE(review): `Intersect` is passed regardless of which pipe
        // operator is being parsed — presumably any of these operators shares
        // the same quantifier grammar; confirm against parse_set_quantifier.
        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
        match quantifier {
            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
            _ => Err(ParserError::ParserError(format!(
                "{operator_name} pipe operator requires DISTINCT modifier",
            ))),
        }
    }
13527
13528 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13530 if self.parse_keyword(Keyword::AS) {
13531 Ok(Some(self.parse_identifier()?))
13532 } else {
13533 self.maybe_parse(|parser| parser.parse_identifier())
13535 }
13536 }
13537
13538 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13540 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13541 parser.dialect.is_select_item_alias(explicit, kw, parser)
13542 }
13543 self.parse_optional_alias_inner(None, validator)
13544 }
13545
13546 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
13550 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13551 parser.dialect.is_table_factor_alias(explicit, kw, parser)
13552 }
13553 let explicit = self.peek_keyword(Keyword::AS);
13554 match self.parse_optional_alias_inner(None, validator)? {
13555 Some(name) => {
13556 let columns = self.parse_table_alias_column_defs()?;
13557 Ok(Some(TableAlias {
13558 explicit,
13559 name,
13560 columns,
13561 }))
13562 }
13563 None => Ok(None),
13564 }
13565 }
13566
13567 fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
13568 let mut hints = vec![];
13569 while let Some(hint_type) =
13570 self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
13571 {
13572 let hint_type = match hint_type {
13573 Keyword::USE => TableIndexHintType::Use,
13574 Keyword::IGNORE => TableIndexHintType::Ignore,
13575 Keyword::FORCE => TableIndexHintType::Force,
13576 _ => {
13577 return self.expected_ref(
13578 "expected to match USE/IGNORE/FORCE keyword",
13579 self.peek_token_ref(),
13580 )
13581 }
13582 };
13583 let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
13584 Some(Keyword::INDEX) => TableIndexType::Index,
13585 Some(Keyword::KEY) => TableIndexType::Key,
13586 _ => {
13587 return self
13588 .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
13589 }
13590 };
13591 let for_clause = if self.parse_keyword(Keyword::FOR) {
13592 let clause = if self.parse_keyword(Keyword::JOIN) {
13593 TableIndexHintForClause::Join
13594 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13595 TableIndexHintForClause::OrderBy
13596 } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
13597 TableIndexHintForClause::GroupBy
13598 } else {
13599 return self.expected_ref(
13600 "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
13601 self.peek_token_ref(),
13602 );
13603 };
13604 Some(clause)
13605 } else {
13606 None
13607 };
13608
13609 self.expect_token(&Token::LParen)?;
13610 let index_names = if self.peek_token_ref().token != Token::RParen {
13611 self.parse_comma_separated(Parser::parse_identifier)?
13612 } else {
13613 vec![]
13614 };
13615 self.expect_token(&Token::RParen)?;
13616 hints.push(TableIndexHints {
13617 hint_type,
13618 index_type,
13619 for_clause,
13620 index_names,
13621 });
13622 }
13623 Ok(hints)
13624 }
13625
13626 pub fn parse_optional_alias(
13630 &mut self,
13631 reserved_kwds: &[Keyword],
13632 ) -> Result<Option<Ident>, ParserError> {
13633 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13634 false
13635 }
13636 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13637 }
13638
13639 fn parse_optional_alias_inner<F>(
13646 &mut self,
13647 reserved_kwds: Option<&[Keyword]>,
13648 validator: F,
13649 ) -> Result<Option<Ident>, ParserError>
13650 where
13651 F: Fn(bool, &Keyword, &mut Parser) -> bool,
13652 {
13653 let after_as = self.parse_keyword(Keyword::AS);
13654
13655 let next_token = self.next_token();
13656 match next_token.token {
13657 Token::Word(w)
13660 if reserved_kwds.is_some()
13661 && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
13662 {
13663 Ok(Some(w.into_ident(next_token.span)))
13664 }
13665 Token::Word(w) if validator(after_as, &w.keyword, self) => {
13669 Ok(Some(w.into_ident(next_token.span)))
13670 }
13671 Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
13673 Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
13674 _ => {
13675 if after_as {
13676 return self.expected("an identifier after AS", next_token);
13677 }
13678 self.prev_token();
13679 Ok(None) }
13681 }
13682 }
13683
    /// Parses an optional `GROUP BY` clause, returning `Ok(None)` when absent.
    ///
    /// Supports `GROUP BY ALL`, a plain expression list, and — gated on the
    /// dialect — trailing `WITH ROLLUP|CUBE|TOTALS` modifiers and a
    /// `GROUPING SETS (...)` modifier.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` is encoded as `None`; otherwise parse the list.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Zero or more `WITH ROLLUP|CUBE|TOTALS` may follow.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        _ => {
                            // Unreachable: expect_one_of_keywords only returns
                            // one of the three keywords listed above.
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            // `GROUPING SETS ((a, b), c, ...)` — each element is either a
            // parenthesized tuple or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            // `ALL` maps to GroupByExpr::All; a list maps to Expressions.
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13740
13741 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
13743 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13744 let order_by =
13745 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
13746 let order_by_options = self.parse_order_by_options()?;
13747 OrderBy {
13748 kind: OrderByKind::All(order_by_options),
13749 interpolate: None,
13750 }
13751 } else {
13752 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13753 let interpolate = if self.dialect.supports_interpolate() {
13754 self.parse_interpolations()?
13755 } else {
13756 None
13757 };
13758 OrderBy {
13759 kind: OrderByKind::Expressions(exprs),
13760 interpolate,
13761 }
13762 };
13763 Ok(Some(order_by))
13764 } else {
13765 Ok(None)
13766 }
13767 }
13768
    /// Parses an optional LIMIT/OFFSET clause in any of the accepted shapes:
    /// `OFFSET ... LIMIT ...`, `LIMIT ... OFFSET ...`, the comma form
    /// `LIMIT <offset>, <limit>` (dialect-gated), and `LIMIT ... BY ...`
    /// (dialect-gated). Returns `Ok(None)` when no limit/offset is present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may come first, e.g. `OFFSET 5 LIMIT 10`.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // Comma form `LIMIT <offset>, <limit>`: only when no OFFSET was
            // already parsed and the first expression is present.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // Dialect-gated `LIMIT n BY expr, ...`.
            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            // `limit` is Some(expr-or-None): the outer Some records that the
            // LIMIT keyword itself was seen even if no expression followed.
            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT, but only if not already parsed.
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit != Some(None)` filters out a LIMIT keyword whose expression
        // was absent — NOTE(review): presumably the `LIMIT ALL` case; confirm
        // against parse_limit.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
13820
13821 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13824 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13825 let fn_name = self.parse_object_name(false)?;
13826 self.parse_function_call(fn_name)
13827 .map(TableObject::TableFunction)
13828 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13829 self.parse_parenthesized(|p| p.parse_query())
13830 .map(TableObject::TableQuery)
13831 } else {
13832 self.parse_object_name(false).map(TableObject::TableName)
13833 }
13834 }
13835
    /// Parses a possibly-qualified object name, e.g. `foo` or
    /// `myschema."table"`.
    ///
    /// `in_table_clause` indicates the name appears in a table position,
    /// which enables BigQuery's unquoted hyphenated identifiers.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13845
    /// Worker for [`Self::parse_object_name`] that can additionally accept
    /// `*` as a name part when `allow_wildcards` is set.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names may contain unquoted hyphens
            // (e.g. `my-project.dataset.table`).
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // Accept `*` as a name part, stored as an unquoted ident.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    // NOTE(review): this branch looks unreachable — the same
                    // condition was already tested by the enclosing `if`, we
                    // are in its `else`, and neither operand changes here.
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // Double-dot notation `db..table`: insert an empty part.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a function call as a name part.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery: a single part may contain embedded dots (e.g. a quoted
        // `project.dataset.table`); split such parts into separate
        // identifiers, preserving quote style and span.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13942
13943 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
13945 let mut idents = vec![];
13946 loop {
13947 let token = self.peek_token_ref();
13948 match &token.token {
13949 Token::Word(w) => {
13950 idents.push(w.to_ident(token.span));
13951 }
13952 Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
13953 break
13954 }
13955 _ => {}
13956 }
13957 self.advance_token();
13958 }
13959 Ok(idents)
13960 }
13961
13962 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
14002 let mut idents = vec![];
14003
14004 let next_token = self.next_token();
14006 match next_token.token {
14007 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
14008 Token::EOF => {
14009 return Err(ParserError::ParserError(
14010 "Empty input when parsing identifier".to_string(),
14011 ))?
14012 }
14013 token => {
14014 return Err(ParserError::ParserError(format!(
14015 "Unexpected token in identifier: {token}"
14016 )))?
14017 }
14018 };
14019
14020 loop {
14022 match self.next_token().token {
14023 Token::Period => {
14025 let next_token = self.next_token();
14026 match next_token.token {
14027 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
14028 Token::EOF => {
14029 return Err(ParserError::ParserError(
14030 "Trailing period in identifier".to_string(),
14031 ))?
14032 }
14033 token => {
14034 return Err(ParserError::ParserError(format!(
14035 "Unexpected token following period in identifier: {token}"
14036 )))?
14037 }
14038 }
14039 }
14040 Token::EOF => break,
14041 token => {
14042 return Err(ParserError::ParserError(format!(
14043 "Unexpected token in identifier: {token}"
14044 )))?;
14045 }
14046 }
14047 }
14048
14049 Ok(idents)
14050 }
14051
14052 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
14054 let next_token = self.next_token();
14055 match next_token.token {
14056 Token::Word(w) => Ok(w.into_ident(next_token.span)),
14057 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
14058 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
14059 _ => self.expected("identifier", next_token),
14060 }
14061 }
14062
    /// Parses an unquoted identifier that may contain hyphens (BigQuery
    /// style, e.g. `my-project`).
    ///
    /// Returns the identifier and a flag that is `true` when parsing stopped
    /// because the tokenizer had folded a trailing period into a number
    /// (as in `my-project-123.dataset`), so the caller should treat a name
    /// separator as already consumed.
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted words may be extended across hyphens.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // A number like `123.` means the tokenizer
                                // absorbed the period that separates name
                                // parts: keep the digits and report the
                                // separator as seen (`true` below).
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A trailing numeric segment must be followed
                                // by whitespace unless a period continues the
                                // multipart name.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word (e.g. quoted string): fall back to plain parsing.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
14140
14141 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
14143 if self.consume_token(&Token::LParen) {
14144 if self.peek_token_ref().token == Token::RParen {
14145 self.next_token();
14146 Ok(vec![])
14147 } else {
14148 let cols = self.parse_comma_separated_with_trailing_commas(
14149 Parser::parse_view_column,
14150 self.dialect.supports_column_definition_trailing_commas(),
14151 Self::is_reserved_for_column_alias,
14152 )?;
14153 self.expect_token(&Token::RParen)?;
14154 Ok(cols)
14155 }
14156 } else {
14157 Ok(vec![])
14158 }
14159 }
14160
14161 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
14163 let name = self.parse_identifier()?;
14164 let options = self.parse_view_column_options()?;
14165 let data_type = if dialect_of!(self is ClickHouseDialect) {
14166 Some(self.parse_data_type()?)
14167 } else {
14168 None
14169 };
14170 Ok(ViewColumnDef {
14171 name,
14172 data_type,
14173 options,
14174 })
14175 }
14176
14177 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
14178 let mut options = Vec::new();
14179 loop {
14180 let option = self.parse_optional_column_option()?;
14181 if let Some(option) = option {
14182 options.push(option);
14183 } else {
14184 break;
14185 }
14186 }
14187 if options.is_empty() {
14188 Ok(None)
14189 } else if self.dialect.supports_space_separated_column_options() {
14190 Ok(Some(ColumnOptions::SpaceSeparated(options)))
14191 } else {
14192 Ok(Some(ColumnOptions::CommaSeparated(options)))
14193 }
14194 }
14195
    /// Parses a parenthesized, comma-separated list of simple column names.
    ///
    /// With `Optional`, a missing list yields an empty `Vec`; `allow_empty`
    /// permits `()`.
    pub fn parse_parenthesized_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Ident>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
    }
14205
14206 pub fn parse_parenthesized_compound_identifier_list(
14208 &mut self,
14209 optional: IsOptional,
14210 allow_empty: bool,
14211 ) -> Result<Vec<Expr>, ParserError> {
14212 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
14213 Ok(Expr::CompoundIdentifier(
14214 p.parse_period_separated(|p| p.parse_identifier())?,
14215 ))
14216 })
14217 }
14218
    /// Parses a mandatory, non-empty parenthesized list of index column
    /// expressions (as used in CREATE INDEX).
    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
            p.parse_create_index_expr()
        })
    }
14226
    /// Parses a parenthesized, comma-separated list of qualified object
    /// names.
    ///
    /// With `Optional`, a missing list yields an empty `Vec`; `allow_empty`
    /// permits `()`.
    pub fn parse_parenthesized_qualified_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<ObjectName>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            p.parse_object_name(true)
        })
    }
14238
14239 fn parse_parenthesized_column_list_inner<F, T>(
14242 &mut self,
14243 optional: IsOptional,
14244 allow_empty: bool,
14245 mut f: F,
14246 ) -> Result<Vec<T>, ParserError>
14247 where
14248 F: FnMut(&mut Parser) -> Result<T, ParserError>,
14249 {
14250 if self.consume_token(&Token::LParen) {
14251 if allow_empty && self.peek_token_ref().token == Token::RParen {
14252 self.next_token();
14253 Ok(vec![])
14254 } else {
14255 let cols = self.parse_comma_separated(|p| f(p))?;
14256 self.expect_token(&Token::RParen)?;
14257 Ok(cols)
14258 }
14259 } else if optional == Optional {
14260 Ok(vec![])
14261 } else {
14262 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
14263 }
14264 }
14265
14266 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
14268 if self.consume_token(&Token::LParen) {
14269 let cols = self.parse_comma_separated(|p| {
14270 let name = p.parse_identifier()?;
14271 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
14272 Ok(TableAliasColumnDef { name, data_type })
14273 })?;
14274 self.expect_token(&Token::RParen)?;
14275 Ok(cols)
14276 } else {
14277 Ok(vec![])
14278 }
14279 }
14280
14281 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
14283 self.expect_token(&Token::LParen)?;
14284 let n = self.parse_literal_uint()?;
14285 self.expect_token(&Token::RParen)?;
14286 Ok(n)
14287 }
14288
14289 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
14291 if self.consume_token(&Token::LParen) {
14292 let n = self.parse_literal_uint()?;
14293 self.expect_token(&Token::RParen)?;
14294 Ok(Some(n))
14295 } else {
14296 Ok(None)
14297 }
14298 }
14299
    /// Parses the optional field qualifier of an INTERVAL type, e.g. `YEAR`,
    /// `DAY TO SECOND`, `MINUTE TO SECOND`.
    ///
    /// Returns `Ok(None)` when no interval-field keyword follows.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            // `YEAR [TO MONTH]`
            Some(Keyword::YEAR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            // `DAY [TO HOUR|MINUTE|SECOND]`
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        // Defensive: expect_one_of_keywords only returns a
                        // keyword from the list above.
                        _ => {
                            self.prev_token();
                            self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            // `HOUR [TO MINUTE|SECOND]`
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        _ => {
                            self.prev_token();
                            self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            // `MINUTE [TO SECOND]`
            Some(Keyword::MINUTE) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            // MONTH and SECOND take no `TO <field>` range.
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            // Defensive: parse_one_of_keywords only returns keywords from
            // the list given above.
            Some(_) => {
                self.prev_token();
                self.expected_ref(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token_ref(),
                )
            }
            None => Ok(None),
        }
    }
14378
14379 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
14387 self.expect_keyword_is(Keyword::DATETIME64)?;
14388 self.expect_token(&Token::LParen)?;
14389 let precision = self.parse_literal_uint()?;
14390 let time_zone = if self.consume_token(&Token::Comma) {
14391 Some(self.parse_literal_string()?)
14392 } else {
14393 None
14394 };
14395 self.expect_token(&Token::RParen)?;
14396 Ok((precision, time_zone))
14397 }
14398
14399 pub fn parse_optional_character_length(
14401 &mut self,
14402 ) -> Result<Option<CharacterLength>, ParserError> {
14403 if self.consume_token(&Token::LParen) {
14404 let character_length = self.parse_character_length()?;
14405 self.expect_token(&Token::RParen)?;
14406 Ok(Some(character_length))
14407 } else {
14408 Ok(None)
14409 }
14410 }
14411
14412 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
14414 if self.consume_token(&Token::LParen) {
14415 let binary_length = self.parse_binary_length()?;
14416 self.expect_token(&Token::RParen)?;
14417 Ok(Some(binary_length))
14418 } else {
14419 Ok(None)
14420 }
14421 }
14422
14423 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
14425 if self.parse_keyword(Keyword::MAX) {
14426 return Ok(CharacterLength::Max);
14427 }
14428 let length = self.parse_literal_uint()?;
14429 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
14430 Some(CharLengthUnits::Characters)
14431 } else if self.parse_keyword(Keyword::OCTETS) {
14432 Some(CharLengthUnits::Octets)
14433 } else {
14434 None
14435 };
14436 Ok(CharacterLength::IntegerLength { length, unit })
14437 }
14438
14439 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
14441 if self.parse_keyword(Keyword::MAX) {
14442 return Ok(BinaryLength::Max);
14443 }
14444 let length = self.parse_literal_uint()?;
14445 Ok(BinaryLength::IntegerLength { length })
14446 }
14447
14448 pub fn parse_optional_precision_scale(
14450 &mut self,
14451 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14452 if self.consume_token(&Token::LParen) {
14453 let n = self.parse_literal_uint()?;
14454 let scale = if self.consume_token(&Token::Comma) {
14455 Some(self.parse_literal_uint()?)
14456 } else {
14457 None
14458 };
14459 self.expect_token(&Token::RParen)?;
14460 Ok((Some(n), scale))
14461 } else {
14462 Ok((None, None))
14463 }
14464 }
14465
14466 pub fn parse_exact_number_optional_precision_scale(
14468 &mut self,
14469 ) -> Result<ExactNumberInfo, ParserError> {
14470 if self.consume_token(&Token::LParen) {
14471 let precision = self.parse_literal_uint()?;
14472 let scale = if self.consume_token(&Token::Comma) {
14473 Some(self.parse_signed_integer()?)
14474 } else {
14475 None
14476 };
14477
14478 self.expect_token(&Token::RParen)?;
14479
14480 match scale {
14481 None => Ok(ExactNumberInfo::Precision(precision)),
14482 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14483 }
14484 } else {
14485 Ok(ExactNumberInfo::None)
14486 }
14487 }
14488
14489 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14491 let is_negative = self.consume_token(&Token::Minus);
14492
14493 if !is_negative {
14494 let _ = self.consume_token(&Token::Plus);
14495 }
14496
14497 let current_token = self.peek_token_ref();
14498 match ¤t_token.token {
14499 Token::Number(s, _) => {
14500 let s = s.clone();
14501 let span_start = current_token.span.start;
14502 self.advance_token();
14503 let value = Self::parse::<i64>(s, span_start)?;
14504 Ok(if is_negative { -value } else { value })
14505 }
14506 _ => self.expected_ref("number", current_token),
14507 }
14508 }
14509
14510 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
14512 if self.consume_token(&Token::LParen) {
14513 let mut modifiers = Vec::new();
14514 loop {
14515 let next_token = self.next_token();
14516 match next_token.token {
14517 Token::Word(w) => modifiers.push(w.to_string()),
14518 Token::Number(n, _) => modifiers.push(n),
14519 Token::SingleQuotedString(s) => modifiers.push(s),
14520
14521 Token::Comma => {
14522 continue;
14523 }
14524 Token::RParen => {
14525 break;
14526 }
14527 _ => self.expected("type modifiers", next_token)?,
14528 }
14529 }
14530
14531 Ok(Some(modifiers))
14532 } else {
14533 Ok(None)
14534 }
14535 }
14536
14537 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14539 where
14540 F: FnOnce(Box<DataType>) -> DataType,
14541 {
14542 self.expect_token(&Token::LParen)?;
14543 let inside_type = self.parse_data_type()?;
14544 self.expect_token(&Token::RParen)?;
14545 Ok(parent_type(inside_type.into()))
14546 }
14547
    /// Parses a DELETE statement and wraps it as a boxed
    /// `SetExpr::Delete` for use where a set expression is expected.
    fn parse_delete_setexpr_boxed(
        &mut self,
        delete_token: TokenWithSpan,
    ) -> Result<Box<SetExpr>, ParserError> {
        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
    }
14557
    /// Parses a DELETE statement; `delete_token` is the already-consumed
    /// DELETE keyword token, kept for span tracking.
    ///
    /// Handles the multi-table form (`DELETE t1, t2 FROM ...`), FROM-less
    /// dialects, and the optional OUTPUT, USING, WHERE, RETURNING, ORDER BY
    /// and LIMIT clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Determine the explicit table list and whether FROM was written.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                // These dialects allow `DELETE <table>` with no FROM keyword.
                (vec![], false)
            } else {
                // Multi-table form: `DELETE t1, t2 FROM ...`.
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        // Optional OUTPUT clause, if the dialect produces one.
        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            // Record whether FROM appeared so the AST round-trips faithfully.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
14622
14623 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
14626 let modifier_keyword =
14627 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
14628
14629 let id = self.parse_literal_uint()?;
14630
14631 let modifier = match modifier_keyword {
14632 Some(Keyword::CONNECTION) => Some(KillType::Connection),
14633 Some(Keyword::QUERY) => Some(KillType::Query),
14634 Some(Keyword::MUTATION) => {
14635 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
14636 Some(KillType::Mutation)
14637 } else {
14638 self.expected_ref(
14639 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
14640 self.peek_token_ref(),
14641 )?
14642 }
14643 }
14644 _ => None,
14645 };
14646
14647 Ok(Statement::Kill { modifier, id })
14648 }
14649
    /// Parse an `EXPLAIN` / `DESCRIBE` / `DESC` statement.
    ///
    /// If a full statement follows, yields [`Statement::Explain`]; if a bare
    /// table name follows, yields [`Statement::ExplainTable`].
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options (`EXPLAIN (...) ...`) apply only to
        // EXPLAIN proper in dialects that support them; otherwise fall back
        // to the keyword-style modifiers.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        // Try to parse a statement to explain; if that fails, treat the
        // remaining input as `DESCRIBE <table>`.
        match self.maybe_parse(|parser| parser.parse_statement())? {
            // Nested EXPLAIN is rejected outright.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // Hive-style `DESCRIBE [EXTENDED | FORMATTED] <table>`.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    // An optional TABLE keyword may precede the table name.
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14720
    /// Parse a query expression: an optional `WITH` (CTE) clause followed by
    /// a query body and optional `ORDER BY`, `LIMIT`, `SETTINGS`, `FETCH`,
    /// locking, `FORMAT` and pipe-operator clauses.
    ///
    /// `INSERT`/`UPDATE`/`DELETE`/`MERGE` are also accepted in query
    /// position (e.g. as the body of a CTE); for those, only the `WITH`
    /// clause applies and all query-level clauses are left empty.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Bound recursion depth: deeply nested queries would otherwise
        // overflow the stack.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // A `FOR` keyword introduces either a single FOR XML/JSON/BROWSE
            // clause (which terminates the loop) or one or more locking
            // clauses.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14852
    /// Parse a chain of `|>` pipe operators following a query body,
    /// dispatching on the keyword after each `|>`. Returns the operators in
    /// source order; stops at the first token that is not `|>`.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // `AGGREGATE [exprs] [GROUP BY exprs]`: the expression
                    // list before GROUP BY is optional.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                // INTERSECT/EXCEPT pipe operators require an explicit
                // DISTINCT (or ALL) quantifier.
                Keyword::INTERSECT => {
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // `PIVOT (aggs FOR col IN (ANY [ORDER BY ...] | subquery
                    // | value list)) [alias]` — note the doubly nested parens.
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // `UNPIVOT (value FOR name IN (cols)) [alias]`.
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Step back so the join parser sees its leading keyword;
                    // exactly one join is allowed per pipe operator.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                // Defensive: every keyword accepted above must be handled.
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
15071
15072 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
15073 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
15074 {
15075 let key_values = self.parse_comma_separated(|p| {
15076 let key = p.parse_identifier()?;
15077 p.expect_token(&Token::Eq)?;
15078 let value = p.parse_expr()?;
15079 Ok(Setting { key, value })
15080 })?;
15081 Some(key_values)
15082 } else {
15083 None
15084 };
15085 Ok(settings)
15086 }
15087
15088 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
15090 if self.parse_keyword(Keyword::XML) {
15091 Ok(Some(self.parse_for_xml()?))
15092 } else if self.parse_keyword(Keyword::JSON) {
15093 Ok(Some(self.parse_for_json()?))
15094 } else if self.parse_keyword(Keyword::BROWSE) {
15095 Ok(Some(ForClause::Browse))
15096 } else {
15097 Ok(None)
15098 }
15099 }
15100
15101 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
15103 let for_xml = if self.parse_keyword(Keyword::RAW) {
15104 let mut element_name = None;
15105 if self.peek_token_ref().token == Token::LParen {
15106 self.expect_token(&Token::LParen)?;
15107 element_name = Some(self.parse_literal_string()?);
15108 self.expect_token(&Token::RParen)?;
15109 }
15110 ForXml::Raw(element_name)
15111 } else if self.parse_keyword(Keyword::AUTO) {
15112 ForXml::Auto
15113 } else if self.parse_keyword(Keyword::EXPLICIT) {
15114 ForXml::Explicit
15115 } else if self.parse_keyword(Keyword::PATH) {
15116 let mut element_name = None;
15117 if self.peek_token_ref().token == Token::LParen {
15118 self.expect_token(&Token::LParen)?;
15119 element_name = Some(self.parse_literal_string()?);
15120 self.expect_token(&Token::RParen)?;
15121 }
15122 ForXml::Path(element_name)
15123 } else {
15124 return Err(ParserError::ParserError(
15125 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
15126 ));
15127 };
15128 let mut elements = false;
15129 let mut binary_base64 = false;
15130 let mut root = None;
15131 let mut r#type = false;
15132 while self.peek_token_ref().token == Token::Comma {
15133 self.next_token();
15134 if self.parse_keyword(Keyword::ELEMENTS) {
15135 elements = true;
15136 } else if self.parse_keyword(Keyword::BINARY) {
15137 self.expect_keyword_is(Keyword::BASE64)?;
15138 binary_base64 = true;
15139 } else if self.parse_keyword(Keyword::ROOT) {
15140 self.expect_token(&Token::LParen)?;
15141 root = Some(self.parse_literal_string()?);
15142 self.expect_token(&Token::RParen)?;
15143 } else if self.parse_keyword(Keyword::TYPE) {
15144 r#type = true;
15145 }
15146 }
15147 Ok(ForClause::Xml {
15148 for_xml,
15149 elements,
15150 binary_base64,
15151 root,
15152 r#type,
15153 })
15154 }
15155
15156 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
15158 let for_json = if self.parse_keyword(Keyword::AUTO) {
15159 ForJson::Auto
15160 } else if self.parse_keyword(Keyword::PATH) {
15161 ForJson::Path
15162 } else {
15163 return Err(ParserError::ParserError(
15164 "Expected FOR JSON [AUTO | PATH ]".to_string(),
15165 ));
15166 };
15167 let mut root = None;
15168 let mut include_null_values = false;
15169 let mut without_array_wrapper = false;
15170 while self.peek_token_ref().token == Token::Comma {
15171 self.next_token();
15172 if self.parse_keyword(Keyword::ROOT) {
15173 self.expect_token(&Token::LParen)?;
15174 root = Some(self.parse_literal_string()?);
15175 self.expect_token(&Token::RParen)?;
15176 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
15177 include_null_values = true;
15178 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
15179 without_array_wrapper = true;
15180 }
15181 }
15182 Ok(ForClause::Json {
15183 for_json,
15184 root,
15185 include_null_values,
15186 without_array_wrapper,
15187 })
15188 }
15189
    /// Parse one common table expression after `WITH`:
    /// `name [( col, ... )] AS ( query )`, plus dialect extensions (CTEs
    /// without `AS`, Postgres `[NOT] MATERIALIZED`, trailing `FROM ident`).
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // Dialects that allow omitting AS: speculatively try `name (query)`
        // and backtrack (via maybe_parse) if it is not a query.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Optional column list before AS; when AS is optional in the
        // dialect, it may still be absent after the column list.
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // PostgreSQL: `AS [NOT] MATERIALIZED (query)`.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
15264
15265 pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
15274 let expr = if self.peek_keyword(Keyword::SELECT)
15277 || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
15278 {
15279 SetExpr::Select(self.parse_select().map(Box::new)?)
15280 } else if self.consume_token(&Token::LParen) {
15281 let subquery = self.parse_query()?;
15283 self.expect_token(&Token::RParen)?;
15284 SetExpr::Query(subquery)
15285 } else if self.parse_keyword(Keyword::VALUES) {
15286 let is_mysql = dialect_of!(self is MySqlDialect);
15287 SetExpr::Values(self.parse_values(is_mysql, false)?)
15288 } else if self.parse_keyword(Keyword::VALUE) {
15289 let is_mysql = dialect_of!(self is MySqlDialect);
15290 SetExpr::Values(self.parse_values(is_mysql, true)?)
15291 } else if self.parse_keyword(Keyword::TABLE) {
15292 SetExpr::Table(Box::new(self.parse_as_table()?))
15293 } else {
15294 return self.expected_ref(
15295 "SELECT, VALUES, or a subquery in the query body",
15296 self.peek_token_ref(),
15297 );
15298 };
15299
15300 self.parse_remaining_set_exprs(expr, precedence)
15301 }
15302
15303 fn parse_remaining_set_exprs(
15307 &mut self,
15308 mut expr: SetExpr,
15309 precedence: u8,
15310 ) -> Result<Box<SetExpr>, ParserError> {
15311 loop {
15312 let op = self.parse_set_operator(&self.peek_token().token);
15314 let next_precedence = match op {
15315 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
15317 10
15318 }
15319 Some(SetOperator::Intersect) => 20,
15321 None => break,
15323 };
15324 if precedence >= next_precedence {
15325 break;
15326 }
15327 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
15329 expr = SetExpr::SetOperation {
15330 left: Box::new(expr),
15331 op: op.unwrap(),
15332 set_quantifier,
15333 right: self.parse_query_body(next_precedence)?,
15334 };
15335 }
15336
15337 Ok(expr.into())
15338 }
15339
15340 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
15342 match token {
15343 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
15344 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
15345 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
15346 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
15347 _ => None,
15348 }
15349 }
15350
15351 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
15353 match op {
15354 Some(
15355 SetOperator::Except
15356 | SetOperator::Intersect
15357 | SetOperator::Union
15358 | SetOperator::Minus,
15359 ) => {
15360 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
15361 SetQuantifier::DistinctByName
15362 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15363 SetQuantifier::ByName
15364 } else if self.parse_keyword(Keyword::ALL) {
15365 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15366 SetQuantifier::AllByName
15367 } else {
15368 SetQuantifier::All
15369 }
15370 } else if self.parse_keyword(Keyword::DISTINCT) {
15371 SetQuantifier::Distinct
15372 } else {
15373 SetQuantifier::None
15374 }
15375 }
15376 _ => SetQuantifier::None,
15377 }
15378 }
15379
    /// Parse a restricted `SELECT` statement (no CTEs, `ORDER BY` or set
    /// operations — those are handled by [`Parser::parse_query`]), including
    /// dialect extensions: FROM-first selects, MySQL select modifiers,
    /// `TOP`, lateral views, `PREWHERE`, `QUALIFY` and named windows.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // FROM-first form (`FROM t [SELECT ...]`) for dialects that
        // support it.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                // Bare `FROM t` with no SELECT at all: empty projection.
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        let value_table_mode = self.parse_value_table_mode()?;

        // MySQL-style modifiers; these may also consume ALL/DISTINCT.
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // TOP may come before or after DISTINCT depending on the dialect.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // Prefer distinctness already consumed by the modifier parser.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Use the FROM parsed up-front (FROM-first form) if present,
        // otherwise parse an ordinary trailing FROM clause.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive-style `LATERAL VIEW [OUTER] expr name [AS] aliases` clauses.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        // Stop alias parsing at keywords that can follow.
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; record which came
        // first (window_before_qualify) so the AST can round-trip.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15607
    /// Scan and consume leading whitespace/comment tokens, collecting any
    /// optimizer hints (comments whose text begins `<alnum-prefix>+`, e.g.
    /// `/*+ ... */`). Returns an empty list when the dialect does not
    /// support comment-style hints, leaving the token stream untouched.
    fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
        let supports_hints = self.dialect.supports_comment_optimizer_hint();
        if !supports_hints {
            return Ok(vec![]);
        }
        let mut hints = vec![];
        loop {
            // Peek raw tokens (no whitespace skipping): hints live in
            // comment tokens that the normal accessors would skip over.
            let t = self.peek_nth_token_no_skip_ref(0);
            let Token::Whitespace(ws) = &t.token else {
                break;
            };
            match ws {
                Whitespace::SingleLineComment { comment, prefix } => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            // Preserve the comment prefix (e.g. `--`) for
                            // faithful re-serialization.
                            style: OptimizerHintStyle::SingleLine {
                                prefix: prefix.clone(),
                            },
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::MultiLineComment(comment) => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::MultiLine,
                        });
                    }
                    self.next_token_no_skip();
                }
                // Plain whitespace between comments is consumed and ignored.
                Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
                    self.next_token_no_skip();
                }
            }
        }
        Ok(hints)
    }
15657
15658 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15661 let (before_plus, text) = comment.split_once('+')?;
15662 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15663 Some((before_plus.to_string(), text.to_string()))
15664 } else {
15665 None
15666 }
15667 }
15668
    /// Parse MySQL-style modifiers appearing right after `SELECT`
    /// (`HIGH_PRIORITY`, `STRAIGHT_JOIN`, the `SQL_*` family), possibly
    /// interleaved with `ALL`/`DISTINCT`/`DISTINCTROW`.
    ///
    /// Returns the collected modifiers (`None` if none were set) together
    /// with any distinctness parsed here, so the caller need not re-parse it.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    // Step back so parse_all_or_distinct can handle the
                    // complete ALL/DISTINCT grammar itself.
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                // DISTINCTROW is treated as DISTINCT.
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                // Reached when a distinctness keyword repeats after one was
                // already parsed (the guards above fail): step back and
                // report the error at the offending token.
                _ => {
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Collapse an all-defaults struct to None.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15730
15731 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
15732 if !dialect_of!(self is BigQueryDialect) {
15733 return Ok(None);
15734 }
15735
15736 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
15737 Some(ValueTableMode::DistinctAsValue)
15738 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
15739 Some(ValueTableMode::DistinctAsStruct)
15740 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
15741 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
15742 {
15743 Some(ValueTableMode::AsValue)
15744 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
15745 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
15746 {
15747 Some(ValueTableMode::AsStruct)
15748 } else if self.parse_keyword(Keyword::AS) {
15749 self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
15750 } else {
15751 None
15752 };
15753
15754 Ok(mode)
15755 }
15756
15757 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15761 where
15762 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15763 {
15764 let current_state = self.state;
15765 self.state = state;
15766 let res = f(self);
15767 self.state = current_state;
15768 res
15769 }
15770
15771 pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
15773 let mut clauses = Vec::with_capacity(2);
15774 loop {
15775 if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
15776 clauses.push(ConnectByKind::StartWith {
15777 start_token: self.token_at(idx).clone().into(),
15778 condition: self.parse_expr()?.into(),
15779 });
15780 } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
15781 {
15782 clauses.push(ConnectByKind::ConnectBy {
15783 connect_token: self.token_at(idx).clone().into(),
15784 nocycle: self.parse_keyword(Keyword::NOCYCLE),
15785 relationships: self.with_state(ParserState::ConnectBy, |parser| {
15786 parser.parse_comma_separated(Parser::parse_expr)
15787 })?,
15788 });
15789 } else {
15790 break;
15791 }
15792 }
15793 Ok(clauses)
15794 }
15795
15796 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15798 let token1 = self.next_token();
15799 let token2 = self.next_token();
15800 let token3 = self.next_token();
15801
15802 let table_name;
15803 let schema_name;
15804 if token2 == Token::Period {
15805 match token1.token {
15806 Token::Word(w) => {
15807 schema_name = w.value;
15808 }
15809 _ => {
15810 return self.expected("Schema name", token1);
15811 }
15812 }
15813 match token3.token {
15814 Token::Word(w) => {
15815 table_name = w.value;
15816 }
15817 _ => {
15818 return self.expected("Table name", token3);
15819 }
15820 }
15821 Ok(Table {
15822 table_name: Some(table_name),
15823 schema_name: Some(schema_name),
15824 })
15825 } else {
15826 match token1.token {
15827 Token::Word(w) => {
15828 table_name = w.value;
15829 }
15830 _ => {
15831 return self.expected("Table name", token1);
15832 }
15833 }
15834 Ok(Table {
15835 table_name: Some(table_name),
15836 schema_name: None,
15837 })
15838 }
15839 }
15840
15841 fn parse_set_role(
15843 &mut self,
15844 modifier: Option<ContextModifier>,
15845 ) -> Result<Statement, ParserError> {
15846 self.expect_keyword_is(Keyword::ROLE)?;
15847
15848 let role_name = if self.parse_keyword(Keyword::NONE) {
15849 None
15850 } else {
15851 Some(self.parse_identifier()?)
15852 };
15853 Ok(Statement::Set(Set::SetRole {
15854 context_modifier: modifier,
15855 role_name,
15856 }))
15857 }
15858
15859 fn parse_set_values(
15860 &mut self,
15861 parenthesized_assignment: bool,
15862 ) -> Result<Vec<Expr>, ParserError> {
15863 let mut values = vec![];
15864
15865 if parenthesized_assignment {
15866 self.expect_token(&Token::LParen)?;
15867 }
15868
15869 loop {
15870 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15871 expr
15872 } else if let Ok(expr) = self.parse_expr() {
15873 expr
15874 } else {
15875 self.expected_ref("variable value", self.peek_token_ref())?
15876 };
15877
15878 values.push(value);
15879 if self.consume_token(&Token::Comma) {
15880 continue;
15881 }
15882
15883 if parenthesized_assignment {
15884 self.expect_token(&Token::RParen)?;
15885 }
15886 return Ok(values);
15887 }
15888 }
15889
15890 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15891 let modifier =
15892 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15893
15894 Self::keyword_to_modifier(modifier)
15895 }
15896
15897 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15899 let scope = self.parse_context_modifier();
15900
15901 let name = if self.dialect.supports_parenthesized_set_variables()
15902 && self.consume_token(&Token::LParen)
15903 {
15904 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15908 } else {
15909 self.parse_object_name(false)?
15910 };
15911
15912 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15913 return self.expected_ref("assignment operator", self.peek_token_ref());
15914 }
15915
15916 let value = self.parse_expr()?;
15917
15918 Ok(SetAssignment { scope, name, value })
15919 }
15920
    /// Parses the body of a `SET ...` statement (the `SET` keyword is already
    /// consumed), dispatching between the many dialect-specific forms:
    /// `SET ROLE`, `SET TIME ZONE`, `SET NAMES`, transaction
    /// characteristics/modes, `SET ... AUTHORIZATION`, comma-separated
    /// assignment lists, and plain `variable = value` assignment.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive variables are written `SET hivevar:name = value`.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // Optional SESSION/LOCAL/GLOBAL scope; not combined with HIVEVAR.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET [scope] ROLE ...` — tried speculatively so failure rewinds.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // With an assignment operator this becomes an ordinary
                // assignment to the pseudo-variable TIMEZONE.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // Otherwise the dedicated `SET TIME ZONE <expr>` form.
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // `SET NAMES DEFAULT | <charset> [COLLATE '<collation>']`
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET [scope] CHARACTERISTICS AS TRANSACTION <modes>`
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT <id>` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET <scope> AUTHORIZATION DEFAULT | <user>` — the scope
            // keyword is mandatory for this form.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            // Rewind the scope keyword so each assignment can parse its own
            // scope (see `parse_set_assignment`).
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten to SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a parenthesized variable list `(a, b, ...)` (where the
        // dialect allows it) or a single, possibly qualified, variable name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // No assignment operator: some dialects accept `SET <param> <value>`
        // session parameters — rewind one token and try those.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
16090
16091 pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
16093 if self.parse_keyword(Keyword::STATISTICS) {
16094 let topic = match self.parse_one_of_keywords(&[
16095 Keyword::IO,
16096 Keyword::PROFILE,
16097 Keyword::TIME,
16098 Keyword::XML,
16099 ]) {
16100 Some(Keyword::IO) => SessionParamStatsTopic::IO,
16101 Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
16102 Some(Keyword::TIME) => SessionParamStatsTopic::Time,
16103 Some(Keyword::XML) => SessionParamStatsTopic::Xml,
16104 _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
16105 };
16106 let value = self.parse_session_param_value()?;
16107 Ok(
16108 Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
16109 topic,
16110 value,
16111 }))
16112 .into(),
16113 )
16114 } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
16115 let obj = self.parse_object_name(false)?;
16116 let value = self.parse_session_param_value()?;
16117 Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
16118 SetSessionParamIdentityInsert { obj, value },
16119 ))
16120 .into())
16121 } else if self.parse_keyword(Keyword::OFFSETS) {
16122 let keywords = self.parse_comma_separated(|parser| {
16123 let next_token = parser.next_token();
16124 match &next_token.token {
16125 Token::Word(w) => Ok(w.to_string()),
16126 _ => parser.expected("SQL keyword", next_token),
16127 }
16128 })?;
16129 let value = self.parse_session_param_value()?;
16130 Ok(
16131 Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
16132 keywords,
16133 value,
16134 }))
16135 .into(),
16136 )
16137 } else {
16138 let names = self.parse_comma_separated(|parser| {
16139 let next_token = parser.next_token();
16140 match next_token.token {
16141 Token::Word(w) => Ok(w.to_string()),
16142 _ => parser.expected("Session param name", next_token),
16143 }
16144 })?;
16145 let value = self.parse_expr()?.to_string();
16146 Ok(
16147 Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
16148 names,
16149 value,
16150 }))
16151 .into(),
16152 )
16153 }
16154 }
16155
16156 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
16157 if self.parse_keyword(Keyword::ON) {
16158 Ok(SessionParamValue::On)
16159 } else if self.parse_keyword(Keyword::OFF) {
16160 Ok(SessionParamValue::Off)
16161 } else {
16162 self.expected_ref("ON or OFF", self.peek_token_ref())
16163 }
16164 }
16165
    /// Parses the body of a `SHOW ...` statement. The leading qualifiers
    /// (TERSE/EXTENDED/FULL/SESSION/GLOBAL/EXTERNAL) are consumed up front
    /// and forwarded to the matching sub-parser; anything unrecognized falls
    /// back to a generic `SHOW <identifiers>` variable display.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // Every SHOW form below this point ignores EXTENDED/FULL, so
            // reaching here with either qualifier set is an error.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): if the dialect check fails, VARIABLES has already
            // been consumed by `parse_keyword`, so the remaining branches and
            // the ShowVariable fallback see the *following* token — confirm
            // this fall-through is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): same keyword-already-consumed caveat as the
            // VARIABLES branch above.
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <identifier ...>` as a generic variable display.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
16229
16230 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
16231 Ok(Statement::ShowCharset(ShowCharset {
16233 is_shorthand,
16234 filter: self.parse_show_statement_filter()?,
16235 }))
16236 }
16237
16238 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
16239 let history = self.parse_keyword(Keyword::HISTORY);
16240 let show_options = self.parse_show_stmt_options()?;
16241 Ok(Statement::ShowCatalogs {
16242 terse,
16243 history,
16244 show_options,
16245 })
16246 }
16247
16248 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
16249 let history = self.parse_keyword(Keyword::HISTORY);
16250 let show_options = self.parse_show_stmt_options()?;
16251 Ok(Statement::ShowDatabases {
16252 terse,
16253 history,
16254 show_options,
16255 })
16256 }
16257
16258 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
16259 let history = self.parse_keyword(Keyword::HISTORY);
16260 let show_options = self.parse_show_stmt_options()?;
16261 Ok(Statement::ShowSchemas {
16262 terse,
16263 history,
16264 show_options,
16265 })
16266 }
16267
16268 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
16270 let obj_type = match self.expect_one_of_keywords(&[
16271 Keyword::TABLE,
16272 Keyword::TRIGGER,
16273 Keyword::FUNCTION,
16274 Keyword::PROCEDURE,
16275 Keyword::EVENT,
16276 Keyword::VIEW,
16277 ])? {
16278 Keyword::TABLE => Ok(ShowCreateObject::Table),
16279 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
16280 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
16281 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
16282 Keyword::EVENT => Ok(ShowCreateObject::Event),
16283 Keyword::VIEW => Ok(ShowCreateObject::View),
16284 keyword => Err(ParserError::ParserError(format!(
16285 "Unable to map keyword to ShowCreateObject: {keyword:?}"
16286 ))),
16287 }?;
16288
16289 let obj_name = self.parse_object_name(false)?;
16290
16291 Ok(Statement::ShowCreate { obj_type, obj_name })
16292 }
16293
16294 pub fn parse_show_columns(
16296 &mut self,
16297 extended: bool,
16298 full: bool,
16299 ) -> Result<Statement, ParserError> {
16300 let show_options = self.parse_show_stmt_options()?;
16301 Ok(Statement::ShowColumns {
16302 extended,
16303 full,
16304 show_options,
16305 })
16306 }
16307
16308 fn parse_show_tables(
16309 &mut self,
16310 terse: bool,
16311 extended: bool,
16312 full: bool,
16313 external: bool,
16314 ) -> Result<Statement, ParserError> {
16315 let history = !external && self.parse_keyword(Keyword::HISTORY);
16316 let show_options = self.parse_show_stmt_options()?;
16317 Ok(Statement::ShowTables {
16318 terse,
16319 history,
16320 extended,
16321 full,
16322 external,
16323 show_options,
16324 })
16325 }
16326
16327 fn parse_show_views(
16328 &mut self,
16329 terse: bool,
16330 materialized: bool,
16331 ) -> Result<Statement, ParserError> {
16332 let show_options = self.parse_show_stmt_options()?;
16333 Ok(Statement::ShowViews {
16334 materialized,
16335 terse,
16336 show_options,
16337 })
16338 }
16339
16340 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
16342 let filter = self.parse_show_statement_filter()?;
16343 Ok(Statement::ShowFunctions { filter })
16344 }
16345
16346 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
16348 let filter = self.parse_show_statement_filter()?;
16349 Ok(Statement::ShowCollation { filter })
16350 }
16351
16352 pub fn parse_show_statement_filter(
16354 &mut self,
16355 ) -> Result<Option<ShowStatementFilter>, ParserError> {
16356 if self.parse_keyword(Keyword::LIKE) {
16357 Ok(Some(ShowStatementFilter::Like(
16358 self.parse_literal_string()?,
16359 )))
16360 } else if self.parse_keyword(Keyword::ILIKE) {
16361 Ok(Some(ShowStatementFilter::ILike(
16362 self.parse_literal_string()?,
16363 )))
16364 } else if self.parse_keyword(Keyword::WHERE) {
16365 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
16366 } else {
16367 self.maybe_parse(|parser| -> Result<String, ParserError> {
16368 parser.parse_literal_string()
16369 })?
16370 .map_or(Ok(None), |filter| {
16371 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
16372 })
16373 }
16374 }
16375
16376 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
16378 let parsed_keyword = if dialect_of!(self is HiveDialect) {
16380 if self.parse_keyword(Keyword::DEFAULT) {
16382 return Ok(Statement::Use(Use::Default));
16383 }
16384 None } else if dialect_of!(self is DatabricksDialect) {
16386 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
16387 } else if dialect_of!(self is SnowflakeDialect) {
16388 self.parse_one_of_keywords(&[
16389 Keyword::DATABASE,
16390 Keyword::SCHEMA,
16391 Keyword::WAREHOUSE,
16392 Keyword::ROLE,
16393 Keyword::SECONDARY,
16394 ])
16395 } else {
16396 None };
16398
16399 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
16400 self.parse_secondary_roles()?
16401 } else {
16402 let obj_name = self.parse_object_name(false)?;
16403 match parsed_keyword {
16404 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
16405 Some(Keyword::DATABASE) => Use::Database(obj_name),
16406 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
16407 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
16408 Some(Keyword::ROLE) => Use::Role(obj_name),
16409 _ => Use::Object(obj_name),
16410 }
16411 };
16412
16413 Ok(Statement::Use(result))
16414 }
16415
16416 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
16417 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
16418 if self.parse_keyword(Keyword::NONE) {
16419 Ok(Use::SecondaryRoles(SecondaryRoles::None))
16420 } else if self.parse_keyword(Keyword::ALL) {
16421 Ok(Use::SecondaryRoles(SecondaryRoles::All))
16422 } else {
16423 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16424 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
16425 }
16426 }
16427
16428 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
16430 let relation = self.parse_table_factor()?;
16431 let joins = self.parse_joins()?;
16435 Ok(TableWithJoins { relation, joins })
16436 }
16437
    /// Parses the (possibly empty) sequence of JOIN clauses that follows a
    /// table factor: CROSS JOIN / CROSS APPLY, OUTER APPLY, ASOF JOIN, and
    /// the NATURAL/INNER/LEFT/RIGHT/FULL/SEMI/ANTI/STRAIGHT_JOIN family.
    /// Stops at the first token that does not start a join clause.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Each join may carry a leading GLOBAL keyword, recorded as-is.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow a constraint on CROSS JOIN; re-wrap the
                // operator with it when supported.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // Only `OUTER APPLY` begins with OUTER here; a bare OUTER
                // join keyword is rejected further below.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // `ASOF JOIN <table> MATCH_CONDITION (<expr>) [constraint]`
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a JoinOperator variant *constructor*
                // (a fn taking the JoinConstraint), applied after the
                // relation and constraint are parsed below.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER); self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume LEFT/RIGHT, then the join sub-kind.
                        let _ = self.next_token(); let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // `FULL [OUTER] JOIN`
                        let _ = self.next_token(); let _ = self.parse_keyword(Keyword::OUTER); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token(); JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: end of the join list.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // When the dialect is not left-associative for parens-less
                // joins, a following join keyword means the upcoming joins
                // bind to `relation`, so wrap them as a nested join.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16606
16607 fn peek_parens_less_nested_join(&self) -> bool {
16608 matches!(
16609 self.peek_token_ref().token,
16610 Token::Word(Word {
16611 keyword: Keyword::JOIN
16612 | Keyword::INNER
16613 | Keyword::LEFT
16614 | Keyword::RIGHT
16615 | Keyword::FULL,
16616 ..
16617 })
16618 )
16619 }
16620
16621 #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
16623 pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16624 let _guard = self.recursion_counter.try_decrease()?;
16625 if self.parse_keyword(Keyword::LATERAL) {
16626 if self.consume_token(&Token::LParen) {
16628 self.parse_derived_table_factor(Lateral)
16629 } else {
16630 let name = self.parse_object_name(false)?;
16631 self.expect_token(&Token::LParen)?;
16632 let args = self.parse_optional_args()?;
16633 let alias = self.maybe_parse_table_alias()?;
16634 Ok(TableFactor::Function {
16635 lateral: true,
16636 name,
16637 args,
16638 alias,
16639 })
16640 }
16641 } else if self.parse_keyword(Keyword::TABLE) {
16642 self.expect_token(&Token::LParen)?;
16644 let expr = self.parse_expr()?;
16645 self.expect_token(&Token::RParen)?;
16646 let alias = self.maybe_parse_table_alias()?;
16647 Ok(TableFactor::TableFunction { expr, alias })
16648 } else if self.consume_token(&Token::LParen) {
16649 if let Some(mut table) =
16671 self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
16672 {
16673 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
16674 {
16675 table = match kw {
16676 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
16677 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
16678 unexpected_keyword => return Err(ParserError::ParserError(
16679 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
16680 )),
16681 }
16682 }
16683 return Ok(table);
16684 }
16685
16686 let mut table_and_joins = self.parse_table_and_joins()?;
16693
16694 #[allow(clippy::if_same_then_else)]
16695 if !table_and_joins.joins.is_empty() {
16696 self.expect_token(&Token::RParen)?;
16697 let alias = self.maybe_parse_table_alias()?;
16698 Ok(TableFactor::NestedJoin {
16699 table_with_joins: Box::new(table_and_joins),
16700 alias,
16701 }) } else if let TableFactor::NestedJoin {
16703 table_with_joins: _,
16704 alias: _,
16705 } = &table_and_joins.relation
16706 {
16707 self.expect_token(&Token::RParen)?;
16710 let alias = self.maybe_parse_table_alias()?;
16711 Ok(TableFactor::NestedJoin {
16712 table_with_joins: Box::new(table_and_joins),
16713 alias,
16714 })
16715 } else if self.dialect.supports_parens_around_table_factor() {
16716 self.expect_token(&Token::RParen)?;
16723
16724 if let Some(outer_alias) = self.maybe_parse_table_alias()? {
16725 match &mut table_and_joins.relation {
16728 TableFactor::Derived { alias, .. }
16729 | TableFactor::Table { alias, .. }
16730 | TableFactor::Function { alias, .. }
16731 | TableFactor::UNNEST { alias, .. }
16732 | TableFactor::JsonTable { alias, .. }
16733 | TableFactor::XmlTable { alias, .. }
16734 | TableFactor::OpenJsonTable { alias, .. }
16735 | TableFactor::TableFunction { alias, .. }
16736 | TableFactor::Pivot { alias, .. }
16737 | TableFactor::Unpivot { alias, .. }
16738 | TableFactor::MatchRecognize { alias, .. }
16739 | TableFactor::SemanticView { alias, .. }
16740 | TableFactor::NestedJoin { alias, .. } => {
16741 if let Some(inner_alias) = alias {
16743 return Err(ParserError::ParserError(format!(
16744 "duplicate alias {inner_alias}"
16745 )));
16746 }
16747 alias.replace(outer_alias);
16751 }
16752 };
16753 }
16754 Ok(table_and_joins.relation)
16756 } else {
16757 self.expected_ref("joined table", self.peek_token_ref())
16760 }
16761 } else if self.dialect.supports_values_as_table_factor()
16762 && matches!(
16763 self.peek_tokens(),
16764 [
16765 Token::Word(Word {
16766 keyword: Keyword::VALUES,
16767 ..
16768 }),
16769 Token::LParen
16770 ]
16771 )
16772 {
16773 self.expect_keyword_is(Keyword::VALUES)?;
16774
16775 let values = SetExpr::Values(self.parse_values(false, false)?);
16779 let alias = self.maybe_parse_table_alias()?;
16780 Ok(TableFactor::Derived {
16781 lateral: false,
16782 subquery: Box::new(Query {
16783 with: None,
16784 body: Box::new(values),
16785 order_by: None,
16786 limit_clause: None,
16787 fetch: None,
16788 locks: vec![],
16789 for_clause: None,
16790 settings: None,
16791 format_clause: None,
16792 pipe_operators: vec![],
16793 }),
16794 alias,
16795 sample: None,
16796 })
16797 } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
16798 && self.parse_keyword(Keyword::UNNEST)
16799 {
16800 self.expect_token(&Token::LParen)?;
16801 let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
16802 self.expect_token(&Token::RParen)?;
16803
16804 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
16805 let alias = match self.maybe_parse_table_alias() {
16806 Ok(Some(alias)) => Some(alias),
16807 Ok(None) => None,
16808 Err(e) => return Err(e),
16809 };
16810
16811 let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
16812 Ok(()) => true,
16813 Err(_) => false,
16814 };
16815
16816 let with_offset_alias = if with_offset {
16817 match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
16818 Ok(Some(alias)) => Some(alias),
16819 Ok(None) => None,
16820 Err(e) => return Err(e),
16821 }
16822 } else {
16823 None
16824 };
16825
16826 Ok(TableFactor::UNNEST {
16827 alias,
16828 array_exprs,
16829 with_offset,
16830 with_offset_alias,
16831 with_ordinality,
16832 })
16833 } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
16834 let json_expr = self.parse_expr()?;
16835 self.expect_token(&Token::Comma)?;
16836 let json_path = self.parse_value()?;
16837 self.expect_keyword_is(Keyword::COLUMNS)?;
16838 self.expect_token(&Token::LParen)?;
16839 let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
16840 self.expect_token(&Token::RParen)?;
16841 self.expect_token(&Token::RParen)?;
16842 let alias = self.maybe_parse_table_alias()?;
16843 Ok(TableFactor::JsonTable {
16844 json_expr,
16845 json_path,
16846 columns,
16847 alias,
16848 })
16849 } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
16850 self.prev_token();
16851 self.parse_open_json_table_factor()
16852 } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
16853 self.prev_token();
16854 self.parse_xml_table_factor()
16855 } else if self.dialect.supports_semantic_view_table_factor()
16856 && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
16857 {
16858 self.parse_semantic_view_table_factor()
16859 } else if self.peek_token_ref().token == Token::AtSign {
16860 self.parse_snowflake_stage_table_factor()
16862 } else {
16863 let name = self.parse_object_name(true)?;
16864
16865 let json_path = match &self.peek_token_ref().token {
16866 Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
16867 _ => None,
16868 };
16869
16870 let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
16871 && self.parse_keyword(Keyword::PARTITION)
16872 {
16873 self.parse_parenthesized_identifiers()?
16874 } else {
16875 vec![]
16876 };
16877
16878 let version = self.maybe_parse_table_version()?;
16880
16881 let args = if self.consume_token(&Token::LParen) {
16883 Some(self.parse_table_function_args()?)
16884 } else {
16885 None
16886 };
16887
16888 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
16889
16890 let mut sample = None;
16891 if self.dialect.supports_table_sample_before_alias() {
16892 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
16893 sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
16894 }
16895 }
16896
16897 let alias = self.maybe_parse_table_alias()?;
16898
16899 let index_hints = if self.dialect.supports_table_hints() {
16901 self.maybe_parse(|p| p.parse_table_index_hints())?
16902 .unwrap_or(vec![])
16903 } else {
16904 vec![]
16905 };
16906
16907 let mut with_hints = vec![];
16909 if self.parse_keyword(Keyword::WITH) {
16910 if self.consume_token(&Token::LParen) {
16911 with_hints = self.parse_comma_separated(Parser::parse_expr)?;
16912 self.expect_token(&Token::RParen)?;
16913 } else {
16914 self.prev_token();
16916 }
16917 };
16918
16919 if !self.dialect.supports_table_sample_before_alias() {
16920 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
16921 sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
16922 }
16923 }
16924
16925 let mut table = TableFactor::Table {
16926 name,
16927 alias,
16928 args,
16929 with_hints,
16930 version,
16931 partitions,
16932 with_ordinality,
16933 json_path,
16934 sample,
16935 index_hints,
16936 };
16937
16938 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
16939 table = match kw {
16940 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
16941 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
16942 unexpected_keyword => return Err(ParserError::ParserError(
16943 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
16944 )),
16945 }
16946 }
16947
16948 if self.dialect.supports_match_recognize()
16949 && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
16950 {
16951 table = self.parse_match_recognize(table)?;
16952 }
16953
16954 Ok(table)
16955 }
16956 }
16957
16958 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16963 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16965
16966 let args = if self.consume_token(&Token::LParen) {
16968 Some(self.parse_table_function_args()?)
16969 } else {
16970 None
16971 };
16972
16973 let alias = self.maybe_parse_table_alias()?;
16974
16975 Ok(TableFactor::Table {
16976 name,
16977 alias,
16978 args,
16979 with_hints: vec![],
16980 version: None,
16981 partitions: vec![],
16982 with_ordinality: false,
16983 json_path: None,
16984 sample: None,
16985 index_hints: vec![],
16986 })
16987 }
16988
16989 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16990 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16991 TableSampleModifier::TableSample
16992 } else if self.parse_keyword(Keyword::SAMPLE) {
16993 TableSampleModifier::Sample
16994 } else {
16995 return Ok(None);
16996 };
16997 self.parse_table_sample(modifier).map(Some)
16998 }
16999
    /// Parses the body of a `TABLESAMPLE`/`SAMPLE` clause; the introducing
    /// keyword has already been consumed and is recorded in `modifier`.
    ///
    /// Accepts an optional sampling method, a parenthesized or bare quantity
    /// (or a Hive-style `BUCKET x OUT OF y` spec), and optional
    /// `REPEATABLE`/`SEED` and `OFFSET` trailers.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method name.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        let parenthesized = self.consume_token(&Token::LParen);

        // Either a `BUCKET x OUT OF y [ON expr]` spec (only valid inside
        // parentheses), or a quantity with an optional ROWS/PERCENT unit.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    // Not parseable as an expression: fall back to accepting a
                    // bare word (e.g. a byte length such as `100M`) as a
                    // placeholder value.
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional seed: `REPEATABLE(n)` or `SEED(n)`.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional trailing `OFFSET expr`.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
17094
17095 fn parse_table_sample_seed(
17096 &mut self,
17097 modifier: TableSampleSeedModifier,
17098 ) -> Result<TableSampleSeed, ParserError> {
17099 self.expect_token(&Token::LParen)?;
17100 let value = self.parse_number_value()?;
17101 self.expect_token(&Token::RParen)?;
17102 Ok(TableSampleSeed { modifier, value })
17103 }
17104
17105 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17108 self.expect_token(&Token::LParen)?;
17109 let json_expr = self.parse_expr()?;
17110 let json_path = if self.consume_token(&Token::Comma) {
17111 Some(self.parse_value()?)
17112 } else {
17113 None
17114 };
17115 self.expect_token(&Token::RParen)?;
17116 let columns = if self.parse_keyword(Keyword::WITH) {
17117 self.expect_token(&Token::LParen)?;
17118 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
17119 self.expect_token(&Token::RParen)?;
17120 columns
17121 } else {
17122 Vec::new()
17123 };
17124 let alias = self.maybe_parse_table_alias()?;
17125 Ok(TableFactor::OpenJsonTable {
17126 json_expr,
17127 json_path,
17128 columns,
17129 alias,
17130 })
17131 }
17132
17133 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17134 self.expect_token(&Token::LParen)?;
17135 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
17136 self.expect_token(&Token::LParen)?;
17137 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
17138 self.expect_token(&Token::RParen)?;
17139 self.expect_token(&Token::Comma)?;
17140 namespaces
17141 } else {
17142 vec![]
17143 };
17144 let row_expression = self.parse_expr()?;
17145 let passing = self.parse_xml_passing_clause()?;
17146 self.expect_keyword_is(Keyword::COLUMNS)?;
17147 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
17148 self.expect_token(&Token::RParen)?;
17149 let alias = self.maybe_parse_table_alias()?;
17150 Ok(TableFactor::XmlTable {
17151 namespaces,
17152 row_expression,
17153 passing,
17154 columns,
17155 alias,
17156 })
17157 }
17158
17159 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
17160 let uri = self.parse_expr()?;
17161 self.expect_keyword_is(Keyword::AS)?;
17162 let name = self.parse_identifier()?;
17163 Ok(XmlNamespaceDefinition { uri, name })
17164 }
17165
17166 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
17167 let name = self.parse_identifier()?;
17168
17169 let option = if self.parse_keyword(Keyword::FOR) {
17170 self.expect_keyword(Keyword::ORDINALITY)?;
17171 XmlTableColumnOption::ForOrdinality
17172 } else {
17173 let r#type = self.parse_data_type()?;
17174 let mut path = None;
17175 let mut default = None;
17176
17177 if self.parse_keyword(Keyword::PATH) {
17178 path = Some(self.parse_expr()?);
17179 }
17180
17181 if self.parse_keyword(Keyword::DEFAULT) {
17182 default = Some(self.parse_expr()?);
17183 }
17184
17185 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
17186 if !not_null {
17187 let _ = self.parse_keyword(Keyword::NULL);
17189 }
17190
17191 XmlTableColumnOption::NamedInfo {
17192 r#type,
17193 path,
17194 default,
17195 nullable: !not_null,
17196 }
17197 };
17198 Ok(XmlTableColumn { name, option })
17199 }
17200
17201 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
17202 let mut arguments = vec![];
17203 if self.parse_keyword(Keyword::PASSING) {
17204 loop {
17205 let by_value =
17206 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
17207 let expr = self.parse_expr()?;
17208 let alias = if self.parse_keyword(Keyword::AS) {
17209 Some(self.parse_identifier()?)
17210 } else {
17211 None
17212 };
17213 arguments.push(XmlPassingArgument {
17214 expr,
17215 alias,
17216 by_value,
17217 });
17218 if !self.consume_token(&Token::Comma) {
17219 break;
17220 }
17221 }
17222 }
17223 Ok(XmlPassingClause { arguments })
17224 }
17225
17226 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17228 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
17229 self.expect_token(&Token::LParen)?;
17230
17231 let name = self.parse_object_name(true)?;
17232
17233 let mut dimensions = Vec::new();
17235 let mut metrics = Vec::new();
17236 let mut facts = Vec::new();
17237 let mut where_clause = None;
17238
17239 while self.peek_token_ref().token != Token::RParen {
17240 if self.parse_keyword(Keyword::DIMENSIONS) {
17241 if !dimensions.is_empty() {
17242 return Err(ParserError::ParserError(
17243 "DIMENSIONS clause can only be specified once".to_string(),
17244 ));
17245 }
17246 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17247 } else if self.parse_keyword(Keyword::METRICS) {
17248 if !metrics.is_empty() {
17249 return Err(ParserError::ParserError(
17250 "METRICS clause can only be specified once".to_string(),
17251 ));
17252 }
17253 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17254 } else if self.parse_keyword(Keyword::FACTS) {
17255 if !facts.is_empty() {
17256 return Err(ParserError::ParserError(
17257 "FACTS clause can only be specified once".to_string(),
17258 ));
17259 }
17260 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17261 } else if self.parse_keyword(Keyword::WHERE) {
17262 if where_clause.is_some() {
17263 return Err(ParserError::ParserError(
17264 "WHERE clause can only be specified once".to_string(),
17265 ));
17266 }
17267 where_clause = Some(self.parse_expr()?);
17268 } else {
17269 let tok = self.peek_token_ref();
17270 return parser_err!(
17271 format!(
17272 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
17273 tok.token
17274 ),
17275 tok.span.start
17276 )?;
17277 }
17278 }
17279
17280 self.expect_token(&Token::RParen)?;
17281
17282 let alias = self.maybe_parse_table_alias()?;
17283
17284 Ok(TableFactor::SemanticView {
17285 name,
17286 dimensions,
17287 metrics,
17288 facts,
17289 where_clause,
17290 alias,
17291 })
17292 }
17293
    /// Parses the body of a `MATCH_RECOGNIZE ( ... )` clause applied to
    /// `table`; the `MATCH_RECOGNIZE` keyword has already been consumed.
    ///
    /// Sub-clauses are consumed in their fixed grammatical order:
    /// PARTITION BY, ORDER BY, MEASURES, rows-per-match, AFTER MATCH SKIP,
    /// PATTERN (mandatory), DEFINE (mandatory), then an optional table alias.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // MEASURES entries are `expr [AS] alias`; the AS keyword is optional.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // ONE ROW PER MATCH, or ALL ROWS PER MATCH with an optional
        // empty-match mode suffix.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // AFTER MATCH SKIP requires one of four skip targets; anything else
        // after the prefix is a parse error.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        self.expect_keyword_is(Keyword::DEFINE)?;

        // DEFINE entries are `symbol AS expr`; AS is mandatory here.
        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
17398
    /// Parses an atomic MATCH_RECOGNIZE pattern element: `^`, `$`,
    /// `{- symbol -}` (exclusion), `PERMUTE( ... )`, a parenthesized group, or
    /// a bare symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` reaches the parser as a placeholder token.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}` excludes the symbol's rows from the output.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // PERMUTE is matched as an unquoted word, not a keyword.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: push the token back and try it as a symbol name.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
17437
    /// Parses a base pattern followed by any number of repetition quantifiers:
    /// `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`. Each quantifier wraps
    /// the pattern accumulated so far.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` reaches the parser as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,}` or `{n,m}` — a number followed by a comma.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // No quantifier: push the token back and stop.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17492
17493 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17494 let mut patterns = vec![self.parse_repetition_pattern()?];
17495 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17496 patterns.push(self.parse_repetition_pattern()?);
17497 }
17498 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17499 Ok([pattern]) => Ok(pattern),
17500 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17501 }
17502 }
17503
17504 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17505 let pattern = self.parse_concat_pattern()?;
17506 if self.consume_token(&Token::Pipe) {
17507 match self.parse_pattern()? {
17508 MatchRecognizePattern::Alternation(mut patterns) => {
17510 patterns.insert(0, pattern);
17511 Ok(MatchRecognizePattern::Alternation(patterns))
17512 }
17513 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17514 }
17515 } else {
17516 Ok(pattern)
17517 }
17518 }
17519
17520 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
17522 if self.dialect.supports_table_versioning() {
17523 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
17524 {
17525 let expr = self.parse_expr()?;
17526 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
17527 } else if self.peek_keyword(Keyword::CHANGES) {
17528 return self.parse_table_version_changes().map(Some);
17529 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
17530 let func_name = self.parse_object_name(true)?;
17531 let func = self.parse_function(func_name)?;
17532 return Ok(Some(TableVersion::Function(func)));
17533 } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
17534 let expr = self.parse_expr()?;
17535 return Ok(Some(TableVersion::TimestampAsOf(expr)));
17536 } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
17537 let expr = Expr::Value(self.parse_number_value()?);
17538 return Ok(Some(TableVersion::VersionAsOf(expr)));
17539 }
17540 }
17541 Ok(None)
17542 }
17543
17544 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
17555 let changes_name = self.parse_object_name(true)?;
17556 let changes = self.parse_function(changes_name)?;
17557 let at_name = self.parse_object_name(true)?;
17558 let at = self.parse_function(at_name)?;
17559 let end = if self.peek_keyword(Keyword::END) {
17560 let end_name = self.parse_object_name(true)?;
17561 Some(self.parse_function(end_name)?)
17562 } else {
17563 None
17564 };
17565 Ok(TableVersion::Changes { changes, at, end })
17566 }
17567
    /// Parses one column definition of a `JSON_TABLE(... COLUMNS(...))`
    /// clause: a `NESTED [PATH] ... COLUMNS(...)` group, a
    /// `name FOR ORDINALITY` column, or a named column
    /// `name type [EXISTS] PATH path [error-handling ON EMPTY|ERROR]...`.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional before the path value.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?;
        // Any number of `<action> ON EMPTY` / `<action> ON ERROR` clauses may
        // follow; the last one of each kind wins.
        let mut on_empty = None;
        let mut on_error = None;
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
17611
17612 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17620 let name = self.parse_identifier()?;
17621 let r#type = self.parse_data_type()?;
17622 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17623 self.next_token();
17624 Some(path)
17625 } else {
17626 None
17627 };
17628 let as_json = self.parse_keyword(Keyword::AS);
17629 if as_json {
17630 self.expect_keyword_is(Keyword::JSON)?;
17631 }
17632 Ok(OpenJsonTableColumn {
17633 name,
17634 r#type,
17635 path,
17636 as_json,
17637 })
17638 }
17639
17640 fn parse_json_table_column_error_handling(
17641 &mut self,
17642 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17643 let res = if self.parse_keyword(Keyword::NULL) {
17644 JsonTableColumnErrorHandling::Null
17645 } else if self.parse_keyword(Keyword::ERROR) {
17646 JsonTableColumnErrorHandling::Error
17647 } else if self.parse_keyword(Keyword::DEFAULT) {
17648 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17649 } else {
17650 return Ok(None);
17651 };
17652 self.expect_keyword_is(Keyword::ON)?;
17653 Ok(Some(res))
17654 }
17655
17656 pub fn parse_derived_table_factor(
17658 &mut self,
17659 lateral: IsLateral,
17660 ) -> Result<TableFactor, ParserError> {
17661 let subquery = self.parse_query()?;
17662 self.expect_token(&Token::RParen)?;
17663 let alias = self.maybe_parse_table_alias()?;
17664
17665 let sample = self
17667 .maybe_parse_table_sample()?
17668 .map(TableSampleKind::AfterTableAlias);
17669
17670 Ok(TableFactor::Derived {
17671 lateral: match lateral {
17672 Lateral => true,
17673 NotLateral => false,
17674 },
17675 subquery,
17676 alias,
17677 sample,
17678 })
17679 }
17680
17681 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17704 let expr = self.parse_expr()?;
17705 let alias = if self.parse_keyword(Keyword::AS) {
17706 Some(self.parse_identifier()?)
17707 } else {
17708 None
17709 };
17710
17711 Ok(ExprWithAlias { expr, alias })
17712 }
17713
17714 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17718 let expr = self.parse_expr()?;
17719 let alias = self.parse_identifier_optional_alias()?;
17720 Ok(ExprWithAlias { expr, alias })
17721 }
17722
17723 fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
17725 let function_name = match self.next_token().token {
17726 Token::Word(w) => Ok(w.value),
17727 _ => self.expected_ref("a function identifier", self.peek_token_ref()),
17728 }?;
17729 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
17730 let alias = {
17731 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
17732 kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
17734 }
17735 self.parse_optional_alias_inner(None, validator)?
17736 };
17737 Ok(ExprWithAlias { expr, alias })
17738 }
17739
    /// Parses a `PIVOT ( aggs FOR value_column IN ( source )
    /// [DEFAULT ON NULL (expr)] ) [alias]` clause applied to `table`; the
    /// `PIVOT` keyword has already been consumed.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions =
            self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // The value column may be a parenthesized list or a single expression.
        // Parsing stops below BETWEEN precedence so the `IN` that follows is
        // not swallowed as part of the expression.
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        // The IN source is ANY [ORDER BY ...], a subquery, or a value list.
        self.expect_token(&Token::LParen)?;
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(
                self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
            )
        };
        self.expect_token(&Token::RParen)?;

        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
17796
17797 pub fn parse_unpivot_table_factor(
17799 &mut self,
17800 table: TableFactor,
17801 ) -> Result<TableFactor, ParserError> {
17802 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17803 self.expect_keyword_is(Keyword::NULLS)?;
17804 Some(NullInclusion::IncludeNulls)
17805 } else if self.parse_keyword(Keyword::EXCLUDE) {
17806 self.expect_keyword_is(Keyword::NULLS)?;
17807 Some(NullInclusion::ExcludeNulls)
17808 } else {
17809 None
17810 };
17811 self.expect_token(&Token::LParen)?;
17812 let value = self.parse_expr()?;
17813 self.expect_keyword_is(Keyword::FOR)?;
17814 let name = self.parse_identifier()?;
17815 self.expect_keyword_is(Keyword::IN)?;
17816 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17817 p.parse_expr_with_alias()
17818 })?;
17819 self.expect_token(&Token::RParen)?;
17820 let alias = self.maybe_parse_table_alias()?;
17821 Ok(TableFactor::Unpivot {
17822 table: Box::new(table),
17823 value,
17824 null_inclusion,
17825 name,
17826 columns,
17827 alias,
17828 })
17829 }
17830
17831 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17833 if natural {
17834 Ok(JoinConstraint::Natural)
17835 } else if self.parse_keyword(Keyword::ON) {
17836 let constraint = self.parse_expr()?;
17837 Ok(JoinConstraint::On(constraint))
17838 } else if self.parse_keyword(Keyword::USING) {
17839 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17840 Ok(JoinConstraint::Using(columns))
17841 } else {
17842 Ok(JoinConstraint::None)
17843 }
17845 }
17846
17847 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17849 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17850
17851 self.expect_keyword_is(Keyword::TO)?;
17852 let grantees = self.parse_grantees()?;
17853
17854 let with_grant_option =
17855 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17856
17857 let current_grants =
17858 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17859 Some(CurrentGrantsKind::CopyCurrentGrants)
17860 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17861 Some(CurrentGrantsKind::RevokeCurrentGrants)
17862 } else {
17863 None
17864 };
17865
17866 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17867 Some(self.parse_identifier()?)
17868 } else {
17869 None
17870 };
17871
17872 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17873 Some(self.parse_identifier()?)
17874 } else {
17875 None
17876 };
17877
17878 Ok(Grant {
17879 privileges,
17880 objects,
17881 grantees,
17882 with_grant_option,
17883 as_grantor,
17884 granted_by,
17885 current_grants,
17886 })
17887 }
17888
    /// Parse the grantee list of a GRANT/DENY/REVOKE statement: a
    /// comma-separated sequence of optionally type-prefixed principals,
    /// e.g. `ROLE r1, u1, USER u2, PUBLIC`.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        // The grantee type is "sticky": once a type keyword (e.g. `ROLE`) has
        // been seen, subsequent bare names in the list keep that type until a
        // different type keyword appears.
        let mut grantee_type = GranteesType::None;
        loop {
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No type keyword consumed: carry the previous type forward.
                grantee_type.clone()
            };

            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                // The keyword is reserved as a grantee type in this dialect,
                // so treat the consumed word as the grantee name itself:
                // rewind and re-parse it as a name below.
                // NOTE(review): this rewinds a single token, which looks
                // insufficient for the two-keyword matches above
                // (`DATABASE ROLE`, `APPLICATION ROLE`) — confirm dialects
                // never reserve those types.
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC is a nameless pseudo-grantee.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // Fold a `namespace:name` pair into a single identifier.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17956
17957 pub fn parse_grant_deny_revoke_privileges_objects(
17959 &mut self,
17960 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17961 let privileges = if self.parse_keyword(Keyword::ALL) {
17962 Privileges::All {
17963 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17964 }
17965 } else {
17966 let actions = self.parse_actions_list()?;
17967 Privileges::Actions(actions)
17968 };
17969
17970 let objects = if self.parse_keyword(Keyword::ON) {
17971 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17972 Some(GrantObjects::AllTablesInSchema {
17973 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17974 })
17975 } else if self.parse_keywords(&[
17976 Keyword::ALL,
17977 Keyword::EXTERNAL,
17978 Keyword::TABLES,
17979 Keyword::IN,
17980 Keyword::SCHEMA,
17981 ]) {
17982 Some(GrantObjects::AllExternalTablesInSchema {
17983 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17984 })
17985 } else if self.parse_keywords(&[
17986 Keyword::ALL,
17987 Keyword::VIEWS,
17988 Keyword::IN,
17989 Keyword::SCHEMA,
17990 ]) {
17991 Some(GrantObjects::AllViewsInSchema {
17992 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17993 })
17994 } else if self.parse_keywords(&[
17995 Keyword::ALL,
17996 Keyword::MATERIALIZED,
17997 Keyword::VIEWS,
17998 Keyword::IN,
17999 Keyword::SCHEMA,
18000 ]) {
18001 Some(GrantObjects::AllMaterializedViewsInSchema {
18002 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18003 })
18004 } else if self.parse_keywords(&[
18005 Keyword::ALL,
18006 Keyword::FUNCTIONS,
18007 Keyword::IN,
18008 Keyword::SCHEMA,
18009 ]) {
18010 Some(GrantObjects::AllFunctionsInSchema {
18011 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18012 })
18013 } else if self.parse_keywords(&[
18014 Keyword::FUTURE,
18015 Keyword::SCHEMAS,
18016 Keyword::IN,
18017 Keyword::DATABASE,
18018 ]) {
18019 Some(GrantObjects::FutureSchemasInDatabase {
18020 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18021 })
18022 } else if self.parse_keywords(&[
18023 Keyword::FUTURE,
18024 Keyword::TABLES,
18025 Keyword::IN,
18026 Keyword::SCHEMA,
18027 ]) {
18028 Some(GrantObjects::FutureTablesInSchema {
18029 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18030 })
18031 } else if self.parse_keywords(&[
18032 Keyword::FUTURE,
18033 Keyword::EXTERNAL,
18034 Keyword::TABLES,
18035 Keyword::IN,
18036 Keyword::SCHEMA,
18037 ]) {
18038 Some(GrantObjects::FutureExternalTablesInSchema {
18039 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18040 })
18041 } else if self.parse_keywords(&[
18042 Keyword::FUTURE,
18043 Keyword::VIEWS,
18044 Keyword::IN,
18045 Keyword::SCHEMA,
18046 ]) {
18047 Some(GrantObjects::FutureViewsInSchema {
18048 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18049 })
18050 } else if self.parse_keywords(&[
18051 Keyword::FUTURE,
18052 Keyword::MATERIALIZED,
18053 Keyword::VIEWS,
18054 Keyword::IN,
18055 Keyword::SCHEMA,
18056 ]) {
18057 Some(GrantObjects::FutureMaterializedViewsInSchema {
18058 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18059 })
18060 } else if self.parse_keywords(&[
18061 Keyword::ALL,
18062 Keyword::SEQUENCES,
18063 Keyword::IN,
18064 Keyword::SCHEMA,
18065 ]) {
18066 Some(GrantObjects::AllSequencesInSchema {
18067 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18068 })
18069 } else if self.parse_keywords(&[
18070 Keyword::FUTURE,
18071 Keyword::SEQUENCES,
18072 Keyword::IN,
18073 Keyword::SCHEMA,
18074 ]) {
18075 Some(GrantObjects::FutureSequencesInSchema {
18076 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18077 })
18078 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
18079 Some(GrantObjects::ResourceMonitors(
18080 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18081 ))
18082 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
18083 Some(GrantObjects::ComputePools(
18084 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18085 ))
18086 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
18087 Some(GrantObjects::FailoverGroup(
18088 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18089 ))
18090 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
18091 Some(GrantObjects::ReplicationGroup(
18092 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18093 ))
18094 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
18095 Some(GrantObjects::ExternalVolumes(
18096 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18097 ))
18098 } else {
18099 let object_type = self.parse_one_of_keywords(&[
18100 Keyword::SEQUENCE,
18101 Keyword::DATABASE,
18102 Keyword::SCHEMA,
18103 Keyword::TABLE,
18104 Keyword::VIEW,
18105 Keyword::WAREHOUSE,
18106 Keyword::INTEGRATION,
18107 Keyword::VIEW,
18108 Keyword::WAREHOUSE,
18109 Keyword::INTEGRATION,
18110 Keyword::USER,
18111 Keyword::CONNECTION,
18112 Keyword::PROCEDURE,
18113 Keyword::FUNCTION,
18114 Keyword::TYPE,
18115 Keyword::DOMAIN,
18116 ]);
18117 let objects =
18118 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
18119 match object_type {
18120 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
18121 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
18122 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
18123 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
18124 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
18125 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
18126 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
18127 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
18128 Some(Keyword::TYPE) => Some(GrantObjects::Types(objects?)),
18129 Some(Keyword::DOMAIN) => Some(GrantObjects::Domains(objects?)),
18130 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
18131 if let Some(name) = objects?.first() {
18132 self.parse_grant_procedure_or_function(name, &kw)?
18133 } else {
18134 self.expected_ref("procedure or function name", self.peek_token_ref())?
18135 }
18136 }
18137 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
18138 Some(unexpected_keyword) => return Err(ParserError::ParserError(
18139 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
18140 )),
18141 }
18142 }
18143 } else {
18144 None
18145 };
18146
18147 Ok((privileges, objects))
18148 }
18149
18150 fn parse_grant_procedure_or_function(
18151 &mut self,
18152 name: &ObjectName,
18153 kw: &Option<Keyword>,
18154 ) -> Result<Option<GrantObjects>, ParserError> {
18155 let arg_types = if self.consume_token(&Token::LParen) {
18156 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
18157 self.expect_token(&Token::RParen)?;
18158 list
18159 } else {
18160 vec![]
18161 };
18162 match kw {
18163 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
18164 name: name.clone(),
18165 arg_types,
18166 })),
18167 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
18168 name: name.clone(),
18169 arg_types,
18170 })),
18171 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
18172 }
18173 }
18174
    /// Parse a single privilege (action) from the privilege list of a
    /// GRANT/DENY/REVOKE statement, e.g. `SELECT (c1, c2)` or `CREATE TABLE`.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Parse an optional parenthesized column list; an absent or empty
        // list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges must be tried before single-word ones so a
        // shared leading keyword (e.g. `MANAGE`, `READ`) is not consumed
        // prematurely by a shorter match.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-word privileges; some take an optional sub-clause
        // (object type, column list, etc.).
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
18295
    /// Parse the optional object type following `GRANT CREATE`, e.g.
    /// `CREATE SCHEMA` or `CREATE COMPUTE POOL`. Returns `None` (consuming
    /// nothing) when no recognized object type follows.
    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
        // Multi-word object types are tried first so that e.g.
        // `APPLICATION PACKAGE` is not truncated to the bare `APPLICATION`
        // match further down.
        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
            Some(ActionCreateObjectType::ApplicationPackage)
        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
            Some(ActionCreateObjectType::ComputePool)
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
            Some(ActionCreateObjectType::DataExchangeListing)
        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
            Some(ActionCreateObjectType::ExternalVolume)
        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
            Some(ActionCreateObjectType::FailoverGroup)
        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
            Some(ActionCreateObjectType::NetworkPolicy)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
            // NOTE(review): the AST variant is spelled `OrganiationListing`
            // (missing a `z`); renaming it would be a breaking AST change,
            // so the typo is preserved here.
            Some(ActionCreateObjectType::OrganiationListing)
        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
            Some(ActionCreateObjectType::ReplicationGroup)
        }
        // Single-word object types.
        else if self.parse_keyword(Keyword::ACCOUNT) {
            Some(ActionCreateObjectType::Account)
        } else if self.parse_keyword(Keyword::APPLICATION) {
            Some(ActionCreateObjectType::Application)
        } else if self.parse_keyword(Keyword::DATABASE) {
            Some(ActionCreateObjectType::Database)
        } else if self.parse_keyword(Keyword::INTEGRATION) {
            Some(ActionCreateObjectType::Integration)
        } else if self.parse_keyword(Keyword::ROLE) {
            Some(ActionCreateObjectType::Role)
        } else if self.parse_keyword(Keyword::SCHEMA) {
            Some(ActionCreateObjectType::Schema)
        } else if self.parse_keyword(Keyword::SHARE) {
            Some(ActionCreateObjectType::Share)
        } else if self.parse_keyword(Keyword::USER) {
            Some(ActionCreateObjectType::User)
        } else if self.parse_keyword(Keyword::WAREHOUSE) {
            Some(ActionCreateObjectType::Warehouse)
        } else {
            None
        }
    }
18338
    /// Parse the mandatory policy/tag type following `GRANT APPLY`,
    /// e.g. `APPLY MASKING POLICY`. Errors if none of the known types follow.
    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AggregationPolicy)
        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AuthenticationPolicy)
        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
            Ok(ActionApplyType::JoinPolicy)
        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
            Ok(ActionApplyType::MaskingPolicy)
        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
            Ok(ActionApplyType::PackagesPolicy)
        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
            Ok(ActionApplyType::PasswordPolicy)
        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
            Ok(ActionApplyType::ProjectionPolicy)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
            Ok(ActionApplyType::RowAccessPolicy)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
            Ok(ActionApplyType::SessionPolicy)
        } else if self.parse_keyword(Keyword::TAG) {
            Ok(ActionApplyType::Tag)
        } else {
            self.expected_ref("GRANT APPLY type", self.peek_token_ref())
        }
    }
18364
    /// Parse the optional object type following `GRANT EXEC`/`EXECUTE`.
    /// Returns `None` (consuming nothing) when no recognized type follows.
    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
        // `MANAGED ALERT`/`MANAGED TASK` must precede the bare ALERT/TASK
        // matches below.
        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
            Some(ActionExecuteObjectType::DataMetricFunction)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
            Some(ActionExecuteObjectType::ManagedAlert)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
            Some(ActionExecuteObjectType::ManagedTask)
        } else if self.parse_keyword(Keyword::ALERT) {
            Some(ActionExecuteObjectType::Alert)
        } else if self.parse_keyword(Keyword::TASK) {
            Some(ActionExecuteObjectType::Task)
        } else {
            None
        }
    }
18380
    /// Parse the mandatory object type following `GRANT MANAGE`,
    /// e.g. `MANAGE GRANTS`. Errors if none of the known types follow.
    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::AccountSupportCases)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
            Ok(ActionManageType::EventSharing)
        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
            Ok(ActionManageType::ListingAutoFulfillment)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::OrganizationSupportCases)
        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::UserSupportCases)
        } else if self.parse_keyword(Keyword::GRANTS) {
            Ok(ActionManageType::Grants)
        } else if self.parse_keyword(Keyword::WAREHOUSES) {
            Ok(ActionManageType::Warehouses)
        } else {
            self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
        }
    }
18400
    /// Parse the optional object type following `GRANT MODIFY`.
    /// Returns `None` (consuming nothing) when no recognized type follows.
    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
        // The branches do not shadow each other: each alternative starts with
        // a distinct first keyword, so `parse_keywords` backtracks cleanly.
        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
            Some(ActionModifyType::LogLevel)
        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
            Some(ActionModifyType::TraceLevel)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
            Some(ActionModifyType::SessionLogLevel)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
            Some(ActionModifyType::SessionTraceLevel)
        } else {
            None
        }
    }
18414
18415 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
18416 if self.parse_keyword(Keyword::EXECUTION) {
18417 Some(ActionMonitorType::Execution)
18418 } else if self.parse_keyword(Keyword::SECURITY) {
18419 Some(ActionMonitorType::Security)
18420 } else if self.parse_keyword(Keyword::USAGE) {
18421 Some(ActionMonitorType::Usage)
18422 } else {
18423 None
18424 }
18425 }
18426
    /// Parse a grantee name: either a plain (possibly qualified) object name
    /// or, for dialects that support it, a `user@host` pair.
    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
        let mut name = self.parse_object_name(false)?;
        // `user@host` form applies only when the dialect allows it, the name
        // parsed so far is a single plain identifier, and an `@` follows.
        // `consume_token` must remain the *last* operand of the `&&` chain so
        // the parser only advances once the cheaper checks have passed.
        if self.dialect.supports_user_host_grantee()
            && name.0.len() == 1
            && name.0[0].as_ident().is_some()
            && self.consume_token(&Token::AtSign)
        {
            // Safe: length and `as_ident` were just checked above.
            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
            let host = self.parse_identifier()?;
            Ok(GranteeName::UserHost { user, host })
        } else {
            Ok(GranteeName::ObjectName(name))
        }
    }
18442
18443 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
18445 self.expect_keyword(Keyword::DENY)?;
18446
18447 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18448 let objects = match objects {
18449 Some(o) => o,
18450 None => {
18451 return parser_err!(
18452 "DENY statements must specify an object",
18453 self.peek_token_ref().span.start
18454 )
18455 }
18456 };
18457
18458 self.expect_keyword_is(Keyword::TO)?;
18459 let grantees = self.parse_grantees()?;
18460 let cascade = self.parse_cascade_option();
18461 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18462 Some(self.parse_identifier()?)
18463 } else {
18464 None
18465 };
18466
18467 Ok(Statement::Deny(DenyStatement {
18468 privileges,
18469 objects,
18470 grantees,
18471 cascade,
18472 granted_by,
18473 }))
18474 }
18475
18476 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18478 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18479
18480 self.expect_keyword_is(Keyword::FROM)?;
18481 let grantees = self.parse_grantees()?;
18482
18483 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18484 Some(self.parse_identifier()?)
18485 } else {
18486 None
18487 };
18488
18489 let cascade = self.parse_cascade_option();
18490
18491 Ok(Revoke {
18492 privileges,
18493 objects,
18494 grantees,
18495 granted_by,
18496 cascade,
18497 })
18498 }
18499
18500 pub fn parse_replace(
18502 &mut self,
18503 replace_token: TokenWithSpan,
18504 ) -> Result<Statement, ParserError> {
18505 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18506 return parser_err!(
18507 "Unsupported statement REPLACE",
18508 self.peek_token_ref().span.start
18509 );
18510 }
18511
18512 let mut insert = self.parse_insert(replace_token)?;
18513 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18514 *replace_into = true;
18515 }
18516
18517 Ok(insert)
18518 }
18519
18520 fn parse_insert_setexpr_boxed(
18524 &mut self,
18525 insert_token: TokenWithSpan,
18526 ) -> Result<Box<SetExpr>, ParserError> {
18527 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18528 }
18529
    /// Parse an INSERT statement. `insert_token` is the already-consumed
    /// `INSERT` (or `REPLACE`, via `parse_replace`) token, kept for span
    /// tracking. Clauses are parsed strictly in source order.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Optional `OR REPLACE`/`OR ROLLBACK`/... conflict clause.
        let or = self.parse_conflict_clause();
        // Priority modifiers are only recognized for MySQL-compatible dialects.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Never parsed here: `parse_replace` flips this flag on the returned
        // statement after the fact.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // `INSERT ... DIRECTORY '<path>'` writes query results to a path
        // instead of a table and produces a distinct statement variant.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional `TABLE` keyword before the target table name.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional alias for the insert target: only for dialects that
            // support it, and only when the next tokens cannot be the start of
            // the source (a subquery, or DEFAULT/VALUES).
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    // A bare identifier may or may not be an alias; backtrack
                    // if it doesn't parse as one.
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT INTO t DEFAULT VALUES`: no columns and no source.
                (vec![], None, vec![], None, None, vec![])
            } else {
                // A column list (plus partition/after-column clauses) can only
                // appear when the source is not an immediate `(SELECT ...)`
                // subquery — otherwise the `(` belongs to the subquery.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                // Three mutually exclusive source forms: a trailing
                // FORMAT/SETTINGS clause (no source), `SET col = val, ...`
                // assignments, or a regular query.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // SETTINGS is parsed before FORMAT, matching the clause order in
            // dialects that support an insert format.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL-style `AS row_alias (col_aliases)` for referencing the
            // inserted row in later clauses.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ... DO NOTHING/UPDATE` or `ON DUPLICATE KEY UPDATE`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Optional conflict target: a named constraint or a
                    // parenthesized column list.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    // `ON` without `CONFLICT` must be `ON DUPLICATE KEY UPDATE`.
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                // Multi-table INSERT fields are populated elsewhere; a plain
                // INSERT always leaves them empty.
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18756
18757 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18761 let ident = self.parse_identifier()?;
18762 let values = self
18763 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18764 .unwrap_or_default();
18765
18766 Ok(InputFormatClause { ident, values })
18767 }
18768
18769 fn peek_subquery_start(&mut self) -> bool {
18772 matches!(
18773 self.peek_tokens_ref(),
18774 [
18775 TokenWithSpan {
18776 token: Token::LParen,
18777 ..
18778 },
18779 TokenWithSpan {
18780 token: Token::Word(Word {
18781 keyword: Keyword::SELECT,
18782 ..
18783 }),
18784 ..
18785 },
18786 ]
18787 )
18788 }
18789
18790 fn peek_subquery_or_cte_start(&mut self) -> bool {
18794 matches!(
18795 self.peek_tokens_ref(),
18796 [
18797 TokenWithSpan {
18798 token: Token::LParen,
18799 ..
18800 },
18801 TokenWithSpan {
18802 token: Token::Word(Word {
18803 keyword: Keyword::SELECT | Keyword::WITH,
18804 ..
18805 }),
18806 ..
18807 },
18808 ]
18809 )
18810 }
18811
    /// Parse an optional conflict clause: `OR REPLACE`, `OR ROLLBACK`,
    /// `OR ABORT`, `OR FAIL`, `OR IGNORE`, or a bare `REPLACE`.
    /// Returns `None` (consuming nothing) when none of these is present.
    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
        // The two-keyword `OR <action>` forms must be tried before the bare
        // `REPLACE` match at the end.
        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
            Some(SqliteOnConflict::Replace)
        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
            Some(SqliteOnConflict::Rollback)
        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
            Some(SqliteOnConflict::Abort)
        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
            Some(SqliteOnConflict::Fail)
        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
            Some(SqliteOnConflict::Ignore)
        } else if self.parse_keyword(Keyword::REPLACE) {
            Some(SqliteOnConflict::Replace)
        } else {
            None
        }
    }
18829
18830 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18832 if self.parse_keyword(Keyword::PARTITION) {
18833 self.expect_token(&Token::LParen)?;
18834 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18835 self.expect_token(&Token::RParen)?;
18836 Ok(partition_cols)
18837 } else {
18838 Ok(None)
18839 }
18840 }
18841
18842 pub fn parse_load_data_table_format(
18844 &mut self,
18845 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18846 if self.parse_keyword(Keyword::INPUTFORMAT) {
18847 let input_format = self.parse_expr()?;
18848 self.expect_keyword_is(Keyword::SERDE)?;
18849 let serde = self.parse_expr()?;
18850 Ok(Some(HiveLoadDataFormat {
18851 input_format,
18852 serde,
18853 }))
18854 } else {
18855 Ok(None)
18856 }
18857 }
18858
18859 fn parse_update_setexpr_boxed(
18863 &mut self,
18864 update_token: TokenWithSpan,
18865 ) -> Result<Box<SetExpr>, ParserError> {
18866 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18867 }
18868
    /// Parses the body of an `UPDATE` statement, after the `UPDATE` keyword
    /// itself has been consumed (its token is passed in as `update_token`).
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Optional SQLite-style `OR <action>` conflict clause.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // A `FROM` clause may appear before `SET`; remember where it was seen.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        let output = self.maybe_parse_output_clause()?;

        // A `FROM` after `SET` is only attempted when none appeared before it.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // `ORDER BY` on UPDATE is dialect-gated.
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18930
18931 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18933 let target = self.parse_assignment_target()?;
18934 self.expect_token(&Token::Eq)?;
18935 let value = self.parse_expr()?;
18936 Ok(Assignment { target, value })
18937 }
18938
18939 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18941 if self.consume_token(&Token::LParen) {
18942 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18943 self.expect_token(&Token::RParen)?;
18944 Ok(AssignmentTarget::Tuple(columns))
18945 } else {
18946 let column = self.parse_object_name(false)?;
18947 Ok(AssignmentTarget::ColumnName(column))
18948 }
18949 }
18950
    /// Parses a single function call argument: a named argument (with a
    /// dialect-dependent operator between name and value) or an unnamed
    /// expression, possibly a wildcard with additional options.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // Speculatively try a named argument first; `maybe_parse` rewinds
        // the token stream on failure.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // The argument name may itself be an arbitrary expression.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // The argument name must be a plain identifier.
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Fall back to an unnamed argument expression.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            // A bare `*` may carry trailing options (EXCLUDE/EXCEPT/REPLACE/
            // RENAME/ILIKE) in dialects that support them.
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    // No options were present: keep the plain wildcard form.
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
19000
    /// Parses the operator separating a named function argument's name from
    /// its value (`VALUE` keyword, `=>`, `=`, `:=`, or `:`), accepting only
    /// the forms the active dialect supports.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not a recognized operator: rewind the consumed token
                // before reporting the error.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
19029
19030 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
19032 if self.consume_token(&Token::RParen) {
19033 Ok(vec![])
19034 } else {
19035 let args = self.parse_comma_separated(Parser::parse_function_args)?;
19036 self.expect_token(&Token::RParen)?;
19037 Ok(args)
19038 }
19039 }
19040
    /// Parses the argument list of a table function, which may end with a
    /// `SETTINGS` clause. The opening `(` must already have been consumed;
    /// this consumes through the closing `)`.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Collect comma-separated arguments until either a SETTINGS clause
        // appears or the list ends.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
19061
    /// Parses the contents of a function call's parentheses: an optional
    /// duplicate-treatment specifier (`ALL`/`DISTINCT`), the argument list,
    /// and any trailing argument clauses (null treatment, `ORDER BY`,
    /// `LIMIT`, `HAVING`, `SEPARATOR`, `ON OVERFLOW`, JSON NULL/RETURNING).
    /// Consumes through the closing `)`.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON NULL / RETURNING clauses may appear before the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list: return with only the clauses parsed so far.
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            // `expect_one_of_keywords` only returns a listed keyword, so the
            // catch-all arm below is defensive and unreachable in practice.
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON NULL / RETURNING clauses may also appear after the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
19155
19156 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
19157 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
19158 Some(JsonNullClause::AbsentOnNull)
19159 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
19160 Some(JsonNullClause::NullOnNull)
19161 } else {
19162 None
19163 }
19164 }
19165
19166 fn maybe_parse_json_returning_clause(
19167 &mut self,
19168 ) -> Result<Option<JsonReturningClause>, ParserError> {
19169 if self.parse_keyword(Keyword::RETURNING) {
19170 let data_type = self.parse_data_type()?;
19171 Ok(Some(JsonReturningClause { data_type }))
19172 } else {
19173 Ok(None)
19174 }
19175 }
19176
19177 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
19178 let loc = self.peek_token_ref().span.start;
19179 match (
19180 self.parse_keyword(Keyword::ALL),
19181 self.parse_keyword(Keyword::DISTINCT),
19182 ) {
19183 (true, false) => Ok(Some(DuplicateTreatment::All)),
19184 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
19185 (false, false) => Ok(None),
19186 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
19187 }
19188 }
19189
    /// Parses a single item of a SELECT projection list, handling wildcards,
    /// qualified wildcards, alias forms, and dialect-specific variants.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Dialect-reserved prefix operator keyword (if any) for this item.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `name.*` — qualified wildcard with optional trailing options.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*` with optional trailing options.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted `from` here most likely means a missing expression
            // before the FROM clause; report it as such.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // `alias = expr` assignment-style aliasing in dialects that allow it.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // Guaranteed by the `matches!` guard above; the error arm is
                // defensive only.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — expression-qualified wildcard in dialects that allow it.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (alias1, alias2, ...)` multi-column aliasing.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression, with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
19264
    /// Parses the dialect-dependent options that may trail a wildcard in a
    /// projection: `ILIKE`, `EXCLUDE`, `EXCEPT`, `REPLACE`, `RENAME`, and an
    /// optional alias. Each option is attempted only when the active dialect
    /// supports it; the parse order here is fixed.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // EXCLUDE is only attempted when no ILIKE was parsed.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if self.dialect.supports_select_wildcard_replace() {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if self.dialect.supports_select_wildcard_rename() {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
            self.maybe_parse_select_item_alias()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
            opt_alias,
        })
    }
19315
19316 pub fn parse_optional_select_item_ilike(
19320 &mut self,
19321 ) -> Result<Option<IlikeSelectItem>, ParserError> {
19322 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
19323 let next_token = self.next_token();
19324 let pattern = match next_token.token {
19325 Token::SingleQuotedString(s) => s,
19326 _ => return self.expected("ilike pattern", next_token),
19327 };
19328 Some(IlikeSelectItem { pattern })
19329 } else {
19330 None
19331 };
19332 Ok(opt_ilike)
19333 }
19334
19335 pub fn parse_optional_select_item_exclude(
19339 &mut self,
19340 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
19341 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
19342 if self.consume_token(&Token::LParen) {
19343 let columns =
19344 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
19345 self.expect_token(&Token::RParen)?;
19346 Some(ExcludeSelectItem::Multiple(columns))
19347 } else {
19348 let column = self.parse_object_name(false)?;
19349 Some(ExcludeSelectItem::Single(column))
19350 }
19351 } else {
19352 None
19353 };
19354
19355 Ok(opt_exclude)
19356 }
19357
19358 pub fn parse_optional_select_item_except(
19362 &mut self,
19363 ) -> Result<Option<ExceptSelectItem>, ParserError> {
19364 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
19365 if self.peek_token_ref().token == Token::LParen {
19366 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
19367 match &idents[..] {
19368 [] => {
19369 return self.expected_ref(
19370 "at least one column should be parsed by the expect clause",
19371 self.peek_token_ref(),
19372 )?;
19373 }
19374 [first, idents @ ..] => Some(ExceptSelectItem {
19375 first_element: first.clone(),
19376 additional_elements: idents.to_vec(),
19377 }),
19378 }
19379 } else {
19380 let ident = self.parse_identifier()?;
19382 Some(ExceptSelectItem {
19383 first_element: ident,
19384 additional_elements: vec![],
19385 })
19386 }
19387 } else {
19388 None
19389 };
19390
19391 Ok(opt_except)
19392 }
19393
19394 pub fn parse_optional_select_item_rename(
19396 &mut self,
19397 ) -> Result<Option<RenameSelectItem>, ParserError> {
19398 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
19399 if self.consume_token(&Token::LParen) {
19400 let idents =
19401 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
19402 self.expect_token(&Token::RParen)?;
19403 Some(RenameSelectItem::Multiple(idents))
19404 } else {
19405 let ident = self.parse_identifier_with_alias()?;
19406 Some(RenameSelectItem::Single(ident))
19407 }
19408 } else {
19409 None
19410 };
19411
19412 Ok(opt_rename)
19413 }
19414
19415 pub fn parse_optional_select_item_replace(
19417 &mut self,
19418 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
19419 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
19420 if self.consume_token(&Token::LParen) {
19421 let items = self.parse_comma_separated(|parser| {
19422 Ok(Box::new(parser.parse_replace_elements()?))
19423 })?;
19424 self.expect_token(&Token::RParen)?;
19425 Some(ReplaceSelectItem { items })
19426 } else {
19427 let tok = self.next_token();
19428 return self.expected("( after REPLACE but", tok);
19429 }
19430 } else {
19431 None
19432 };
19433
19434 Ok(opt_replace)
19435 }
19436 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
19438 let expr = self.parse_expr()?;
19439 let as_keyword = self.parse_keyword(Keyword::AS);
19440 let ident = self.parse_identifier()?;
19441 Ok(ReplaceSelectElement {
19442 expr,
19443 column_name: ident,
19444 as_keyword,
19445 })
19446 }
19447
19448 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19451 if self.parse_keyword(Keyword::ASC) {
19452 Some(true)
19453 } else if self.parse_keyword(Keyword::DESC) {
19454 Some(false)
19455 } else {
19456 None
19457 }
19458 }
19459
19460 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19462 self.parse_order_by_expr_inner(false)
19463 .map(|(order_by, _)| order_by)
19464 }
19465
19466 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19468 self.parse_order_by_expr_inner(true)
19469 .map(|(column, operator_class)| IndexColumn {
19470 column,
19471 operator_class,
19472 })
19473 }
19474
    /// Shared implementation for ORDER BY expressions and CREATE INDEX
    /// columns. When `with_operator_class` is true, an operator class name
    /// may follow the expression; the second tuple element carries it
    /// (or `None`).
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // If the next word is an ordering keyword, it cannot be an
            // operator class, so skip the speculative parse.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // Dialect-gated `WITH FILL` modifier.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19515
19516 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19517 let asc = self.parse_asc_desc();
19518
19519 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19520 Some(true)
19521 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19522 Some(false)
19523 } else {
19524 None
19525 };
19526
19527 Ok(OrderByOptions { asc, nulls_first })
19528 }
19529
19530 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19534 let from = if self.parse_keyword(Keyword::FROM) {
19535 Some(self.parse_expr()?)
19536 } else {
19537 None
19538 };
19539
19540 let to = if self.parse_keyword(Keyword::TO) {
19541 Some(self.parse_expr()?)
19542 } else {
19543 None
19544 };
19545
19546 let step = if self.parse_keyword(Keyword::STEP) {
19547 Some(self.parse_expr()?)
19548 } else {
19549 None
19550 };
19551
19552 Ok(WithFill { from, to, step })
19553 }
19554
19555 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19558 if !self.parse_keyword(Keyword::INTERPOLATE) {
19559 return Ok(None);
19560 }
19561
19562 if self.consume_token(&Token::LParen) {
19563 let interpolations =
19564 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19565 self.expect_token(&Token::RParen)?;
19566 return Ok(Some(Interpolate {
19568 exprs: Some(interpolations),
19569 }));
19570 }
19571
19572 Ok(Some(Interpolate { exprs: None }))
19574 }
19575
19576 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19578 let column = self.parse_identifier()?;
19579 let expr = if self.parse_keyword(Keyword::AS) {
19580 Some(self.parse_expr()?)
19581 } else {
19582 None
19583 };
19584 Ok(InterpolateExpr { column, expr })
19585 }
19586
19587 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
19590 let quantity = if self.consume_token(&Token::LParen) {
19591 let quantity = self.parse_expr()?;
19592 self.expect_token(&Token::RParen)?;
19593 Some(TopQuantity::Expr(quantity))
19594 } else {
19595 let next_token = self.next_token();
19596 let quantity = match next_token.token {
19597 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
19598 _ => self.expected("literal int", next_token)?,
19599 };
19600 Some(TopQuantity::Constant(quantity))
19601 };
19602
19603 let percent = self.parse_keyword(Keyword::PERCENT);
19604
19605 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
19606
19607 Ok(Top {
19608 with_ties,
19609 percent,
19610 quantity,
19611 })
19612 }
19613
19614 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19616 if self.parse_keyword(Keyword::ALL) {
19617 Ok(None)
19618 } else {
19619 Ok(Some(self.parse_expr()?))
19620 }
19621 }
19622
19623 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19625 let value = self.parse_expr()?;
19626 let rows = if self.parse_keyword(Keyword::ROW) {
19627 OffsetRows::Row
19628 } else if self.parse_keyword(Keyword::ROWS) {
19629 OffsetRows::Rows
19630 } else {
19631 OffsetRows::None
19632 };
19633 Ok(Offset { value, rows })
19634 }
19635
    /// Parses the body of a `FETCH { FIRST | NEXT } [quantity [PERCENT]]
    /// { ROW | ROWS } { ONLY | WITH TIES }` clause, after `FETCH` itself
    /// has been consumed.
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        // FIRST/NEXT are interchangeable noise words here.
        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);

        // If ROW/ROWS follows immediately, no quantity was given.
        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
            (Some(quantity), percent)
        };

        // `ONLY` and `WITH TIES` are alternatives; ONLY wins if present.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else {
            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
        };

        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
19664
    /// Parses the body of a row-locking clause: `UPDATE` or `SHARE`,
    /// optionally followed by `OF <table name>` and a non-blocking
    /// specifier (`NOWAIT` or `SKIP LOCKED`).
    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
        // `expect_one_of_keywords` only returns a listed keyword, so the
        // catch-all arm below is defensive and unreachable in practice.
        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
            Keyword::UPDATE => LockType::Update,
            Keyword::SHARE => LockType::Share,
            unexpected_keyword => return Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
            )),
        };
        let of = if self.parse_keyword(Keyword::OF) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
            Some(NonBlock::Nowait)
        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
            Some(NonBlock::SkipLocked)
        } else {
            None
        };
        Ok(LockClause {
            lock_type,
            of,
            nonblock,
        })
    }
19692
    /// Parses a `LOCK [TABLE] <targets> [IN <mode> MODE] [NOWAIT]`
    /// statement.
    pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
        self.expect_keyword(Keyword::LOCK)?;

        // The plural `TABLES` is rejected explicitly; only the singular
        // `TABLE` keyword (or a bare table name) is accepted here.
        if self.peek_keyword(Keyword::TABLES) {
            return self.expected_ref("TABLE or a table name", self.peek_token_ref());
        }

        // The `TABLE` keyword itself is optional.
        let _ = self.parse_keyword(Keyword::TABLE);
        let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
        let lock_mode = if self.parse_keyword(Keyword::IN) {
            let lock_mode = self.parse_lock_table_mode()?;
            self.expect_keyword(Keyword::MODE)?;
            Some(lock_mode)
        } else {
            None
        };
        let nowait = self.parse_keyword(Keyword::NOWAIT);

        Ok(Lock {
            tables,
            lock_mode,
            nowait,
        })
    }
19718
19719 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19720 let only = self.parse_keyword(Keyword::ONLY);
19721 let name = self.parse_object_name(false)?;
19722 let has_asterisk = self.consume_token(&Token::Mul);
19723
19724 Ok(LockTableTarget {
19725 name,
19726 only,
19727 has_asterisk,
19728 })
19729 }
19730
    /// Parses a PostgreSQL LOCK TABLE mode name.
    ///
    /// Multi-word modes are checked before their single-word prefixes
    /// (e.g. `SHARE UPDATE EXCLUSIVE` before `SHARE`) so the longest match
    /// wins; do not reorder these checks.
    fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
        if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
            Ok(LockTableMode::AccessShare)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::AccessExclusive)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
            Ok(LockTableMode::RowShare)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::RowExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareUpdateExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareRowExclusive)
        } else if self.parse_keyword(Keyword::SHARE) {
            Ok(LockTableMode::Share)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Ok(LockTableMode::Exclusive)
        } else {
            self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
        }
    }
19752
    /// Parses the rows of a `VALUES` clause. `allow_empty` permits `()` rows;
    /// `value_keyword` is passed through verbatim to the result.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        // Set to true if any row uses the explicit `ROW(...)` syntax.
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Empty row: `()`.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
19782
19783 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19785 self.expect_keyword_is(Keyword::TRANSACTION)?;
19786 Ok(Statement::StartTransaction {
19787 modes: self.parse_transaction_modes()?,
19788 begin: false,
19789 transaction: Some(BeginTransactionKind::Transaction),
19790 modifier: None,
19791 statements: vec![],
19792 exception: None,
19793 has_end_keyword: false,
19794 })
19795 }
19796
19797 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19799 if !self.dialect.supports_start_transaction_modifier() {
19800 None
19801 } else if self.parse_keyword(Keyword::DEFERRED) {
19802 Some(TransactionModifier::Deferred)
19803 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19804 Some(TransactionModifier::Immediate)
19805 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19806 Some(TransactionModifier::Exclusive)
19807 } else if self.parse_keyword(Keyword::TRY) {
19808 Some(TransactionModifier::Try)
19809 } else if self.parse_keyword(Keyword::CATCH) {
19810 Some(TransactionModifier::Catch)
19811 } else {
19812 None
19813 }
19814 }
19815
19816 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19818 let modifier = self.parse_transaction_modifier();
19819 let transaction =
19820 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19821 {
19822 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19823 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19824 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19825 _ => None,
19826 };
19827 Ok(Statement::StartTransaction {
19828 modes: self.parse_transaction_modes()?,
19829 begin: true,
19830 transaction,
19831 modifier,
19832 statements: vec![],
19833 exception: None,
19834 has_end_keyword: false,
19835 })
19836 }
19837
    /// Parses the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block, after the opening `BEGIN` has been consumed.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each WHEN arm: one or more error identifiers joined by `OR`,
            // then `THEN` and the handler's statements.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // Optional `OR` between identifiers; rewinds if absent.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19883
19884 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19886 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19887 None
19888 } else if self.parse_keyword(Keyword::TRY) {
19889 Some(TransactionModifier::Try)
19890 } else if self.parse_keyword(Keyword::CATCH) {
19891 Some(TransactionModifier::Catch)
19892 } else {
19893 None
19894 };
19895 Ok(Statement::Commit {
19896 chain: self.parse_commit_rollback_chain()?,
19897 end: true,
19898 modifier,
19899 })
19900 }
19901
    /// Parses zero or more comma-separated transaction modes: an isolation
    /// level, `READ ONLY`, or `READ WRITE`.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        // After a comma has been consumed, another mode becomes mandatory.
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                // No mode and none required: the (possibly empty) list ends.
                break;
            };
            modes.push(mode);
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19940
19941 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19943 Ok(Statement::Commit {
19944 chain: self.parse_commit_rollback_chain()?,
19945 end: false,
19946 modifier: None,
19947 })
19948 }
19949
19950 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19952 let chain = self.parse_commit_rollback_chain()?;
19953 let savepoint = self.parse_rollback_savepoint()?;
19954
19955 Ok(Statement::Rollback { chain, savepoint })
19956 }
19957
19958 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19960 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19961 if self.parse_keyword(Keyword::AND) {
19962 let chain = !self.parse_keyword(Keyword::NO);
19963 self.expect_keyword_is(Keyword::CHAIN)?;
19964 Ok(chain)
19965 } else {
19966 Ok(false)
19967 }
19968 }
19969
19970 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19972 if self.parse_keyword(Keyword::TO) {
19973 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19974 let savepoint = self.parse_identifier()?;
19975
19976 Ok(Some(savepoint))
19977 } else {
19978 Ok(None)
19979 }
19980 }
19981
19982 pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
19984 self.expect_token(&Token::LParen)?;
19985 let message = Box::new(self.parse_expr()?);
19986 self.expect_token(&Token::Comma)?;
19987 let severity = Box::new(self.parse_expr()?);
19988 self.expect_token(&Token::Comma)?;
19989 let state = Box::new(self.parse_expr()?);
19990 let arguments = if self.consume_token(&Token::Comma) {
19991 self.parse_comma_separated(Parser::parse_expr)?
19992 } else {
19993 vec![]
19994 };
19995 self.expect_token(&Token::RParen)?;
19996 let options = if self.parse_keyword(Keyword::WITH) {
19997 self.parse_comma_separated(Parser::parse_raiserror_option)?
19998 } else {
19999 vec![]
20000 };
20001 Ok(Statement::RaisError {
20002 message,
20003 severity,
20004 state,
20005 arguments,
20006 options,
20007 })
20008 }
20009
20010 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
20012 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
20013 Keyword::LOG => Ok(RaisErrorOption::Log),
20014 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
20015 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
20016 _ => self.expected_ref(
20017 "LOG, NOWAIT OR SETERROR raiserror option",
20018 self.peek_token_ref(),
20019 ),
20020 }
20021 }
20022
20023 pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
20027 self.expect_keyword_is(Keyword::THROW)?;
20028
20029 let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
20030 let (message, state) = if error_number.is_some() {
20031 self.expect_token(&Token::Comma)?;
20032 let message = Box::new(self.parse_expr()?);
20033 self.expect_token(&Token::Comma)?;
20034 let state = Box::new(self.parse_expr()?);
20035 (Some(message), Some(state))
20036 } else {
20037 (None, None)
20038 };
20039
20040 Ok(ThrowStatement {
20041 error_number,
20042 message,
20043 state,
20044 })
20045 }
20046
20047 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
20049 let prepare = self.parse_keyword(Keyword::PREPARE);
20050 let name = self.parse_identifier()?;
20051 Ok(Statement::Deallocate { name, prepare })
20052 }
20053
    /// Parses the body of an `EXECUTE` statement, covering the named
    /// prepared-statement form (`EXECUTE name [(args)]`), `EXECUTE
    /// IMMEDIATE` (dialect-gated), and optional trailing
    /// `INTO`/`USING`/`OUTPUT`/`DEFAULT` clauses.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        // `IMMEDIATE` is only recognized for dialects that support it.
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // With IMMEDIATE, or when the arguments start right away with `(`,
        // there is no statement name to parse.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Choose the token that terminates the parameter list: the closing
        // paren when one was opened; otherwise EOF, one of the follow-on
        // clause keywords (USING/OUTPUT/DEFAULT), or a semicolon.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        // `parse_comma_separated0` allows the list to be empty.
        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        // Optional INTO target identifiers.
        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        // Optional USING arguments, each of which may carry an alias.
        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
20113
20114 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
20116 let name = self.parse_identifier()?;
20117
20118 let mut data_types = vec![];
20119 if self.consume_token(&Token::LParen) {
20120 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
20121 self.expect_token(&Token::RParen)?;
20122 }
20123
20124 self.expect_keyword_is(Keyword::AS)?;
20125 let statement = Box::new(self.parse_statement()?);
20126 Ok(Statement::Prepare {
20127 name,
20128 data_types,
20129 statement,
20130 })
20131 }
20132
20133 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
20135 self.expect_keyword(Keyword::UNLOAD)?;
20136 self.expect_token(&Token::LParen)?;
20137 let (query, query_text) =
20138 if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
20139 (None, Some(self.parse_literal_string()?))
20140 } else {
20141 (Some(self.parse_query()?), None)
20142 };
20143 self.expect_token(&Token::RParen)?;
20144
20145 self.expect_keyword_is(Keyword::TO)?;
20146 let to = self.parse_identifier()?;
20147 let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
20148 Some(self.parse_iam_role_kind()?)
20149 } else {
20150 None
20151 };
20152 let with = self.parse_options(Keyword::WITH)?;
20153 let mut options = vec![];
20154 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
20155 options.push(opt);
20156 }
20157 Ok(Statement::Unload {
20158 query,
20159 query_text,
20160 to,
20161 auth,
20162 with,
20163 options,
20164 })
20165 }
20166
20167 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
20168 let temporary = self
20169 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
20170 .is_some();
20171 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
20172 let table = self.parse_keyword(Keyword::TABLE);
20173 let name = self.parse_object_name(false)?;
20174
20175 Ok(SelectInto {
20176 temporary,
20177 unlogged,
20178 table,
20179 name,
20180 })
20181 }
20182
20183 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
20184 let v = self.parse_value()?;
20185 match &v.value {
20186 Value::SingleQuotedString(_) => Ok(v),
20187 Value::DoubleQuotedString(_) => Ok(v),
20188 Value::Number(_, _) => Ok(v),
20189 Value::Placeholder(_) => Ok(v),
20190 _ => {
20191 self.prev_token();
20192 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
20193 }
20194 }
20195 }
20196
20197 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
20199 let name = self.parse_object_name(false)?;
20200 if self.consume_token(&Token::LParen) {
20201 let value = self.parse_pragma_value()?;
20202 self.expect_token(&Token::RParen)?;
20203 Ok(Statement::Pragma {
20204 name,
20205 value: Some(value),
20206 is_eq: false,
20207 })
20208 } else if self.consume_token(&Token::Eq) {
20209 Ok(Statement::Pragma {
20210 name,
20211 value: Some(self.parse_pragma_value()?),
20212 is_eq: true,
20213 })
20214 } else {
20215 Ok(Statement::Pragma {
20216 name,
20217 value: None,
20218 is_eq: false,
20219 })
20220 }
20221 }
20222
20223 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
20225 let extension_name = self.parse_identifier()?;
20226
20227 Ok(Statement::Install { extension_name })
20228 }
20229
    /// Parses the remainder of a `LOAD` statement: `LOAD <extension>` for
    /// dialects with extension loading, otherwise
    /// `LOAD DATA [LOCAL] INPATH '<path>' [OVERWRITE] INTO TABLE <name>
    /// [PARTITION ...] [table format]` for dialects with LOAD DATA.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            // NOTE(review): DATA is consumed before the dialect capability
            // check, so on a dialect without LOAD DATA the error in the
            // final branch is reported after the DATA keyword.
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected_ref(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token_ref(),
            )
        }
    }
20260
20261 pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
20273 let has_table_keyword = self.parse_keyword(Keyword::TABLE);
20274
20275 let name = self.parse_object_name(false)?;
20276
20277 let on_cluster = self.parse_optional_on_cluster()?;
20279
20280 let partition = if self.parse_keyword(Keyword::PARTITION) {
20281 if self.parse_keyword(Keyword::ID) {
20282 Some(Partition::Identifier(self.parse_identifier()?))
20283 } else {
20284 Some(Partition::Expr(self.parse_expr()?))
20285 }
20286 } else {
20287 None
20288 };
20289
20290 let include_final = self.parse_keyword(Keyword::FINAL);
20291
20292 let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
20293 if self.parse_keyword(Keyword::BY) {
20294 Some(Deduplicate::ByExpression(self.parse_expr()?))
20295 } else {
20296 Some(Deduplicate::All)
20297 }
20298 } else {
20299 None
20300 };
20301
20302 let predicate = if self.parse_keyword(Keyword::WHERE) {
20304 Some(self.parse_expr()?)
20305 } else {
20306 None
20307 };
20308
20309 let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
20310 self.expect_token(&Token::LParen)?;
20311 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
20312 self.expect_token(&Token::RParen)?;
20313 Some(columns)
20314 } else {
20315 None
20316 };
20317
20318 Ok(Statement::OptimizeTable {
20319 name,
20320 has_table_keyword,
20321 on_cluster,
20322 partition,
20323 include_final,
20324 deduplicate,
20325 predicate,
20326 zorder,
20327 })
20328 }
20329
20330 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
20336 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20338 let name = self.parse_object_name(false)?;
20340 let mut data_type: Option<DataType> = None;
20342 if self.parse_keywords(&[Keyword::AS]) {
20343 data_type = Some(self.parse_data_type()?)
20344 }
20345 let sequence_options = self.parse_create_sequence_options()?;
20346 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
20348 if self.parse_keywords(&[Keyword::NONE]) {
20349 Some(ObjectName::from(vec![Ident::new("NONE")]))
20350 } else {
20351 Some(self.parse_object_name(false)?)
20352 }
20353 } else {
20354 None
20355 };
20356 Ok(Statement::CreateSequence {
20357 temporary,
20358 if_not_exists,
20359 name,
20360 data_type,
20361 sequence_options,
20362 owned_by,
20363 })
20364 }
20365
20366 fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
20367 let mut sequence_options = vec![];
20368 if self.parse_keywords(&[Keyword::INCREMENT]) {
20370 if self.parse_keywords(&[Keyword::BY]) {
20371 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
20372 } else {
20373 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
20374 }
20375 }
20376 if self.parse_keyword(Keyword::MINVALUE) {
20378 sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
20379 } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
20380 sequence_options.push(SequenceOptions::MinValue(None));
20381 }
20382 if self.parse_keywords(&[Keyword::MAXVALUE]) {
20384 sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
20385 } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
20386 sequence_options.push(SequenceOptions::MaxValue(None));
20387 }
20388
20389 if self.parse_keywords(&[Keyword::START]) {
20391 if self.parse_keywords(&[Keyword::WITH]) {
20392 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
20393 } else {
20394 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
20395 }
20396 }
20397 if self.parse_keywords(&[Keyword::CACHE]) {
20399 sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
20400 }
20401 if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
20403 sequence_options.push(SequenceOptions::Cycle(true));
20404 } else if self.parse_keywords(&[Keyword::CYCLE]) {
20405 sequence_options.push(SequenceOptions::Cycle(false));
20406 }
20407
20408 Ok(sequence_options)
20409 }
20410
20411 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
20415 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20416 let name = self.parse_object_name(false)?;
20417
20418 let server_type = if self.parse_keyword(Keyword::TYPE) {
20419 Some(self.parse_identifier()?)
20420 } else {
20421 None
20422 };
20423
20424 let version = if self.parse_keyword(Keyword::VERSION) {
20425 Some(self.parse_identifier()?)
20426 } else {
20427 None
20428 };
20429
20430 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
20431 let foreign_data_wrapper = self.parse_object_name(false)?;
20432
20433 let mut options = None;
20434 if self.parse_keyword(Keyword::OPTIONS) {
20435 self.expect_token(&Token::LParen)?;
20436 options = Some(self.parse_comma_separated(|p| {
20437 let key = p.parse_identifier()?;
20438 let value = p.parse_identifier()?;
20439 Ok(CreateServerOption { key, value })
20440 })?);
20441 self.expect_token(&Token::RParen)?;
20442 }
20443
20444 Ok(Statement::CreateServer(CreateServerStatement {
20445 name,
20446 if_not_exists: ine,
20447 server_type,
20448 version,
20449 foreign_data_wrapper,
20450 options,
20451 }))
20452 }
20453
20454 pub fn parse_create_foreign_data_wrapper(
20458 &mut self,
20459 ) -> Result<CreateForeignDataWrapper, ParserError> {
20460 let name = self.parse_identifier()?;
20461
20462 let handler = if self.parse_keyword(Keyword::HANDLER) {
20463 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20464 } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
20465 Some(FdwRoutineClause::NoFunction)
20466 } else {
20467 None
20468 };
20469
20470 let validator = if self.parse_keyword(Keyword::VALIDATOR) {
20471 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20472 } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20473 Some(FdwRoutineClause::NoFunction)
20474 } else {
20475 None
20476 };
20477
20478 let options = if self.parse_keyword(Keyword::OPTIONS) {
20479 self.expect_token(&Token::LParen)?;
20480 let opts = self.parse_comma_separated(|p| {
20481 let key = p.parse_identifier()?;
20482 let value = p.parse_identifier()?;
20483 Ok(CreateServerOption { key, value })
20484 })?;
20485 self.expect_token(&Token::RParen)?;
20486 Some(opts)
20487 } else {
20488 None
20489 };
20490
20491 Ok(CreateForeignDataWrapper {
20492 name,
20493 handler,
20494 validator,
20495 options,
20496 })
20497 }
20498
20499 pub fn parse_create_foreign_table(&mut self) -> Result<CreateForeignTable, ParserError> {
20503 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20504 let name = self.parse_object_name(false)?;
20505 let (columns, _constraints) = self.parse_columns()?;
20506 self.expect_keyword_is(Keyword::SERVER)?;
20507 let server_name = self.parse_identifier()?;
20508
20509 let options = if self.parse_keyword(Keyword::OPTIONS) {
20510 self.expect_token(&Token::LParen)?;
20511 let opts = self.parse_comma_separated(|p| {
20512 let key = p.parse_identifier()?;
20513 let value = p.parse_identifier()?;
20514 Ok(CreateServerOption { key, value })
20515 })?;
20516 self.expect_token(&Token::RParen)?;
20517 Some(opts)
20518 } else {
20519 None
20520 };
20521
20522 Ok(CreateForeignTable {
20523 name,
20524 if_not_exists,
20525 columns,
20526 server_name,
20527 options,
20528 })
20529 }
20530
20531 pub fn parse_create_publication(&mut self) -> Result<CreatePublication, ParserError> {
20535 let name = self.parse_identifier()?;
20536
20537 let target = if self.parse_keyword(Keyword::FOR) {
20538 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES]) {
20539 Some(PublicationTarget::AllTables)
20540 } else if self.parse_keyword(Keyword::TABLE) {
20541 let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
20542 Some(PublicationTarget::Tables(tables))
20543 } else if self.parse_keywords(&[Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
20544 let schemas = self.parse_comma_separated(|p| p.parse_identifier())?;
20545 Some(PublicationTarget::TablesInSchema(schemas))
20546 } else {
20547 return self.expected_ref(
20548 "ALL TABLES, TABLE, or TABLES IN SCHEMA after FOR",
20549 self.peek_token_ref(),
20550 );
20551 }
20552 } else {
20553 None
20554 };
20555
20556 let with_options = self.parse_options(Keyword::WITH)?;
20557
20558 Ok(CreatePublication {
20559 name,
20560 target,
20561 with_options,
20562 })
20563 }
20564
20565 pub fn parse_create_subscription(&mut self) -> Result<CreateSubscription, ParserError> {
20569 let name = self.parse_identifier()?;
20570 self.expect_keyword_is(Keyword::CONNECTION)?;
20571 let connection = self.parse_value()?.value;
20572 self.expect_keyword_is(Keyword::PUBLICATION)?;
20573 let publications = self.parse_comma_separated(|p| p.parse_identifier())?;
20574 let with_options = self.parse_options(Keyword::WITH)?;
20575
20576 Ok(CreateSubscription {
20577 name,
20578 connection,
20579 publications,
20580 with_options,
20581 })
20582 }
20583
    /// Parses the remainder of `CREATE CAST (source AS target)
    /// { WITH FUNCTION f [(argtypes)] | WITHOUT FUNCTION | WITH INOUT }
    /// [AS ASSIGNMENT | AS IMPLICIT]`.
    pub fn parse_create_cast(&mut self) -> Result<CreateCast, ParserError> {
        self.expect_token(&Token::LParen)?;
        let source_type = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::AS)?;
        let target_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // The alternatives share the WITH prefix; `WITH INOUT` is tried
        // before `WITH FUNCTION`, relying on `parse_keywords` restoring
        // position when the full sequence does not match.
        let function_kind = if self.parse_keywords(&[Keyword::WITHOUT, Keyword::FUNCTION]) {
            CastFunctionKind::WithoutFunction
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::INOUT]) {
            CastFunctionKind::WithInout
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::FUNCTION]) {
            let function_name = self.parse_object_name(false)?;
            // The argument type list is optional and may be empty: `f`,
            // `f()`, and `f(t1, t2)` are all accepted.
            let argument_types = if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                let types = if self.peek_token_ref().token == Token::RParen {
                    vec![]
                } else {
                    self.parse_comma_separated(|p| p.parse_data_type())?
                };
                self.expect_token(&Token::RParen)?;
                types
            } else {
                vec![]
            };
            CastFunctionKind::WithFunction {
                function_name,
                argument_types,
            }
        } else {
            return self.expected_ref(
                "WITH FUNCTION, WITHOUT FUNCTION, or WITH INOUT",
                self.peek_token_ref(),
            );
        };

        // Without an explicit `AS ...` clause the context is Explicit.
        let cast_context = if self.parse_keyword(Keyword::AS) {
            if self.parse_keyword(Keyword::ASSIGNMENT) {
                CastContext::Assignment
            } else if self.parse_keyword(Keyword::IMPLICIT) {
                CastContext::Implicit
            } else {
                return self.expected_ref("ASSIGNMENT or IMPLICIT after AS", self.peek_token_ref());
            }
        } else {
            CastContext::Explicit
        };

        Ok(CreateCast {
            source_type,
            target_type,
            function_kind,
            cast_context,
        })
    }
20642
20643 pub fn parse_create_conversion(
20647 &mut self,
20648 is_default: bool,
20649 ) -> Result<CreateConversion, ParserError> {
20650 let name = self.parse_object_name(false)?;
20651 self.expect_keyword_is(Keyword::FOR)?;
20652 let source_encoding = self.parse_literal_string()?;
20653 self.expect_keyword_is(Keyword::TO)?;
20654 let destination_encoding = self.parse_literal_string()?;
20655 self.expect_keyword_is(Keyword::FROM)?;
20656 let function_name = self.parse_object_name(false)?;
20657
20658 Ok(CreateConversion {
20659 name,
20660 is_default,
20661 source_encoding,
20662 destination_encoding,
20663 function_name,
20664 })
20665 }
20666
20667 pub fn parse_create_language(
20671 &mut self,
20672 or_replace: bool,
20673 trusted: bool,
20674 procedural: bool,
20675 ) -> Result<CreateLanguage, ParserError> {
20676 let name = self.parse_identifier()?;
20677
20678 let handler = if self.parse_keyword(Keyword::HANDLER) {
20679 Some(self.parse_object_name(false)?)
20680 } else {
20681 None
20682 };
20683
20684 let inline_handler = if self.parse_keyword(Keyword::INLINE) {
20685 Some(self.parse_object_name(false)?)
20686 } else {
20687 None
20688 };
20689
20690 let validator = if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20691 None
20692 } else if self.parse_keyword(Keyword::VALIDATOR) {
20693 Some(self.parse_object_name(false)?)
20694 } else {
20695 None
20696 };
20697
20698 Ok(CreateLanguage {
20699 name,
20700 or_replace,
20701 trusted,
20702 procedural,
20703 handler,
20704 inline_handler,
20705 validator,
20706 })
20707 }
20708
    /// Parses the remainder of `CREATE RULE name AS ON event TO table
    /// [WHERE condition] DO [INSTEAD | ALSO]
    /// { NOTHING | command | (command; ...) }`.
    pub fn parse_create_rule(&mut self) -> Result<CreateRule, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;
        self.expect_keyword_is(Keyword::ON)?;

        let event = if self.parse_keyword(Keyword::SELECT) {
            RuleEvent::Select
        } else if self.parse_keyword(Keyword::INSERT) {
            RuleEvent::Insert
        } else if self.parse_keyword(Keyword::UPDATE) {
            RuleEvent::Update
        } else if self.parse_keyword(Keyword::DELETE) {
            RuleEvent::Delete
        } else {
            return self.expected_ref(
                "SELECT, INSERT, UPDATE, or DELETE after ON",
                self.peek_token_ref(),
            );
        };

        self.expect_keyword_is(Keyword::TO)?;
        let table = self.parse_object_name(false)?;

        let condition = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::DO)?;

        // ALSO is only consumed (not recorded) when INSTEAD was absent.
        let instead = self.parse_keyword(Keyword::INSTEAD);
        if !instead {
            let _ = self.parse_keyword(Keyword::ALSO);
        }

        let action = if self.parse_keyword(Keyword::NOTHING) {
            RuleAction::Nothing
        } else if self.peek_token_ref().token == Token::LParen {
            // Parenthesized list of semicolon-separated statements; a
            // trailing semicolon before the closing paren is allowed.
            self.expect_token(&Token::LParen)?;
            let mut stmts = Vec::new();
            loop {
                stmts.push(self.parse_statement()?);
                if !self.consume_token(&Token::SemiColon) {
                    break;
                }
                if self.peek_token_ref().token == Token::RParen {
                    break;
                }
            }
            self.expect_token(&Token::RParen)?;
            RuleAction::Statements(stmts)
        } else {
            // A single bare statement.
            let stmt = self.parse_statement()?;
            RuleAction::Statements(vec![stmt])
        };

        Ok(CreateRule {
            name,
            event,
            table,
            condition,
            instead,
            action,
        })
    }
20779
20780 pub fn parse_create_statistics(&mut self) -> Result<CreateStatistics, ParserError> {
20784 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20785 let name = self.parse_object_name(false)?;
20786
20787 let kinds = if self.consume_token(&Token::LParen) {
20788 let kinds = self.parse_comma_separated(|p| {
20789 let ident = p.parse_identifier()?;
20790 match ident.value.to_lowercase().as_str() {
20791 "ndistinct" => Ok(StatisticsKind::NDistinct),
20792 "dependencies" => Ok(StatisticsKind::Dependencies),
20793 "mcv" => Ok(StatisticsKind::Mcv),
20794 other => Err(ParserError::ParserError(format!(
20795 "Unknown statistics kind: {other}"
20796 ))),
20797 }
20798 })?;
20799 self.expect_token(&Token::RParen)?;
20800 kinds
20801 } else {
20802 vec![]
20803 };
20804
20805 self.expect_keyword_is(Keyword::ON)?;
20806 let on = self.parse_comma_separated(Parser::parse_expr)?;
20807 self.expect_keyword_is(Keyword::FROM)?;
20808 let from = self.parse_object_name(false)?;
20809
20810 Ok(CreateStatistics {
20811 if_not_exists,
20812 name,
20813 kinds,
20814 on,
20815 from,
20816 })
20817 }
20818
20819 pub fn parse_create_access_method(&mut self) -> Result<CreateAccessMethod, ParserError> {
20823 let name = self.parse_identifier()?;
20824 self.expect_keyword_is(Keyword::TYPE)?;
20825 let method_type = if self.parse_keyword(Keyword::INDEX) {
20826 AccessMethodType::Index
20827 } else if self.parse_keyword(Keyword::TABLE) {
20828 AccessMethodType::Table
20829 } else {
20830 return self.expected_ref("INDEX or TABLE after TYPE", self.peek_token_ref());
20831 };
20832 self.expect_keyword_is(Keyword::HANDLER)?;
20833 let handler = self.parse_object_name(false)?;
20834
20835 Ok(CreateAccessMethod {
20836 name,
20837 method_type,
20838 handler,
20839 })
20840 }
20841
20842 pub fn parse_create_event_trigger(&mut self) -> Result<CreateEventTrigger, ParserError> {
20846 let name = self.parse_identifier()?;
20847 self.expect_keyword_is(Keyword::ON)?;
20848 let event_ident = self.parse_identifier()?;
20849 let event = match event_ident.value.to_lowercase().as_str() {
20850 "ddl_command_start" => EventTriggerEvent::DdlCommandStart,
20851 "ddl_command_end" => EventTriggerEvent::DdlCommandEnd,
20852 "table_rewrite" => EventTriggerEvent::TableRewrite,
20853 "sql_drop" => EventTriggerEvent::SqlDrop,
20854 other => {
20855 return Err(ParserError::ParserError(format!(
20856 "Unknown event trigger event: {other}"
20857 )))
20858 }
20859 };
20860
20861 let when_tags = if self.parse_keyword(Keyword::WHEN) {
20862 self.expect_keyword_is(Keyword::TAG)?;
20863 self.expect_keyword_is(Keyword::IN)?;
20864 self.expect_token(&Token::LParen)?;
20865 let tags = self.parse_comma_separated(|p| p.parse_value().map(|v| v.value))?;
20866 self.expect_token(&Token::RParen)?;
20867 Some(tags)
20868 } else {
20869 None
20870 };
20871
20872 self.expect_keyword_is(Keyword::EXECUTE)?;
20873 let is_procedure = if self.parse_keyword(Keyword::FUNCTION) {
20874 false
20875 } else if self.parse_keyword(Keyword::PROCEDURE) {
20876 true
20877 } else {
20878 return self.expected_ref("FUNCTION or PROCEDURE after EXECUTE", self.peek_token_ref());
20879 };
20880 let execute = self.parse_object_name(false)?;
20881 self.expect_token(&Token::LParen)?;
20882 self.expect_token(&Token::RParen)?;
20883
20884 Ok(CreateEventTrigger {
20885 name,
20886 event,
20887 when_tags,
20888 execute,
20889 is_procedure,
20890 })
20891 }
20892
    /// Parses the remainder of `CREATE [OR REPLACE] TRANSFORM FOR type
    /// LANGUAGE lang ( {FROM | TO} SQL WITH FUNCTION f(argtypes), ... )`.
    pub fn parse_create_transform(
        &mut self,
        or_replace: bool,
    ) -> Result<CreateTransform, ParserError> {
        self.expect_keyword_is(Keyword::FOR)?;
        let type_name = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::LANGUAGE)?;
        let language = self.parse_identifier()?;
        self.expect_token(&Token::LParen)?;
        // Each element is a FROM SQL or TO SQL conversion function.
        let elements = self.parse_comma_separated(|p| {
            // Direction: FROM SQL vs TO SQL.
            let is_from = if p.parse_keyword(Keyword::FROM) {
                true
            } else {
                p.expect_keyword_is(Keyword::TO)?;
                false
            };
            p.expect_keyword_is(Keyword::SQL)?;
            p.expect_keyword_is(Keyword::WITH)?;
            p.expect_keyword_is(Keyword::FUNCTION)?;
            let function = p.parse_object_name(false)?;
            // The parenthesized argument type list may be empty: `f()`.
            p.expect_token(&Token::LParen)?;
            let arg_types = if p.peek_token().token == Token::RParen {
                vec![]
            } else {
                p.parse_comma_separated(|p| p.parse_data_type())?
            };
            p.expect_token(&Token::RParen)?;
            Ok(TransformElement {
                is_from,
                function,
                arg_types,
            })
        })?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateTransform {
            or_replace,
            type_name,
            language,
            elements,
        })
    }
20938
    /// Parses the remainder of `SECURITY LABEL [FOR provider] ON
    /// <object kind> <object name> IS {'label' | NULL}`.
    pub fn parse_security_label(&mut self) -> Result<SecurityLabel, ParserError> {
        self.expect_keyword_is(Keyword::LABEL)?;

        // Optional label provider.
        let provider = if self.parse_keyword(Keyword::FOR) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::ON)?;

        // `MATERIALIZED VIEW` is tried first so that the bare `VIEW` arm
        // below does not shadow it.
        let object_kind = if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            SecurityLabelObjectKind::MaterializedView
        } else if self.parse_keyword(Keyword::TABLE) {
            SecurityLabelObjectKind::Table
        } else if self.parse_keyword(Keyword::COLUMN) {
            SecurityLabelObjectKind::Column
        } else if self.parse_keyword(Keyword::DATABASE) {
            SecurityLabelObjectKind::Database
        } else if self.parse_keyword(Keyword::DOMAIN) {
            SecurityLabelObjectKind::Domain
        } else if self.parse_keyword(Keyword::FUNCTION) {
            SecurityLabelObjectKind::Function
        } else if self.parse_keyword(Keyword::ROLE) {
            SecurityLabelObjectKind::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            SecurityLabelObjectKind::Schema
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            SecurityLabelObjectKind::Sequence
        } else if self.parse_keyword(Keyword::TYPE) {
            SecurityLabelObjectKind::Type
        } else if self.parse_keyword(Keyword::VIEW) {
            SecurityLabelObjectKind::View
        } else {
            return self.expected_ref(
                "TABLE, COLUMN, DATABASE, DOMAIN, FUNCTION, MATERIALIZED VIEW, ROLE, SCHEMA, SEQUENCE, TYPE, or VIEW after ON",
                self.peek_token_ref(),
            );
        };

        let object_name = self.parse_object_name(false)?;

        self.expect_keyword_is(Keyword::IS)?;

        // `IS NULL` is represented as a `None` label.
        let label = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_value()?.value)
        };

        Ok(SecurityLabel {
            provider,
            object_kind,
            object_name,
            label,
        })
    }
20999
21000 pub fn parse_create_user_mapping(&mut self) -> Result<CreateUserMapping, ParserError> {
21004 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
21005
21006 self.expect_keyword_is(Keyword::FOR)?;
21007
21008 let user = if self.parse_keyword(Keyword::CURRENT_ROLE) {
21009 UserMappingUser::CurrentRole
21010 } else if self.parse_keyword(Keyword::CURRENT_USER) {
21011 UserMappingUser::CurrentUser
21012 } else if self.parse_keyword(Keyword::PUBLIC) {
21013 UserMappingUser::Public
21014 } else if self.parse_keyword(Keyword::USER) {
21015 UserMappingUser::User
21016 } else {
21017 UserMappingUser::Ident(self.parse_identifier()?)
21018 };
21019
21020 self.expect_keyword_is(Keyword::SERVER)?;
21021 let server_name = self.parse_identifier()?;
21022
21023 let options = if self.parse_keyword(Keyword::OPTIONS) {
21024 self.expect_token(&Token::LParen)?;
21025 let opts = self.parse_comma_separated(|p| {
21026 let key = p.parse_identifier()?;
21027 let value = p.parse_identifier()?;
21028 Ok(CreateServerOption { key, value })
21029 })?;
21030 self.expect_token(&Token::RParen)?;
21031 Some(opts)
21032 } else {
21033 None
21034 };
21035
21036 Ok(CreateUserMapping {
21037 if_not_exists,
21038 user,
21039 server_name,
21040 options,
21041 })
21042 }
21043
21044 pub fn parse_create_tablespace(&mut self) -> Result<CreateTablespace, ParserError> {
21048 let name = self.parse_identifier()?;
21049
21050 let owner = if self.parse_keyword(Keyword::OWNER) {
21051 Some(self.parse_identifier()?)
21052 } else {
21053 None
21054 };
21055
21056 self.expect_keyword_is(Keyword::LOCATION)?;
21057 let location = self.parse_value()?.value;
21058
21059 let with_options = self.parse_options(Keyword::WITH)?;
21060
21061 Ok(CreateTablespace {
21062 name,
21063 owner,
21064 location,
21065 with_options,
21066 })
21067 }
21068
    /// Returns the parser's current position (index) into its token stream.
    pub fn index(&self) -> usize {
        self.index
    }
21073
21074 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
21076 let ident = self.parse_identifier()?;
21077 self.expect_keyword_is(Keyword::AS)?;
21078
21079 let window_expr = if self.consume_token(&Token::LParen) {
21080 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
21081 } else if self.dialect.supports_window_clause_named_window_reference() {
21082 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
21083 } else {
21084 return self.expected_ref("(", self.peek_token_ref());
21085 };
21086
21087 Ok(NamedWindowDefinition(ident, window_expr))
21088 }
21089
21090 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
21092 let name = self.parse_object_name(false)?;
21093 let params = self.parse_optional_procedure_parameters()?;
21094
21095 let language = if self.parse_keyword(Keyword::LANGUAGE) {
21096 Some(self.parse_identifier()?)
21097 } else {
21098 None
21099 };
21100
21101 self.expect_keyword_is(Keyword::AS)?;
21102
21103 let body = self.parse_conditional_statements(&[Keyword::END])?;
21104
21105 Ok(Statement::CreateProcedure {
21106 name,
21107 or_alter,
21108 params,
21109 language,
21110 body,
21111 })
21112 }
21113
21114 pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
21116 let window_name = match &self.peek_token_ref().token {
21117 Token::Word(word) if word.keyword == Keyword::NoKeyword => {
21118 self.parse_optional_ident()?
21119 }
21120 _ => None,
21121 };
21122
21123 let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
21124 self.parse_comma_separated(Parser::parse_expr)?
21125 } else {
21126 vec![]
21127 };
21128 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
21129 self.parse_comma_separated(Parser::parse_order_by_expr)?
21130 } else {
21131 vec![]
21132 };
21133
21134 let window_frame = if !self.consume_token(&Token::RParen) {
21135 let window_frame = self.parse_window_frame()?;
21136 self.expect_token(&Token::RParen)?;
21137 Some(window_frame)
21138 } else {
21139 None
21140 };
21141 Ok(WindowSpec {
21142 window_name,
21143 partition_by,
21144 order_by,
21145 window_frame,
21146 })
21147 }
21148
21149 pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
21151 let name = self.parse_object_name(false)?;
21152
21153 let has_as = self.parse_keyword(Keyword::AS);
21155
21156 if !has_as {
21157 if self.consume_token(&Token::LParen) {
21159 let options = self.parse_create_type_sql_definition_options()?;
21161 self.expect_token(&Token::RParen)?;
21162 return Ok(Statement::CreateType {
21163 name,
21164 representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
21165 });
21166 }
21167
21168 return Ok(Statement::CreateType {
21170 name,
21171 representation: None,
21172 });
21173 }
21174
21175 if self.parse_keyword(Keyword::ENUM) {
21177 self.parse_create_type_enum(name)
21179 } else if self.parse_keyword(Keyword::RANGE) {
21180 self.parse_create_type_range(name)
21182 } else if self.consume_token(&Token::LParen) {
21183 self.parse_create_type_composite(name)
21185 } else {
21186 self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
21187 }
21188 }
21189
21190 fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21194 if self.consume_token(&Token::RParen) {
21195 return Ok(Statement::CreateType {
21197 name,
21198 representation: Some(UserDefinedTypeRepresentation::Composite {
21199 attributes: vec![],
21200 }),
21201 });
21202 }
21203
21204 let mut attributes = vec![];
21205 loop {
21206 let attr_name = self.parse_identifier()?;
21207 let attr_data_type = self.parse_data_type()?;
21208 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
21209 Some(self.parse_object_name(false)?)
21210 } else {
21211 None
21212 };
21213 attributes.push(UserDefinedTypeCompositeAttributeDef {
21214 name: attr_name,
21215 data_type: attr_data_type,
21216 collation: attr_collation,
21217 });
21218
21219 if !self.consume_token(&Token::Comma) {
21220 break;
21221 }
21222 }
21223 self.expect_token(&Token::RParen)?;
21224
21225 Ok(Statement::CreateType {
21226 name,
21227 representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
21228 })
21229 }
21230
21231 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21235 self.expect_token(&Token::LParen)?;
21236 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21237 self.expect_token(&Token::RParen)?;
21238
21239 Ok(Statement::CreateType {
21240 name,
21241 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
21242 })
21243 }
21244
21245 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21249 self.expect_token(&Token::LParen)?;
21250 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
21251 self.expect_token(&Token::RParen)?;
21252
21253 Ok(Statement::CreateType {
21254 name,
21255 representation: Some(UserDefinedTypeRepresentation::Range { options }),
21256 })
21257 }
21258
21259 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
21261 let keyword = self.parse_one_of_keywords(&[
21262 Keyword::SUBTYPE,
21263 Keyword::SUBTYPE_OPCLASS,
21264 Keyword::COLLATION,
21265 Keyword::CANONICAL,
21266 Keyword::SUBTYPE_DIFF,
21267 Keyword::MULTIRANGE_TYPE_NAME,
21268 ]);
21269
21270 match keyword {
21271 Some(Keyword::SUBTYPE) => {
21272 self.expect_token(&Token::Eq)?;
21273 let data_type = self.parse_data_type()?;
21274 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
21275 }
21276 Some(Keyword::SUBTYPE_OPCLASS) => {
21277 self.expect_token(&Token::Eq)?;
21278 let name = self.parse_object_name(false)?;
21279 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
21280 }
21281 Some(Keyword::COLLATION) => {
21282 self.expect_token(&Token::Eq)?;
21283 let name = self.parse_object_name(false)?;
21284 Ok(UserDefinedTypeRangeOption::Collation(name))
21285 }
21286 Some(Keyword::CANONICAL) => {
21287 self.expect_token(&Token::Eq)?;
21288 let name = self.parse_object_name(false)?;
21289 Ok(UserDefinedTypeRangeOption::Canonical(name))
21290 }
21291 Some(Keyword::SUBTYPE_DIFF) => {
21292 self.expect_token(&Token::Eq)?;
21293 let name = self.parse_object_name(false)?;
21294 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
21295 }
21296 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
21297 self.expect_token(&Token::Eq)?;
21298 let name = self.parse_object_name(false)?;
21299 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
21300 }
21301 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
21302 }
21303 }
21304
21305 fn parse_create_type_sql_definition_options(
21307 &mut self,
21308 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
21309 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
21310 }
21311
21312 fn parse_sql_definition_option(
21314 &mut self,
21315 ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
21316 let keyword = self.parse_one_of_keywords(&[
21317 Keyword::INPUT,
21318 Keyword::OUTPUT,
21319 Keyword::RECEIVE,
21320 Keyword::SEND,
21321 Keyword::TYPMOD_IN,
21322 Keyword::TYPMOD_OUT,
21323 Keyword::ANALYZE,
21324 Keyword::SUBSCRIPT,
21325 Keyword::INTERNALLENGTH,
21326 Keyword::PASSEDBYVALUE,
21327 Keyword::ALIGNMENT,
21328 Keyword::STORAGE,
21329 Keyword::LIKE,
21330 Keyword::CATEGORY,
21331 Keyword::PREFERRED,
21332 Keyword::DEFAULT,
21333 Keyword::ELEMENT,
21334 Keyword::DELIMITER,
21335 Keyword::COLLATABLE,
21336 ]);
21337
21338 match keyword {
21339 Some(Keyword::INPUT) => {
21340 self.expect_token(&Token::Eq)?;
21341 let name = self.parse_object_name(false)?;
21342 Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
21343 }
21344 Some(Keyword::OUTPUT) => {
21345 self.expect_token(&Token::Eq)?;
21346 let name = self.parse_object_name(false)?;
21347 Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
21348 }
21349 Some(Keyword::RECEIVE) => {
21350 self.expect_token(&Token::Eq)?;
21351 let name = self.parse_object_name(false)?;
21352 Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
21353 }
21354 Some(Keyword::SEND) => {
21355 self.expect_token(&Token::Eq)?;
21356 let name = self.parse_object_name(false)?;
21357 Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
21358 }
21359 Some(Keyword::TYPMOD_IN) => {
21360 self.expect_token(&Token::Eq)?;
21361 let name = self.parse_object_name(false)?;
21362 Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
21363 }
21364 Some(Keyword::TYPMOD_OUT) => {
21365 self.expect_token(&Token::Eq)?;
21366 let name = self.parse_object_name(false)?;
21367 Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
21368 }
21369 Some(Keyword::ANALYZE) => {
21370 self.expect_token(&Token::Eq)?;
21371 let name = self.parse_object_name(false)?;
21372 Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
21373 }
21374 Some(Keyword::SUBSCRIPT) => {
21375 self.expect_token(&Token::Eq)?;
21376 let name = self.parse_object_name(false)?;
21377 Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
21378 }
21379 Some(Keyword::INTERNALLENGTH) => {
21380 self.expect_token(&Token::Eq)?;
21381 if self.parse_keyword(Keyword::VARIABLE) {
21382 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
21383 UserDefinedTypeInternalLength::Variable,
21384 ))
21385 } else {
21386 let value = self.parse_literal_uint()?;
21387 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
21388 UserDefinedTypeInternalLength::Fixed(value),
21389 ))
21390 }
21391 }
21392 Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
21393 Some(Keyword::ALIGNMENT) => {
21394 self.expect_token(&Token::Eq)?;
21395 let align_keyword = self.parse_one_of_keywords(&[
21396 Keyword::CHAR,
21397 Keyword::INT2,
21398 Keyword::INT4,
21399 Keyword::DOUBLE,
21400 ]);
21401 match align_keyword {
21402 Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21403 Alignment::Char,
21404 )),
21405 Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21406 Alignment::Int2,
21407 )),
21408 Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21409 Alignment::Int4,
21410 )),
21411 Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21412 Alignment::Double,
21413 )),
21414 _ => self.expected_ref(
21415 "alignment value (char, int2, int4, or double)",
21416 self.peek_token_ref(),
21417 ),
21418 }
21419 }
21420 Some(Keyword::STORAGE) => {
21421 self.expect_token(&Token::Eq)?;
21422 let storage_keyword = self.parse_one_of_keywords(&[
21423 Keyword::PLAIN,
21424 Keyword::EXTERNAL,
21425 Keyword::EXTENDED,
21426 Keyword::MAIN,
21427 ]);
21428 match storage_keyword {
21429 Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21430 UserDefinedTypeStorage::Plain,
21431 )),
21432 Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21433 UserDefinedTypeStorage::External,
21434 )),
21435 Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21436 UserDefinedTypeStorage::Extended,
21437 )),
21438 Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21439 UserDefinedTypeStorage::Main,
21440 )),
21441 _ => self.expected_ref(
21442 "storage value (plain, external, extended, or main)",
21443 self.peek_token_ref(),
21444 ),
21445 }
21446 }
21447 Some(Keyword::LIKE) => {
21448 self.expect_token(&Token::Eq)?;
21449 let name = self.parse_object_name(false)?;
21450 Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
21451 }
21452 Some(Keyword::CATEGORY) => {
21453 self.expect_token(&Token::Eq)?;
21454 let category_str = self.parse_literal_string()?;
21455 let category_char = category_str.chars().next().ok_or_else(|| {
21456 ParserError::ParserError(
21457 "CATEGORY value must be a single character".to_string(),
21458 )
21459 })?;
21460 Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
21461 }
21462 Some(Keyword::PREFERRED) => {
21463 self.expect_token(&Token::Eq)?;
21464 let value =
21465 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
21466 Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
21467 }
21468 Some(Keyword::DEFAULT) => {
21469 self.expect_token(&Token::Eq)?;
21470 let expr = self.parse_expr()?;
21471 Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
21472 }
21473 Some(Keyword::ELEMENT) => {
21474 self.expect_token(&Token::Eq)?;
21475 let data_type = self.parse_data_type()?;
21476 Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
21477 }
21478 Some(Keyword::DELIMITER) => {
21479 self.expect_token(&Token::Eq)?;
21480 let delimiter = self.parse_literal_string()?;
21481 Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
21482 }
21483 Some(Keyword::COLLATABLE) => {
21484 self.expect_token(&Token::Eq)?;
21485 let value =
21486 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
21487 Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
21488 }
21489 _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
21490 }
21491 }
21492
21493 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
21494 self.expect_token(&Token::LParen)?;
21495 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21496 self.expect_token(&Token::RParen)?;
21497 Ok(idents)
21498 }
21499
21500 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
21501 if dialect_of!(self is MySqlDialect | GenericDialect) {
21502 if self.parse_keyword(Keyword::FIRST) {
21503 Ok(Some(MySQLColumnPosition::First))
21504 } else if self.parse_keyword(Keyword::AFTER) {
21505 let ident = self.parse_identifier()?;
21506 Ok(Some(MySQLColumnPosition::After(ident)))
21507 } else {
21508 Ok(None)
21509 }
21510 } else {
21511 Ok(None)
21512 }
21513 }
21514
21515 fn parse_print(&mut self) -> Result<Statement, ParserError> {
21517 Ok(Statement::Print(PrintStatement {
21518 message: Box::new(self.parse_expr()?),
21519 }))
21520 }
21521
21522 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
21526 let wait_type = if self.parse_keyword(Keyword::DELAY) {
21527 WaitForType::Delay
21528 } else if self.parse_keyword(Keyword::TIME) {
21529 WaitForType::Time
21530 } else {
21531 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
21532 };
21533 let expr = self.parse_expr()?;
21534 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
21535 }
21536
21537 fn parse_return(&mut self) -> Result<Statement, ParserError> {
21539 match self.maybe_parse(|p| p.parse_expr())? {
21540 Some(expr) => Ok(Statement::Return(ReturnStatement {
21541 value: Some(ReturnStatementValue::Expr(expr)),
21542 })),
21543 None => Ok(Statement::Return(ReturnStatement { value: None })),
21544 }
21545 }
21546
21547 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
21551 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
21552
21553 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
21554 Some(self.parse_object_name(false)?)
21555 } else {
21556 None
21557 };
21558 self.expect_keyword(Keyword::OPTIONS)?;
21559 self.expect_token(&Token::LParen)?;
21560 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
21561 self.expect_token(&Token::RParen)?;
21562 self.expect_keyword(Keyword::AS)?;
21563 let query = self.parse_query()?;
21564 Ok(Statement::ExportData(ExportData {
21565 options,
21566 query,
21567 connection,
21568 }))
21569 }
21570
21571 fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
21572 self.expect_keyword(Keyword::VACUUM)?;
21573 let full = self.parse_keyword(Keyword::FULL);
21574 let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
21575 let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
21576 let reindex = self.parse_keyword(Keyword::REINDEX);
21577 let recluster = self.parse_keyword(Keyword::RECLUSTER);
21578 let (table_name, threshold, boost) =
21579 match self.maybe_parse(|p| p.parse_object_name(false))? {
21580 Some(table_name) => {
21581 let threshold = if self.parse_keyword(Keyword::TO) {
21582 let value = self.parse_value()?;
21583 self.expect_keyword(Keyword::PERCENT)?;
21584 Some(value)
21585 } else {
21586 None
21587 };
21588 let boost = self.parse_keyword(Keyword::BOOST);
21589 (Some(table_name), threshold, boost)
21590 }
21591 _ => (None, None, false),
21592 };
21593 Ok(Statement::Vacuum(VacuumStatement {
21594 full,
21595 sort_only,
21596 delete_only,
21597 reindex,
21598 recluster,
21599 table_name,
21600 threshold,
21601 boost,
21602 }))
21603 }
21604
    /// Consumes the parser and returns its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
21609
    /// Returns `true` if the next token starts a subquery (a `SELECT` or
    /// `WITH` keyword), without consuming any input.
    fn peek_sub_query(&mut self) -> bool {
        self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
            .is_some()
    }
21615
21616 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
21617 let show_in;
21618 let mut filter_position = None;
21619 if self.dialect.supports_show_like_before_in() {
21620 if let Some(filter) = self.parse_show_statement_filter()? {
21621 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
21622 }
21623 show_in = self.maybe_parse_show_stmt_in()?;
21624 } else {
21625 show_in = self.maybe_parse_show_stmt_in()?;
21626 if let Some(filter) = self.parse_show_statement_filter()? {
21627 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
21628 }
21629 }
21630 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
21631 let limit = self.maybe_parse_show_stmt_limit()?;
21632 let from = self.maybe_parse_show_stmt_from()?;
21633 Ok(ShowStatementOptions {
21634 filter_position,
21635 show_in,
21636 starts_with,
21637 limit,
21638 limit_from: from,
21639 })
21640 }
21641
21642 fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
21643 let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
21644 Some(Keyword::FROM) => ShowStatementInClause::FROM,
21645 Some(Keyword::IN) => ShowStatementInClause::IN,
21646 None => return Ok(None),
21647 _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
21648 };
21649
21650 let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
21651 Keyword::ACCOUNT,
21652 Keyword::DATABASE,
21653 Keyword::SCHEMA,
21654 Keyword::TABLE,
21655 Keyword::VIEW,
21656 ]) {
21657 Some(Keyword::DATABASE)
21659 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
21660 | self.peek_keyword(Keyword::LIMIT) =>
21661 {
21662 (Some(ShowStatementInParentType::Database), None)
21663 }
21664 Some(Keyword::SCHEMA)
21665 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
21666 | self.peek_keyword(Keyword::LIMIT) =>
21667 {
21668 (Some(ShowStatementInParentType::Schema), None)
21669 }
21670 Some(parent_kw) => {
21671 let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
21675 match parent_kw {
21676 Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
21677 Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
21678 Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
21679 Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
21680 Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
21681 _ => {
21682 return self.expected_ref(
21683 "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
21684 self.peek_token_ref(),
21685 )
21686 }
21687 }
21688 }
21689 None => {
21690 let mut parent_name = self.parse_object_name(false)?;
21693 if self
21694 .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
21695 .is_some()
21696 {
21697 parent_name
21698 .0
21699 .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
21700 }
21701 (None, Some(parent_name))
21702 }
21703 };
21704
21705 Ok(Some(ShowStatementIn {
21706 clause,
21707 parent_type,
21708 parent_name,
21709 }))
21710 }
21711
21712 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21713 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
21714 Ok(Some(self.parse_value()?))
21715 } else {
21716 Ok(None)
21717 }
21718 }
21719
21720 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
21721 if self.parse_keyword(Keyword::LIMIT) {
21722 Ok(self.parse_limit()?)
21723 } else {
21724 Ok(None)
21725 }
21726 }
21727
21728 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21729 if self.parse_keyword(Keyword::FROM) {
21730 Ok(Some(self.parse_value()?))
21731 } else {
21732 Ok(None)
21733 }
21734 }
21735
    /// Returns `true` while the parser state is `ColumnDefinition`, i.e. it
    /// is currently inside a column definition.
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
21739
21740 pub(crate) fn parse_key_value_options(
21745 &mut self,
21746 parenthesized: bool,
21747 end_words: &[Keyword],
21748 ) -> Result<KeyValueOptions, ParserError> {
21749 let mut options: Vec<KeyValueOption> = Vec::new();
21750 let mut delimiter = KeyValueOptionsDelimiter::Space;
21751 if parenthesized {
21752 self.expect_token(&Token::LParen)?;
21753 }
21754 loop {
21755 match self.next_token().token {
21756 Token::RParen => {
21757 if parenthesized {
21758 break;
21759 } else {
21760 return self.expected_ref(" another option or EOF", self.peek_token_ref());
21761 }
21762 }
21763 Token::EOF | Token::SemiColon => break,
21764 Token::Comma => {
21765 delimiter = KeyValueOptionsDelimiter::Comma;
21766 continue;
21767 }
21768 Token::Word(w) if !end_words.contains(&w.keyword) => {
21769 options.push(self.parse_key_value_option(&w)?)
21770 }
21771 Token::Word(w) if end_words.contains(&w.keyword) => {
21772 self.prev_token();
21773 break;
21774 }
21775 _ => {
21776 return self.expected_ref(
21777 "another option, EOF, SemiColon, Comma or ')'",
21778 self.peek_token_ref(),
21779 )
21780 }
21781 };
21782 }
21783
21784 Ok(KeyValueOptions { delimiter, options })
21785 }
21786
    /// Parses the value part of a single `key = value` option; `key` has
    /// already been consumed by the caller.
    ///
    /// Accepted value shapes: a single-quoted string, TRUE/FALSE, a number,
    /// a bare word (kept as a placeholder value), a parenthesized list of
    /// values, or a nested parenthesized option list.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            // 'string' value
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // TRUE / FALSE boolean value
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            // numeric value
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word: consume it and keep it as a placeholder
            // value carrying the word's original span.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            // Parenthesized value: first try a comma-separated value list;
            // if that fails, `maybe_parse` rewinds and the parentheses are
            // re-read as a nested option list.
            Token::LParen => {
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
21845
21846 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
21848 if self.parse_keyword(Keyword::ALL) {
21849 return Ok(ResetStatement { reset: Reset::ALL });
21850 }
21851
21852 let obj = self.parse_object_name(false)?;
21853 Ok(ResetStatement {
21854 reset: Reset::ConfigurationParameter(obj),
21855 })
21856 }
21857}
21858
21859fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
21860 if let Some(prefix) = prefix {
21861 Expr::Prefixed {
21862 prefix,
21863 value: Box::new(expr),
21864 }
21865 } else {
21866 expr
21867 }
21868}
21869
21870impl Word {
21871 pub fn to_ident(&self, span: Span) -> Ident {
21877 Ident {
21878 value: self.value.clone(),
21879 quote_style: self.quote_style,
21880 span,
21881 }
21882 }
21883
21884 pub fn into_ident(self, span: Span) -> Ident {
21889 Ident {
21890 value: self.value,
21891 quote_style: self.quote_style,
21892 span,
21893 }
21894 }
21895}
21896
21897#[cfg(test)]
21898mod tests {
21899 use crate::test_utils::{all_dialects, TestedDialects};
21900
21901 use super::*;
21902
    #[test]
    fn test_prev_index() {
        // Walks forward and backward through a two-token statement, checking
        // that peek_token/next_token/prev_token stay consistent, including
        // around EOF (where next_token keeps returning EOF).
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
21923
    #[test]
    fn test_peek_tokens() {
        // peek_tokens returns a fixed-size lookahead array without consuming
        // input; past the end of input it pads with EOF tokens.
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Advance past `SELECT foo AS bar` so the next peek starts at FROM.
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
21968
21969 #[cfg(test)]
21970 mod test_parse_data_type {
21971 use crate::ast::{
21972 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
21973 };
21974 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
21975 use crate::test_utils::TestedDialects;
21976
        /// Parses `$input` with `$dialect`, asserting both that the resulting
        /// `DataType` equals `$expected_type` and that it serializes back to
        /// the original SQL text (round-trip).
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
21986
        #[test]
        fn test_ansii_character_string_types() {
            // ANSI character string types: CHARACTER / CHAR / VARCHAR and the
            // VARYING forms, with optional length and optional length unit
            // (CHARACTERS or OCTETS), each round-tripped through the parser.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
22114
#[test]
fn test_ansii_character_large_object_types() {
    // ANSI character large object types, checked against both the generic and
    // ANSI dialects: CHARACTER LARGE OBJECT / CHAR LARGE OBJECT / CLOB, each
    // with and without an explicit length.
    let dialects =
        TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

    test_parse_data_type!(
        dialects,
        "CHARACTER LARGE OBJECT",
        DataType::CharacterLargeObject(None)
    );
    test_parse_data_type!(
        dialects,
        "CHARACTER LARGE OBJECT(20)",
        DataType::CharacterLargeObject(Some(20))
    );

    test_parse_data_type!(dialects, "CHAR LARGE OBJECT", DataType::CharLargeObject(None));
    test_parse_data_type!(
        dialects,
        "CHAR LARGE OBJECT(20)",
        DataType::CharLargeObject(Some(20))
    );

    test_parse_data_type!(dialects, "CLOB", DataType::Clob(None));
    test_parse_data_type!(dialects, "CLOB(20)", DataType::Clob(Some(20)));
}
22146
#[test]
fn test_parse_custom_types() {
    // Type names the parser does not recognize become DataType::Custom,
    // optionally carrying the raw modifier strings from the argument list.
    let dialects =
        TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

    // Fresh ObjectName per case (ObjectName is not Copy).
    let geometry = || ObjectName::from(vec!["GEOMETRY".into()]);

    test_parse_data_type!(dialects, "GEOMETRY", DataType::Custom(geometry(), vec![]));

    test_parse_data_type!(
        dialects,
        "GEOMETRY(POINT)",
        DataType::Custom(geometry(), vec!["POINT".to_string()])
    );

    test_parse_data_type!(
        dialects,
        "GEOMETRY(POINT, 4326)",
        DataType::Custom(geometry(), vec!["POINT".to_string(), "4326".to_string()])
    );
}
22176
#[test]
fn test_ansii_exact_numeric_types() {
    // Exact numeric types (NUMERIC / DECIMAL / DEC) in all three modifier
    // forms — bare, precision only, precision + scale — including negative
    // scales, which round-trip verbatim.
    let dialects = TestedDialects::new(vec![
        Box::new(GenericDialect {}),
        Box::new(AnsiDialect {}),
        Box::new(PostgreSqlDialect {}),
    ]);

    test_parse_data_type!(dialects, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
    test_parse_data_type!(
        dialects,
        "NUMERIC(2)",
        DataType::Numeric(ExactNumberInfo::Precision(2))
    );
    test_parse_data_type!(
        dialects,
        "NUMERIC(2,10)",
        DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
    );

    test_parse_data_type!(dialects, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
    test_parse_data_type!(
        dialects,
        "DECIMAL(2)",
        DataType::Decimal(ExactNumberInfo::Precision(2))
    );
    test_parse_data_type!(
        dialects,
        "DECIMAL(2,10)",
        DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
    );

    test_parse_data_type!(dialects, "DEC", DataType::Dec(ExactNumberInfo::None));
    test_parse_data_type!(dialects, "DEC(2)", DataType::Dec(ExactNumberInfo::Precision(2)));
    test_parse_data_type!(
        dialects,
        "DEC(2,10)",
        DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
    );

    // Negative scales are preserved exactly on display.
    test_parse_data_type!(
        dialects,
        "NUMERIC(10,-2)",
        DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
    );
    test_parse_data_type!(
        dialects,
        "DECIMAL(1000,-10)",
        DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
    );
    test_parse_data_type!(
        dialects,
        "DEC(5,-1000)",
        DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
    );
    test_parse_data_type!(
        dialects,
        "NUMERIC(10,-5)",
        DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
    );
    test_parse_data_type!(
        dialects,
        "DECIMAL(20,-10)",
        DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
    );
    test_parse_data_type!(
        dialects,
        "DEC(5,-2)",
        DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
    );

    // An explicit plus sign on the scale is accepted but not round-tripped
    // (`NUMERIC(10,+5)` displays as `NUMERIC(10,5)`), so this case cannot go
    // through the round-trip macro above.
    dialects.run_parser_method("NUMERIC(10,+5)", |parser| {
        let parsed = parser.parse_data_type().unwrap();
        assert_eq!(
            parsed,
            DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5))
        );
        assert_eq!(parsed.to_string(), "NUMERIC(10,5)");
    });
}
22275
#[test]
fn test_ansii_date_type() {
    // DATE, TIME, and TIMESTAMP with optional fractional-seconds precision
    // and the WITH / WITHOUT TIME ZONE qualifiers.
    let dialects =
        TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

    test_parse_data_type!(dialects, "DATE", DataType::Date);

    test_parse_data_type!(dialects, "TIME", DataType::Time(None, TimezoneInfo::None));
    test_parse_data_type!(dialects, "TIME(6)", DataType::Time(Some(6), TimezoneInfo::None));
    test_parse_data_type!(
        dialects,
        "TIME WITH TIME ZONE",
        DataType::Time(None, TimezoneInfo::WithTimeZone)
    );
    test_parse_data_type!(
        dialects,
        "TIME(6) WITH TIME ZONE",
        DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
    );
    test_parse_data_type!(
        dialects,
        "TIME WITHOUT TIME ZONE",
        DataType::Time(None, TimezoneInfo::WithoutTimeZone)
    );
    test_parse_data_type!(
        dialects,
        "TIME(6) WITHOUT TIME ZONE",
        DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
    );

    test_parse_data_type!(
        dialects,
        "TIMESTAMP",
        DataType::Timestamp(None, TimezoneInfo::None)
    );
    test_parse_data_type!(
        dialects,
        "TIMESTAMP(22)",
        DataType::Timestamp(Some(22), TimezoneInfo::None)
    );
    test_parse_data_type!(
        dialects,
        "TIMESTAMP(22) WITH TIME ZONE",
        DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
    );
    test_parse_data_type!(
        dialects,
        "TIMESTAMP(33) WITHOUT TIME ZONE",
        DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
    );
}
22340 }
22341
#[test]
fn test_parse_schema_name() {
    // Parses a `<schema name>` clause and checks both the resulting AST and
    // that the AST displays back to the exact input text.
    macro_rules! test_parse_schema_name {
        ($input:expr, $expected_name:expr $(,)?) => {{
            all_dialects().run_parser_method(&*$input, |parser| {
                let parsed = parser.parse_schema_name().unwrap();
                assert_eq!(parsed, $expected_name);
                assert_eq!(parsed.to_string(), $input.to_string());
            });
        }};
    }

    let name = ObjectName::from(vec![Ident::new("dummy_name")]);
    let authorization = Ident::new("dummy_authorization");

    // Bare name, AUTHORIZATION only, and name + AUTHORIZATION.
    test_parse_schema_name!(format!("{name}"), SchemaName::Simple(name.clone()));
    test_parse_schema_name!(
        format!("AUTHORIZATION {authorization}"),
        SchemaName::UnnamedAuthorization(authorization.clone()),
    );
    test_parse_schema_name!(
        format!("{name} AUTHORIZATION {authorization}"),
        SchemaName::NamedAuthorization(name.clone(), authorization.clone()),
    );
}
22374
#[test]
fn mysql_parse_index_table_constraint() {
    // MySQL inline INDEX / KEY table constraints: optional name, optional
    // USING BTREE|HASH, and one or more indexed columns. Each case checks
    // both the parsed AST and a lossless round-trip back to the input text.
    macro_rules! test_parse_table_constraint {
        ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
            $dialect.run_parser_method(&*$input, |parser| {
                let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                assert_eq!(constraint, $expected);
                assert_eq!(constraint.to_string(), $input.to_string());
            });
        }};
    }

    // Builds an index column with no ordering options and no operator class.
    fn mk_expected_col(name: &str) -> IndexColumn {
        IndexColumn {
            column: OrderByExpr {
                expr: Expr::Identifier(name.into()),
                options: OrderByOptions {
                    asc: None,
                    nulls_first: None,
                },
                with_fill: None,
            },
            operator_class: None,
        }
    }

    let dialect =
        TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

    test_parse_table_constraint!(
        dialect,
        "INDEX (c1)",
        IndexConstraint {
            display_as_key: false,
            name: None,
            index_type: None,
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    test_parse_table_constraint!(
        dialect,
        "KEY (c1)",
        IndexConstraint {
            display_as_key: true,
            name: None,
            index_type: None,
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    // Consistency: use `.into()` like every other case rather than spelling
    // out `TableConstraint::Index(..)` — the expected value is identical.
    test_parse_table_constraint!(
        dialect,
        "INDEX 'index' (c1, c2)",
        IndexConstraint {
            display_as_key: false,
            name: Some(Ident::with_quote('\'', "index")),
            index_type: None,
            columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
            index_options: vec![],
        }
        .into()
    );

    test_parse_table_constraint!(
        dialect,
        "INDEX USING BTREE (c1)",
        IndexConstraint {
            display_as_key: false,
            name: None,
            index_type: Some(IndexType::BTree),
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    test_parse_table_constraint!(
        dialect,
        "INDEX USING HASH (c1)",
        IndexConstraint {
            display_as_key: false,
            name: None,
            index_type: Some(IndexType::Hash),
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    test_parse_table_constraint!(
        dialect,
        "INDEX idx_name USING BTREE (c1)",
        IndexConstraint {
            display_as_key: false,
            name: Some(Ident::new("idx_name")),
            index_type: Some(IndexType::BTree),
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    test_parse_table_constraint!(
        dialect,
        "INDEX idx_name USING HASH (c1)",
        IndexConstraint {
            display_as_key: false,
            name: Some(Ident::new("idx_name")),
            index_type: Some(IndexType::Hash),
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );
}
22496
#[test]
fn test_tokenizer_error_loc() {
    // Tokenizer errors must report the line and column of the offending
    // input — here the unterminated string literal starting at column 5.
    let result = Parser::parse_sql(&GenericDialect, "foo '");
    assert_eq!(
        result,
        Err(ParserError::TokenizerError(
            "Unterminated string literal at Line: 1, Column: 5".to_string()
        ))
    );
}
22508
#[test]
fn test_parser_error_loc() {
    // Parser errors must include the location of the unexpected token.
    let result = Parser::parse_sql(&GenericDialect, "SELECT this is a syntax error");
    assert_eq!(
        result,
        Err(ParserError::ParserError(
            "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
                .to_string()
        ))
    );
}
22521
#[test]
fn test_nested_explain_error() {
    // EXPLAIN cannot wrap another EXPLAIN; the parser rejects the nesting
    // with a dedicated error message.
    let result = Parser::parse_sql(&GenericDialect, "EXPLAIN EXPLAIN SELECT 1");
    assert_eq!(
        result,
        Err(ParserError::ParserError(
            "Explain must be root of the plan".to_string()
        ))
    );
}
22533
#[test]
fn test_parse_multipart_identifier_positive() {
    // Dotted identifiers split into their component parts, with quoting and
    // surrounding whitespace handled correctly.
    let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

    // A quoted part may contain punctuation, dots, and escaped quotes.
    dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
        let parts = parser.parse_multipart_identifier().unwrap();
        assert_eq!(
            parts,
            vec![
                Ident::new("CATALOG"),
                Ident::with_quote('"', "F(o)o. \"bar"),
                Ident::new("table"),
            ]
        );
    });

    // Whitespace around the separating periods is ignored.
    dialect.run_parser_method("CATALOG . table", |parser| {
        let parts = parser.parse_multipart_identifier().unwrap();
        assert_eq!(parts, vec![Ident::new("CATALOG"), Ident::new("table")]);
    });
}
22579
#[test]
fn test_parse_multipart_identifier_negative() {
    // Malformed multipart identifiers must each fail with a specific,
    // descriptive parser error.
    let cases = [
        ("", "sql parser error: Empty input when parsing identifier"),
        (
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        ),
        (
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        ),
        (
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        ),
        (
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        ),
    ];

    for (input, expected_err) in cases {
        all_dialects().run_parser_method(input, |parser| {
            let actual_err = parser.parse_multipart_identifier().unwrap_err();
            assert_eq!(actual_err.to_string(), expected_err);
        });
    }
}
22616
#[test]
fn test_mysql_partition_selection() {
    // MySQL `PARTITION (...)` selection on a table reference: the named
    // partitions must survive into the AST.
    //
    // Bug fix: the original only panicked when the statement was not a Query;
    // if the body was not a Select, or the table factor was not a plain
    // Table, the inner `if let`s silently matched nothing and the test passed
    // vacuously without ever asserting. Every non-matching branch now fails
    // loudly.
    let sql = "SELECT * FROM employees PARTITION (p0, p2)";
    let expected = vec!["p0", "p2"];

    let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    assert_eq!(ast.len(), 1);
    if let Statement::Query(query) = &ast[0] {
        if let SetExpr::Select(select) = &*query.body {
            assert_eq!(select.from.len(), 1);
            let from: &TableWithJoins = &select.from[0];
            if let TableFactor::Table { partitions, .. } = &from.relation {
                let actual: Vec<&str> = partitions
                    .iter()
                    .map(|ident| ident.value.as_str())
                    .collect();
                assert_eq!(expected, actual);
            } else {
                panic!("expected a plain table factor with partitions");
            }
        } else {
            panic!("expected the query body to be a SELECT");
        }
    } else {
        panic!("fail to parse mysql partition selection");
    }
}
22641
#[test]
fn test_replace_into_placeholders() {
    // `&a` is not a valid placeholder token, so the REPLACE statement must
    // be rejected.
    let result = Parser::parse_sql(&GenericDialect {}, "REPLACE INTO t (a) VALUES (&a)");
    assert!(result.is_err());
}
22648
#[test]
fn test_replace_into_set_placeholder() {
    // A bare placeholder is not a valid SET assignment list.
    let result = Parser::parse_sql(&GenericDialect {}, "REPLACE INTO t SET ?");
    assert!(result.is_err());
}
22655
#[test]
fn test_replace_incomplete() {
    // A lone REPLACE keyword with nothing following it must not parse.
    let result = Parser::parse_sql(&MySqlDialect {}, "REPLACE");
    assert!(result.is_err());
}
22662
#[test]
fn test_placeholder_invalid_whitespace() {
    // A `:` placeholder marker may not be separated from its name, whether
    // by plain whitespace or by a comment.
    for separator in [" ", "/*invalid*/"] {
        let sql = format!("\nSELECT\n :{separator}fooBar");
        assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
    }
}
22670}