1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while parsing SQL text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could proceed.
    TokenizerError(String),
    /// The token stream did not match the expected grammar.
    ParserError(String),
    /// The configured recursion depth limit was reached.
    RecursionLimitExceeded,
}
60
// Builds an `Err(ParserError::ParserError)` whose message is `$MSG`
// followed by the source location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
68mod alter;
69mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks how many further levels of recursion the parser may enter.
    /// The count is shared (via `Rc<Cell<_>>`) with the guards handed out
    /// by [`RecursionCounter::try_decrease`].
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` further levels.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consumes one level of depth and returns a guard that restores
        /// it when dropped; fails with `RecursionLimitExceeded` when the
        /// budget is exhausted.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(new_depth) => {
                    self.remaining_depth.set(new_depth);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// RAII guard that re-credits one level of depth on drop.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            self.remaining_depth.set(self.remaining_depth.get() + 1);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op recursion tracker used without `std` (no `Rc`/`Cell`);
    /// recursion depth is not limited in this configuration.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The requested depth is ignored; tracking is disabled.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds, returning a do-nothing guard.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard; dropping it has no effect.
    pub struct DepthGuard {}
}
158
/// Whether a syntactic element may be omitted by the caller.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
167
/// Whether a derived table was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// Result of parsing a select item that may be a wildcard:
/// a regular expression, a qualified `alias.*`, or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
185
186impl From<TokenizerError> for ParserError {
187 fn from(e: TokenizerError) -> Self {
188 ParserError::TokenizerError(e.to_string())
189 }
190}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// Marker impl; using `core::error::Error` keeps the type usable in `no_std` builds.
impl core::error::Error for ParserError {}
207
/// Default recursion depth budget used by `Parser::new`.
const DEFAULT_REMAINING_DEPTH: usize = 50;
210
/// Sentinel returned when reading past the end of the token stream; its
/// span is the zero location since it corresponds to no source text.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
/// Newtype flag recording whether a trailing closing bracket was matched.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        MatchedTrailingBracket(value)
    }
}
239
/// Options that change how the parser interprets its input.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept a trailing comma at the end of comma-separated lists.
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization (passed through to the
    /// tokenizer in `try_with_sql`).
    pub unescape: bool,
    /// Require a semicolon between statements in multi-statement input.
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Creates options with the default settings.
    pub fn new() -> Self {
        Default::default()
    }

    /// Sets whether trailing commas are accepted.
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Sets whether string literals are unescaped during tokenization.
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }

    /// Sets whether a semicolon is required between statements.
    ///
    /// Added for consistency: the other two options already have `with_*`
    /// builders, while this field previously had to be set directly.
    pub fn with_require_semicolon_stmt_delimiter(
        mut self,
        require_semicolon_stmt_delimiter: bool,
    ) -> Self {
        self.require_semicolon_stmt_delimiter = require_semicolon_stmt_delimiter;
        self
    }
}
292
/// Internal parser modes that alter how certain tokens are interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing with no special rules.
    Normal,
    /// Parsing a `CONNECT BY` clause — NOTE(review): not referenced in this
    /// chunk; presumably changes keyword handling elsewhere — confirm.
    ConnectBy,
    /// Inside a column definition; `parse_subexpr` skips the `COLLATE`
    /// expression suffix in this state (see `in_column_definition_state`).
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream and builds AST statements
/// according to a specific [`Dialect`].
pub struct Parser<'a> {
    /// The tokens being parsed (whitespace/comment tokens included —
    /// `into_comments` later scans them).
    tokens: Vec<TokenWithSpan>,
    /// Index of the first unprocessed token in `tokens`.
    index: usize,
    /// Current parsing mode (see [`ParserState`]).
    state: ParserState,
    /// Dialect that customizes grammar decisions.
    dialect: &'a dyn Dialect,
    /// Behavior switches (trailing commas, unescaping, delimiters).
    options: ParserOptions,
    /// Guards against stack overflow on deeply nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
365 pub fn new(dialect: &'a dyn Dialect) -> Self {
381 Self {
382 tokens: vec![],
383 index: 0,
384 state: ParserState::Normal,
385 dialect,
386 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
387 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
388 }
389 }
390
    /// Replaces the recursion limit (default `DEFAULT_REMAINING_DEPTH`)
    /// used to reject pathologically nested input.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
417
    /// Replaces the parser options wholesale, overriding any
    /// dialect-derived defaults set by `Parser::new`.
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
444
    /// Supplies the token stream to parse and rewinds to its start.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
465 pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
472 debug!("Parsing sql '{sql}'...");
473 let tokens = Tokenizer::new(self.dialect, sql)
474 .with_unescape(self.options.unescape)
475 .tokenize_with_location()?;
476 Ok(self.with_tokens_with_locations(tokens))
477 }
478
    /// Parses zero or more statements separated by semicolons, stopping at
    /// EOF or at a bare `END` word following a completed statement.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any number of consecutive semicolons.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some configurations allow statements without `;` separators.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // `END` right after a statement terminates the list (e.g.
                // closing an enclosing block) rather than starting a new one.
                Token::Word(word)
                    if expecting_statement_delimiter && word.keyword == Keyword::END =>
                {
                    break;
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
    /// Convenience entry point: parses `sql` into statements for `dialect`.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
548
549 pub fn parse_sql_with_comments(
554 dialect: &'a dyn Dialect,
555 sql: &str,
556 ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
557 let mut p = Parser::new(dialect).try_with_sql(sql)?;
558 p.parse_statements().map(|stmts| (stmts, p.into_comments()))
559 }
560
561 fn into_comments(self) -> comments::Comments {
563 let mut comments = comments::Comments::default();
564 for t in self.tokens.into_iter() {
565 match t.token {
566 Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
567 comments.offer(comments::CommentWithSpan {
568 comment: comments::Comment::SingleLine {
569 content: comment,
570 prefix,
571 },
572 span: t.span,
573 });
574 }
575 Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
576 comments.offer(comments::CommentWithSpan {
577 comment: comments::Comment::MultiLine(comment),
578 span: t.span,
579 });
580 }
581 _ => {}
582 }
583 }
584 comments
585 }
586
    /// Parses a single top-level statement, dispatching on the leading token.
    ///
    /// The dialect gets the first chance to parse via
    /// `Dialect::parse_statement`; otherwise the leading keyword selects the
    /// statement sub-parser. Arms that call `prev_token` do so because their
    /// sub-parser expects to consume the leading keyword itself.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Bound recursion: statements can nest (e.g. EXPLAIN <statement>).
        let _guard = self.recursion_counter.try_decrease()?;

        // Dialect-specific statement syntax takes precedence.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                // ATTACH has DuckDB-specific syntax; other dialects use the
                // generic `ATTACH DATABASE` form.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // LISTEN/UNLISTEN/NOTIFY are gated on dialect support.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                Keyword::SECURITY => self.parse_security_label().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A leading `(` starts a parenthesized query.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
732
733 pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
737 let case_token = self.expect_keyword(Keyword::CASE)?;
738
739 let match_expr = if self.peek_keyword(Keyword::WHEN) {
740 None
741 } else {
742 Some(self.parse_expr()?)
743 };
744
745 self.expect_keyword_is(Keyword::WHEN)?;
746 let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
747 parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
748 })?;
749
750 let else_block = if self.parse_keyword(Keyword::ELSE) {
751 Some(self.parse_conditional_statement_block(&[Keyword::END])?)
752 } else {
753 None
754 };
755
756 let mut end_case_token = self.expect_keyword(Keyword::END)?;
757 if self.peek_keyword(Keyword::CASE) {
758 end_case_token = self.expect_keyword(Keyword::CASE)?;
759 }
760
761 Ok(CaseStatement {
762 case_token: AttachedToken(case_token),
763 match_expr,
764 when_blocks,
765 else_block,
766 end_case_token: AttachedToken(end_case_token),
767 })
768 }
769
770 pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
774 self.expect_keyword_is(Keyword::IF)?;
775 let if_block = self.parse_conditional_statement_block(&[
776 Keyword::ELSE,
777 Keyword::ELSEIF,
778 Keyword::END,
779 ])?;
780
781 let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
782 self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
783 parser.parse_conditional_statement_block(&[
784 Keyword::ELSEIF,
785 Keyword::ELSE,
786 Keyword::END,
787 ])
788 })?
789 } else {
790 vec![]
791 };
792
793 let else_block = if self.parse_keyword(Keyword::ELSE) {
794 Some(self.parse_conditional_statement_block(&[Keyword::END])?)
795 } else {
796 None
797 };
798
799 self.expect_keyword_is(Keyword::END)?;
800 let end_token = self.expect_keyword(Keyword::IF)?;
801
802 Ok(IfStatement {
803 if_block,
804 elseif_blocks,
805 else_block,
806 end_token: Some(AttachedToken(end_token)),
807 })
808 }
809
    /// Parses a `WHILE <condition> ... END` statement; the condition and
    /// body are handled by `parse_conditional_statement_block`.
    fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(WhileStatement { while_block })
    }
819
820 fn parse_conditional_statement_block(
828 &mut self,
829 terminal_keywords: &[Keyword],
830 ) -> Result<ConditionalStatementBlock, ParserError> {
831 let start_token = self.get_current_token().clone(); let mut then_token = None;
833
834 let condition = match &start_token.token {
835 Token::Word(w) if w.keyword == Keyword::ELSE => None,
836 Token::Word(w) if w.keyword == Keyword::WHILE => {
837 let expr = self.parse_expr()?;
838 Some(expr)
839 }
840 _ => {
841 let expr = self.parse_expr()?;
842 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
843 Some(expr)
844 }
845 };
846
847 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
848
849 Ok(ConditionalStatementBlock {
850 start_token: AttachedToken(start_token),
851 condition,
852 then_token,
853 conditional_statements,
854 })
855 }
856
857 pub(crate) fn parse_conditional_statements(
860 &mut self,
861 terminal_keywords: &[Keyword],
862 ) -> Result<ConditionalStatements, ParserError> {
863 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
864 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
865 let statements = self.parse_statement_list(terminal_keywords)?;
866 let end_token = self.expect_keyword(Keyword::END)?;
867
868 ConditionalStatements::BeginEnd(BeginEndStatements {
869 begin_token: AttachedToken(begin_token),
870 statements,
871 end_token: AttachedToken(end_token),
872 })
873 } else {
874 ConditionalStatements::Sequence {
875 statements: self.parse_statement_list(terminal_keywords)?,
876 }
877 };
878 Ok(conditional_statements)
879 }
880
    /// Parses a `RAISE` statement: either `RAISE USING MESSAGE = <expr>`
    /// or `RAISE [<expr>]` (the bare expression is optional).
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // `maybe_parse` backtracks when no expression follows RAISE.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }
    /// Parses `COMMENT [IF EXISTS] ON <object-type> <name> ... IS {<string> | NULL}`.
    ///
    /// Function-like objects may carry an argument-type list; triggers and
    /// policies take an additional `ON <relation>` clause.
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        // Map the object-type keyword and parse the object's name.
        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::AGGREGATE => {
                (CommentObject::Aggregate, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLLATION => {
                (CommentObject::Collation, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DOMAIN => {
                (CommentObject::Domain, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::FUNCTION => {
                (CommentObject::Function, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::INDEX => {
                (CommentObject::Index, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
                // Two-word type: MATERIALIZED VIEW.
                self.expect_keyword_is(Keyword::VIEW)?;
                (
                    CommentObject::MaterializedView,
                    self.parse_object_name(false)?,
                )
            }
            Token::Word(w) if w.keyword == Keyword::POLICY => {
                (CommentObject::Policy, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
                (CommentObject::Procedure, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
                (CommentObject::Sequence, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TRIGGER => {
                (CommentObject::Trigger, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TYPE => {
                (CommentObject::Type, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::VIEW => {
                (CommentObject::View, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        // Function-like objects may be disambiguated by an argument-type list.
        let arguments = match object_type {
            CommentObject::Function | CommentObject::Procedure | CommentObject::Aggregate => {
                if self.consume_token(&Token::LParen) {
                    let args =
                        self.parse_comma_separated0(Self::parse_function_arg, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;
                    Some(args.into_iter().map(|a| a.data_type).collect())
                } else {
                    None
                }
            }
            _ => None,
        };

        // Aggregates must always be qualified by their argument list.
        if object_type == CommentObject::Aggregate && arguments.is_none() {
            return Err(ParserError::ParserError(
                "COMMENT ON AGGREGATE requires an argument list, e.g. AGGREGATE foo(int)".into(),
            ));
        }

        // Triggers and policies are scoped to a relation: `... ON <table>`.
        let relation = match object_type {
            CommentObject::Trigger | CommentObject::Policy => {
                self.expect_keyword_is(Keyword::ON)?;
                Some(self.parse_object_name(false)?)
            }
            _ => None,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` clears the comment; a string literal sets it.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            arguments,
            relation,
            comment,
            if_exists,
        })
    }
1013
    /// Parses MySQL's `FLUSH` statement; rejected for other dialects.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        // FLUSH is MySQL-specific (also allowed under the generic dialect).
        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!(
                "Unsupported statement FLUSH",
                self.peek_token_ref().span.start
            );
        }

        // Optional binlog-replication modifier.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // Determine which cache/log to flush; some variants take extra args.
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // `RELAY LOGS [FOR CHANNEL <name>]`
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // `TABLES [t1, t2, ...] [WITH READ LOCK] [FOR EXPORT]` — loop
            // until a non-word token ends the clause.
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A plain identifier starts the table-name list.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected_ref(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
            RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
1104
    /// Parses `MSCK [REPAIR] TABLE <name> [{ADD | DROP | SYNC} PARTITIONS]`.
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                // The whole clause is backtracked unless PARTITIONS follows.
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1132
    /// Parses `TRUNCATE [TABLE] [IF EXISTS] <name>[, ...] [PARTITION (...)]
    /// [{RESTART | CONTINUE} IDENTITY] [CASCADE | RESTRICT] [ON CLUSTER ...]`.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        // Each target may carry ONLY before the name and a trailing `*`.
        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // IDENTITY and CASCADE/RESTRICT options are PostgreSQL-specific.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1183
1184 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1185 if self.parse_keyword(Keyword::CASCADE) {
1186 Some(CascadeOption::Cascade)
1187 } else if self.parse_keyword(Keyword::RESTRICT) {
1188 Some(CascadeOption::Restrict)
1189 } else {
1190 None
1191 }
1192 }
1193
    /// Parses the optional parenthesized option list of DuckDB `ATTACH`:
    /// `(READ_ONLY [TRUE | FALSE], TYPE <ident>, ...)`. Returns an empty
    /// list when there is no opening parenthesis.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean value is optional; bare READ_ONLY yields None.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // Options are comma-separated; `)` terminates the list.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1230
1231 pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1233 let database = self.parse_keyword(Keyword::DATABASE);
1234 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1235 let database_path = self.parse_identifier()?;
1236 let database_alias = if self.parse_keyword(Keyword::AS) {
1237 Some(self.parse_identifier()?)
1238 } else {
1239 None
1240 };
1241
1242 let attach_options = self.parse_attach_duckdb_database_options()?;
1243 Ok(Statement::AttachDuckDBDatabase {
1244 if_not_exists,
1245 database,
1246 database_path,
1247 database_alias,
1248 attach_options,
1249 })
1250 }
1251
    /// Parses DuckDB's `DETACH [DATABASE] [IF EXISTS] <alias>`.
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1263
    /// Parses `ATTACH [DATABASE] <expr> AS <schema>` — the generic form
    /// used for non-DuckDB dialects (see `parse_statement`).
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1276
    /// Parses `ANALYZE [TABLE] [<name> [(col, ...)]]` with optional clauses
    /// `PARTITION (...)`, `FOR COLUMNS [...]`, `CACHE METADATA`, `NOSCAN`,
    /// and `COMPUTE STATISTICS`, in any order.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // Optional column list directly after the table name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // Accept the remaining clauses in any order until none matches.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // `FOR COLUMNS` may list columns or stand alone.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1341
    /// Parses an expression that may be a wildcard: `*`, `qualifier.*`
    /// (with an arbitrarily long dotted qualifier), or `(*)`. Anything else
    /// rewinds and parses as a regular expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember where we started so we can backtrack below.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            // `<ident>.` — possibly the start of a qualified wildcard.
            t @ (Token::Word(_) | Token::SingleQuotedString(_))
                if self.peek_token_ref().token == Token::Period =>
            {
                let mut id_parts: Vec<Ident> = vec![match t {
                    Token::Word(w) => w.into_ident(next_token.span),
                    Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                    // Unreachable: the outer pattern admits only the two
                    // token kinds handled above.
                    _ => {
                        return Err(ParserError::ParserError(
                            "Internal parser error: unexpected token type".to_string(),
                        ))
                    }
                }];

                // Collect further `.part` segments until `.*` or failure.
                while self.consume_token(&Token::Period) {
                    let next_token = self.next_token();
                    match next_token.token {
                        Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                        Token::SingleQuotedString(s) => {
                            id_parts.push(Ident::with_quote('\'', s))
                        }
                        Token::Placeholder(s) => {
                            id_parts.push(Ident::new(s))
                        }
                        Token::Mul => {
                            return Ok(Expr::QualifiedWildcard(
                                ObjectName::from(id_parts),
                                AttachedToken(next_token),
                            ));
                        }
                        _ => {
                            return self.expected("an identifier or a '*' after '.'", next_token);
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            // `(*)` is treated as a bare wildcard.
            Token::LParen => {
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse a normal expression.
        self.index = index;
        self.parse_expr()
    }
1404
1405 pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
1407 self.parse_subexpr(self.dialect.prec_unknown())
1408 }
1409
1410 pub fn parse_expr_with_alias_and_order_by(
1412 &mut self,
1413 ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1414 let expr = self.parse_expr()?;
1415
1416 fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1417 explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1418 }
1419 let alias = self.parse_optional_alias_inner(None, validator)?;
1420 let order_by = OrderByOptions {
1421 asc: self.parse_asc_desc(),
1422 nulls_first: None,
1423 };
1424 Ok(ExprWithAliasAndOrderBy {
1425 expr: ExprWithAlias { expr, alias },
1426 order_by,
1427 })
1428 }
1429
1430 #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
1432 pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
1433 let _guard = self.recursion_counter.try_decrease()?;
1434 debug!("parsing expr");
1435 let mut expr = self.parse_prefix()?;
1436
1437 expr = self.parse_compound_expr(expr, vec![])?;
1438
1439 if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1443 expr = Expr::Collate {
1444 expr: Box::new(expr),
1445 collation: self.parse_object_name(false)?,
1446 };
1447 }
1448
1449 debug!("prefix: {expr:?}");
1450 loop {
1451 let next_precedence = self.get_next_precedence()?;
1452 debug!("next precedence: {next_precedence:?}");
1453
1454 if precedence >= next_precedence {
1455 break;
1456 }
1457
1458 if Token::Period == self.peek_token_ref().token {
1461 break;
1462 }
1463
1464 expr = self.parse_infix(expr, next_precedence)?;
1465 }
1466 Ok(expr)
1467 }
1468
1469 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1471 let condition = self.parse_expr()?;
1472 let message = if self.parse_keyword(Keyword::AS) {
1473 Some(self.parse_expr()?)
1474 } else {
1475 None
1476 };
1477
1478 Ok(Statement::Assert { condition, message })
1479 }
1480
1481 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1483 let name = self.parse_identifier()?;
1484 Ok(Statement::Savepoint { name })
1485 }
1486
1487 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1489 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1490 let name = self.parse_identifier()?;
1491
1492 Ok(Statement::ReleaseSavepoint { name })
1493 }
1494
1495 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1497 let channel = self.parse_identifier()?;
1498 Ok(Statement::LISTEN { channel })
1499 }
1500
1501 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1503 let channel = if self.consume_token(&Token::Mul) {
1504 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1505 } else {
1506 match self.parse_identifier() {
1507 Ok(expr) => expr,
1508 _ => {
1509 self.prev_token();
1510 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1511 }
1512 }
1513 };
1514 Ok(Statement::UNLISTEN { channel })
1515 }
1516
1517 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1519 let channel = self.parse_identifier()?;
1520 let payload = if self.consume_token(&Token::Comma) {
1521 Some(self.parse_literal_string()?)
1522 } else {
1523 None
1524 };
1525 Ok(Statement::NOTIFY { channel, payload })
1526 }
1527
1528 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1530 if self.peek_keyword(Keyword::TABLE) {
1531 self.expect_keyword(Keyword::TABLE)?;
1532 let rename_tables = self.parse_comma_separated(|parser| {
1533 let old_name = parser.parse_object_name(false)?;
1534 parser.expect_keyword(Keyword::TO)?;
1535 let new_name = parser.parse_object_name(false)?;
1536
1537 Ok(RenameTable { old_name, new_name })
1538 })?;
1539 Ok(rename_tables.into())
1540 } else {
1541 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1542 }
1543 }
1544
    /// Try to parse an expression prefix introduced by the reserved
    /// keyword `w` (spanning `w_span`).
    ///
    /// Returns `Ok(Some(expr))` when the keyword starts an expression,
    /// `Ok(None)` when the caller should fall back to treating the word
    /// as an unreserved word/identifier, and `Err` on malformed input.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Literal keywords: step back so `parse_value` sees the
            // token itself.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Postgres-style niladic "functions" — no parentheses, no
            // arguments.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Datetime functions that may appear with or without
            // parentheses.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // For Databricks, EXISTS is only an expression prefix when a
            // subquery (SELECT/WITH) follows; other dialects always
            // accept it here.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            // POSITION only acts as a function when followed by `(`.
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<subquery>)` — not supported by ClickHouse or
            // Databricks.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR <expr>` is only valid while parsing CONNECT BY.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // DuckDB-style `MAP { ... }` literal.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric type literals such as `POINT '(1,2)'`.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1671
1672 fn parse_expr_prefix_by_unreserved_word(
1674 &mut self,
1675 w: &Word,
1676 w_span: Span,
1677 ) -> Result<Expr, ParserError> {
1678 let is_outer_join = self.peek_outer_join_operator();
1679 match &self.peek_token_ref().token {
1680 Token::LParen if !is_outer_join => {
1681 let id_parts = vec![w.to_ident(w_span)];
1682 self.parse_function(ObjectName::from(id_parts))
1683 }
1684 Token::SingleQuotedString(_)
1686 | Token::DoubleQuotedString(_)
1687 | Token::HexStringLiteral(_)
1688 if w.value.starts_with('_') =>
1689 {
1690 Ok(Expr::Prefixed {
1691 prefix: w.to_ident(w_span),
1692 value: self.parse_introduced_string_expr()?.into(),
1693 })
1694 }
1695 Token::SingleQuotedString(_)
1697 | Token::DoubleQuotedString(_)
1698 | Token::HexStringLiteral(_)
1699 if w.value.starts_with('_') =>
1700 {
1701 Ok(Expr::Prefixed {
1702 prefix: w.to_ident(w_span),
1703 value: self.parse_introduced_string_expr()?.into(),
1704 })
1705 }
1706 Token::Arrow if self.dialect.supports_lambda_functions() => {
1710 self.expect_token(&Token::Arrow)?;
1711 Ok(Expr::Lambda(LambdaFunction {
1712 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1713 name: w.to_ident(w_span),
1714 data_type: None,
1715 }),
1716 body: Box::new(self.parse_expr()?),
1717 syntax: LambdaSyntax::Arrow,
1718 }))
1719 }
1720 Token::Word(_)
1724 if self.dialect.supports_lambda_functions()
1725 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1726 {
1727 let data_type = self.parse_data_type()?;
1728 self.expect_token(&Token::Arrow)?;
1729 Ok(Expr::Lambda(LambdaFunction {
1730 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1731 name: w.to_ident(w_span),
1732 data_type: Some(data_type),
1733 }),
1734 body: Box::new(self.parse_expr()?),
1735 syntax: LambdaSyntax::Arrow,
1736 }))
1737 }
1738 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1739 }
1740 }
1741
1742 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1745 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1746 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1747 } else {
1748 false
1749 }
1750 }
1751
1752 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1754 if let Some(prefix) = self.dialect.parse_prefix(self) {
1756 return prefix;
1757 }
1758
1759 let loc = self.peek_token_ref().span.start;
1776 let opt_expr = self.maybe_parse(|parser| {
1777 match parser.parse_data_type()? {
1778 DataType::Interval { .. } => parser.parse_interval(),
1779 DataType::Custom(ref name, ref modifiers)
1790 if modifiers.is_empty()
1791 && Self::is_simple_unquoted_object_name(name, "xml")
1792 && parser.dialect.supports_xml_expressions() =>
1793 {
1794 Ok(Expr::TypedString(TypedString {
1795 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1796 value: parser.parse_value()?,
1797 uses_odbc_syntax: false,
1798 }))
1799 }
1800 DataType::Custom(..) => parser_err!("dummy", loc),
1801 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1803 Ok(Expr::Cast {
1804 kind: CastKind::Cast,
1805 expr: Box::new(parser.parse_expr()?),
1806 data_type: DataType::Binary(None),
1807 array: false,
1808 format: None,
1809 })
1810 }
1811 data_type => Ok(Expr::TypedString(TypedString {
1812 data_type,
1813 value: parser.parse_value()?,
1814 uses_odbc_syntax: false,
1815 })),
1816 }
1817 })?;
1818
1819 if let Some(expr) = opt_expr {
1820 return Ok(expr);
1821 }
1822
1823 let dialect = self.dialect;
1827
1828 self.advance_token();
1829 let next_token_index = self.get_current_index();
1830 let next_token = self.get_current_token();
1831 let span = next_token.span;
1832 let expr = match &next_token.token {
1833 Token::Word(w) => {
1834 let w = w.clone();
1843 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1844 Ok(Some(expr)) => Ok(expr),
1846
1847 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1849
1850 Err(e) => {
1857 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1858 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1859 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1860 }) {
1861 return Ok(expr);
1862 }
1863 }
1864 return Err(e);
1865 }
1866 }
1867 } Token::LBracket => self.parse_array_expr(false),
1870 tok @ Token::Minus | tok @ Token::Plus => {
1871 let op = if *tok == Token::Plus {
1872 UnaryOperator::Plus
1873 } else {
1874 UnaryOperator::Minus
1875 };
1876 Ok(Expr::UnaryOp {
1877 op,
1878 expr: Box::new(
1879 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1880 ),
1881 })
1882 }
1883 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1884 op: UnaryOperator::BangNot,
1885 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1886 }),
1887 tok @ Token::DoubleExclamationMark
1888 | tok @ Token::PGSquareRoot
1889 | tok @ Token::PGCubeRoot
1890 | tok @ Token::AtSign
1891 if dialect_is!(dialect is PostgreSqlDialect) =>
1892 {
1893 let op = match tok {
1894 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1895 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1896 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1897 Token::AtSign => UnaryOperator::PGAbs,
1898 _ => {
1899 return Err(ParserError::ParserError(
1900 "Internal parser error: unexpected unary operator token".to_string(),
1901 ))
1902 }
1903 };
1904 Ok(Expr::UnaryOp {
1905 op,
1906 expr: Box::new(
1907 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1908 ),
1909 })
1910 }
1911 Token::Tilde => Ok(Expr::UnaryOp {
1912 op: UnaryOperator::BitwiseNot,
1913 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1914 }),
1915 tok @ Token::Sharp
1916 | tok @ Token::AtDashAt
1917 | tok @ Token::AtAt
1918 | tok @ Token::QuestionMarkDash
1919 | tok @ Token::QuestionPipe
1920 if self.dialect.supports_geometric_types() =>
1921 {
1922 let op = match tok {
1923 Token::Sharp => UnaryOperator::Hash,
1924 Token::AtDashAt => UnaryOperator::AtDashAt,
1925 Token::AtAt => UnaryOperator::DoubleAt,
1926 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1927 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1928 _ => {
1929 return Err(ParserError::ParserError(format!(
1930 "Unexpected token in unary operator parsing: {tok:?}"
1931 )))
1932 }
1933 };
1934 Ok(Expr::UnaryOp {
1935 op,
1936 expr: Box::new(
1937 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1938 ),
1939 })
1940 }
1941 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1942 {
1943 self.prev_token();
1944 Ok(Expr::Value(self.parse_value()?))
1945 }
1946 Token::UnicodeStringLiteral(_) => {
1947 self.prev_token();
1948 Ok(Expr::Value(self.parse_value()?))
1949 }
1950 Token::Number(_, _)
1951 | Token::SingleQuotedString(_)
1952 | Token::DoubleQuotedString(_)
1953 | Token::TripleSingleQuotedString(_)
1954 | Token::TripleDoubleQuotedString(_)
1955 | Token::DollarQuotedString(_)
1956 | Token::SingleQuotedByteStringLiteral(_)
1957 | Token::DoubleQuotedByteStringLiteral(_)
1958 | Token::TripleSingleQuotedByteStringLiteral(_)
1959 | Token::TripleDoubleQuotedByteStringLiteral(_)
1960 | Token::SingleQuotedRawStringLiteral(_)
1961 | Token::DoubleQuotedRawStringLiteral(_)
1962 | Token::TripleSingleQuotedRawStringLiteral(_)
1963 | Token::TripleDoubleQuotedRawStringLiteral(_)
1964 | Token::NationalStringLiteral(_)
1965 | Token::QuoteDelimitedStringLiteral(_)
1966 | Token::NationalQuoteDelimitedStringLiteral(_)
1967 | Token::HexStringLiteral(_) => {
1968 self.prev_token();
1969 Ok(Expr::Value(self.parse_value()?))
1970 }
1971 Token::LParen => {
1972 let expr =
1973 if let Some(expr) = self.try_parse_expr_sub_query()? {
1974 expr
1975 } else if let Some(lambda) = self.try_parse_lambda()? {
1976 return Ok(lambda);
1977 } else {
1978 let exprs = self.with_state(ParserState::Normal, |p| {
1989 p.parse_comma_separated(Parser::parse_expr)
1990 })?;
1991 match exprs.len() {
1992 0 => return Err(ParserError::ParserError(
1993 "Internal parser error: parse_comma_separated returned empty list"
1994 .to_string(),
1995 )),
1996 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1997 _ => Expr::Tuple(exprs),
1998 }
1999 };
2000 self.expect_token(&Token::RParen)?;
2001 Ok(expr)
2002 }
2003 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
2004 self.prev_token();
2005 Ok(Expr::Value(self.parse_value()?))
2006 }
2007 Token::LBrace => {
2008 self.prev_token();
2009 self.parse_lbrace_expr()
2010 }
2011 _ => self.expected_at("an expression", next_token_index),
2012 }?;
2013
2014 Ok(expr)
2015 }
2016
2017 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
2018 Ok(Expr::TypedString(TypedString {
2019 data_type: DataType::GeometricType(kind),
2020 value: self.parse_value()?,
2021 uses_odbc_syntax: false,
2022 }))
2023 }
2024
    /// Parse the "compound" continuation of an expression: a chain of
    /// `.member` accesses and `[subscript]`s following `root`, plus a
    /// trailing `.*` wildcard and the `(+)` outer-join operator.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `expr.*`: only consumed as a qualified wildcard
                        // for PostgreSQL; otherwise give the `.` back and
                        // stop the chain.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => {
                        // Speculatively parse the member as a full
                        // sub-expression at `.`-precedence, but accept
                        // only identifier/value/function-shaped results.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested compound results into this
                            // chain rather than nesting them.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Speculation failed: parse a plain
                                // identifier so the proper error surfaces.
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` — every link must be a plain identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `a.b(+)` — likewise identifiers only.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2154
    /// Combine `root` and its parsed `access_chain` into one expression,
    /// normalizing special shapes: pure identifier chains, qualified
    /// function names, and a trailing `(+)` outer-join marker.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        // No accesses: the root stands alone.
        if access_chain.is_empty() {
            return Ok(root);
        }

        // `a.b.c` with identifier-only links becomes a compound
        // identifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `schema.table.func(...)`: a trailing function call preceded
        // only by identifiers folds into the function's qualified name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // skip the trailing function itself
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend the leading identifiers to the function's name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // `a.b(+)`: a single trailing outer-join marker re-attaches to
        // the whole compound identifier.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Everything else remains a generic field-access chain.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2247
2248 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2249 match k {
2250 Keyword::LOCAL => Some(ContextModifier::Local),
2251 Keyword::GLOBAL => Some(ContextModifier::Global),
2252 Keyword::SESSION => Some(ContextModifier::Session),
2253 _ => None,
2254 }
2255 }
2256
2257 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2259 if !matches!(root, Expr::Identifier(_)) {
2260 return false;
2261 }
2262 fields
2263 .iter()
2264 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2265 }
2266
2267 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2269 let mut idents = vec![];
2270 if let Expr::Identifier(root) = root {
2271 idents.push(root);
2272 for x in fields {
2273 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2274 idents.push(ident);
2275 } else {
2276 return parser_err!(
2277 format!("Expected identifier, found: {}", x),
2278 x.span().start
2279 );
2280 }
2281 }
2282 Ok(idents)
2283 } else {
2284 parser_err!(
2285 format!("Expected identifier, found: {}", root),
2286 root.span().start
2287 )
2288 }
2289 }
2290
2291 fn peek_outer_join_operator(&mut self) -> bool {
2293 if !self.dialect.supports_outer_join_operator() {
2294 return false;
2295 }
2296
2297 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2298 Token::LParen == maybe_lparen.token
2299 && Token::Plus == maybe_plus.token
2300 && Token::RParen == maybe_rparen.token
2301 }
2302
2303 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2306 self.dialect.supports_outer_join_operator()
2307 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2308 }
2309
2310 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2312 self.expect_token(&Token::LParen)?;
2313 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2314 self.expect_token(&Token::RParen)?;
2315
2316 Ok(options)
2317 }
2318
2319 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2320 let name = self.parse_identifier()?;
2321
2322 let next_token = self.peek_token_ref();
2323 if next_token == &Token::Comma || next_token == &Token::RParen {
2324 return Ok(UtilityOption { name, arg: None });
2325 }
2326 let arg = self.parse_expr()?;
2327
2328 Ok(UtilityOption {
2329 name,
2330 arg: Some(arg),
2331 })
2332 }
2333
2334 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2335 if !self.peek_sub_query() {
2336 return Ok(None);
2337 }
2338
2339 Ok(Some(Expr::Subquery(self.parse_query()?)))
2340 }
2341
2342 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2343 if !self.dialect.supports_lambda_functions() {
2344 return Ok(None);
2345 }
2346 self.maybe_parse(|p| {
2347 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2348 p.expect_token(&Token::RParen)?;
2349 p.expect_token(&Token::Arrow)?;
2350 let expr = p.parse_expr()?;
2351 Ok(Expr::Lambda(LambdaFunction {
2352 params: OneOrManyWithParens::Many(params),
2353 body: Box::new(expr),
2354 syntax: LambdaSyntax::Arrow,
2355 }))
2356 })
2357 }
2358
2359 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2369 let params = self.parse_lambda_function_parameters()?;
2371 self.expect_token(&Token::Colon)?;
2373 let body = self.parse_expr()?;
2375 Ok(Expr::Lambda(LambdaFunction {
2376 params,
2377 body: Box::new(body),
2378 syntax: LambdaSyntax::LambdaKeyword,
2379 }))
2380 }
2381
2382 fn parse_lambda_function_parameters(
2384 &mut self,
2385 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2386 let params = if self.consume_token(&Token::LParen) {
2388 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2390 self.expect_token(&Token::RParen)?;
2391 OneOrManyWithParens::Many(params)
2392 } else {
2393 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2395 if params.len() == 1 {
2396 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2397 } else {
2398 OneOrManyWithParens::Many(params)
2399 }
2400 };
2401 Ok(params)
2402 }
2403
2404 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2406 let name = self.parse_identifier()?;
2407 let data_type = match &self.peek_token_ref().token {
2408 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2409 _ => None,
2410 };
2411 Ok(LambdaFunctionParameter { name, data_type })
2412 }
2413
2414 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2421 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2423 return Ok(Some(expr));
2424 }
2425 self.maybe_parse_odbc_body_datetime()
2427 }
2428
2429 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2440 self.maybe_parse(|p| {
2441 let token = p.next_token().clone();
2442 let word_string = token.token.to_string();
2443 let data_type = match word_string.as_str() {
2444 "t" => DataType::Time(None, TimezoneInfo::None),
2445 "d" => DataType::Date,
2446 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2447 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2448 };
2449 let value = p.parse_value()?;
2450 Ok(Expr::TypedString(TypedString {
2451 data_type,
2452 value,
2453 uses_odbc_syntax: true,
2454 }))
2455 })
2456 }
2457
2458 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2467 self.maybe_parse(|p| {
2468 p.expect_keyword(Keyword::FN)?;
2469 let fn_name = p.parse_object_name(false)?;
2470 let mut fn_call = p.parse_function_call(fn_name)?;
2471 fn_call.uses_odbc_syntax = true;
2472 Ok(Expr::Function(fn_call))
2473 })
2474 }
2475
2476 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2478 self.parse_function_call(name).map(Expr::Function)
2479 }
2480
2481 fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2482 self.expect_token(&Token::LParen)?;
2483
2484 if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
2487 let subquery = self.parse_query()?;
2488 self.expect_token(&Token::RParen)?;
2489 return Ok(Function {
2490 name,
2491 uses_odbc_syntax: false,
2492 parameters: FunctionArguments::None,
2493 args: FunctionArguments::Subquery(subquery),
2494 filter: None,
2495 null_treatment: None,
2496 over: None,
2497 within_group: vec![],
2498 });
2499 }
2500
2501 let mut args = self.parse_function_argument_list()?;
2502 let mut parameters = FunctionArguments::None;
2503 if dialect_of!(self is ClickHouseDialect | GenericDialect)
2506 && self.consume_token(&Token::LParen)
2507 {
2508 parameters = FunctionArguments::List(args);
2509 args = self.parse_function_argument_list()?;
2510 }
2511
2512 let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2513 self.expect_token(&Token::LParen)?;
2514 self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2515 let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2516 self.expect_token(&Token::RParen)?;
2517 order_by
2518 } else {
2519 vec![]
2520 };
2521
2522 let filter = if self.dialect.supports_filter_during_aggregation()
2523 && self.parse_keyword(Keyword::FILTER)
2524 && self.consume_token(&Token::LParen)
2525 && self.parse_keyword(Keyword::WHERE)
2526 {
2527 let filter = Some(Box::new(self.parse_expr()?));
2528 self.expect_token(&Token::RParen)?;
2529 filter
2530 } else {
2531 None
2532 };
2533
2534 let null_treatment = if args
2537 .clauses
2538 .iter()
2539 .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2540 {
2541 self.parse_null_treatment()?
2542 } else {
2543 None
2544 };
2545
2546 let over = if self.parse_keyword(Keyword::OVER) {
2547 if self.consume_token(&Token::LParen) {
2548 let window_spec = self.parse_window_spec()?;
2549 Some(WindowType::WindowSpec(window_spec))
2550 } else {
2551 Some(WindowType::NamedWindow(self.parse_identifier()?))
2552 }
2553 } else {
2554 None
2555 };
2556
2557 Ok(Function {
2558 name,
2559 uses_odbc_syntax: false,
2560 parameters,
2561 args: FunctionArguments::List(args),
2562 null_treatment,
2563 filter,
2564 over,
2565 within_group,
2566 })
2567 }
2568
2569 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2571 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2572 Some(keyword) => {
2573 self.expect_keyword_is(Keyword::NULLS)?;
2574
2575 Ok(match keyword {
2576 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2577 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2578 _ => None,
2579 })
2580 }
2581 None => Ok(None),
2582 }
2583 }
2584
2585 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2587 let args = if self.consume_token(&Token::LParen) {
2588 FunctionArguments::List(self.parse_function_argument_list()?)
2589 } else {
2590 FunctionArguments::None
2591 };
2592 Ok(Expr::Function(Function {
2593 name,
2594 uses_odbc_syntax: false,
2595 parameters: FunctionArguments::None,
2596 args,
2597 filter: None,
2598 over: None,
2599 null_treatment: None,
2600 within_group: vec![],
2601 }))
2602 }
2603
2604 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2606 let next_token = self.next_token();
2607 match &next_token.token {
2608 Token::Word(w) => match w.keyword {
2609 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2610 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2611 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2612 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2613 },
2614 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2615 }
2616 }
2617
2618 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2620 let units = self.parse_window_frame_units()?;
2621 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2622 let start_bound = self.parse_window_frame_bound()?;
2623 self.expect_keyword_is(Keyword::AND)?;
2624 let end_bound = Some(self.parse_window_frame_bound()?);
2625 (start_bound, end_bound)
2626 } else {
2627 (self.parse_window_frame_bound()?, None)
2628 };
2629 Ok(WindowFrame {
2630 units,
2631 start_bound,
2632 end_bound,
2633 })
2634 }
2635
2636 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2638 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2639 Ok(WindowFrameBound::CurrentRow)
2640 } else {
2641 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2642 None
2643 } else {
2644 Some(Box::new(match &self.peek_token_ref().token {
2645 Token::SingleQuotedString(_) => self.parse_interval()?,
2646 _ => self.parse_expr()?,
2647 }))
2648 };
2649 if self.parse_keyword(Keyword::PRECEDING) {
2650 Ok(WindowFrameBound::Preceding(rows))
2651 } else if self.parse_keyword(Keyword::FOLLOWING) {
2652 Ok(WindowFrameBound::Following(rows))
2653 } else {
2654 self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
2655 }
2656 }
2657 }
2658
2659 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2661 if self.dialect.supports_group_by_expr() {
2662 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2663 self.expect_token(&Token::LParen)?;
2664 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2665 self.expect_token(&Token::RParen)?;
2666 Ok(Expr::GroupingSets(result))
2667 } else if self.parse_keyword(Keyword::CUBE) {
2668 self.expect_token(&Token::LParen)?;
2669 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2670 self.expect_token(&Token::RParen)?;
2671 Ok(Expr::Cube(result))
2672 } else if self.parse_keyword(Keyword::ROLLUP) {
2673 self.expect_token(&Token::LParen)?;
2674 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2675 self.expect_token(&Token::RParen)?;
2676 Ok(Expr::Rollup(result))
2677 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2678 Ok(Expr::Tuple(vec![]))
2682 } else {
2683 self.parse_expr()
2684 }
2685 } else {
2686 self.parse_expr()
2688 }
2689 }
2690
2691 fn parse_tuple(
2695 &mut self,
2696 lift_singleton: bool,
2697 allow_empty: bool,
2698 ) -> Result<Vec<Expr>, ParserError> {
2699 if lift_singleton {
2700 if self.consume_token(&Token::LParen) {
2701 let result = if allow_empty && self.consume_token(&Token::RParen) {
2702 vec![]
2703 } else {
2704 let result = self.parse_comma_separated(Parser::parse_expr)?;
2705 self.expect_token(&Token::RParen)?;
2706 result
2707 };
2708 Ok(result)
2709 } else {
2710 Ok(vec![self.parse_expr()?])
2711 }
2712 } else {
2713 self.expect_token(&Token::LParen)?;
2714 let result = if allow_empty && self.consume_token(&Token::RParen) {
2715 vec![]
2716 } else {
2717 let result = self.parse_comma_separated(Parser::parse_expr)?;
2718 self.expect_token(&Token::RParen)?;
2719 result
2720 };
2721 Ok(result)
2722 }
2723 }
2724
    /// Parses a `CASE` expression; the `CASE` keyword itself has already been
    /// consumed by the caller.
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        // Capture the already-consumed `CASE` token for span tracking.
        let case_token = AttachedToken(self.get_current_token().clone());
        let mut operand = None;
        // Simple CASE (`CASE <operand> WHEN ...`) has an operand before the
        // first WHEN; searched CASE (`CASE WHEN <cond> ...`) does not.
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword_is(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        // One iteration per `WHEN <condition> THEN <result>` arm; the WHEN of
        // the first arm was consumed above.
        loop {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::THEN)?;
            let result = self.parse_expr()?;
            conditions.push(CaseWhen { condition, result });
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
        Ok(Expr::Case {
            case_token,
            end_token,
            operand,
            conditions,
            else_result,
        })
    }
2757
2758 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2760 if self.parse_keyword(Keyword::FORMAT) {
2761 let value = self.parse_value()?;
2762 match self.parse_optional_time_zone()? {
2763 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2764 None => Ok(Some(CastFormat::Value(value))),
2765 }
2766 } else {
2767 Ok(None)
2768 }
2769 }
2770
2771 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2773 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2774 self.parse_value().map(Some)
2775 } else {
2776 Ok(None)
2777 }
2778 }
2779
2780 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2782 self.expect_token(&Token::LParen)?;
2783 let data_type = self.parse_data_type()?;
2784 self.expect_token(&Token::Comma)?;
2785 let expr = self.parse_expr()?;
2786 let styles = if self.consume_token(&Token::Comma) {
2787 self.parse_comma_separated(Parser::parse_expr)?
2788 } else {
2789 Default::default()
2790 };
2791 self.expect_token(&Token::RParen)?;
2792 Ok(Expr::Convert {
2793 is_try,
2794 expr: Box::new(expr),
2795 data_type: Some(data_type),
2796 charset: None,
2797 target_before_value: true,
2798 styles,
2799 })
2800 }
2801
    /// Parses a `CONVERT`/`TRY_CONVERT` expression after the keyword.
    ///
    /// Handles three syntaxes:
    /// - type-first `CONVERT(<type>, <expr>, ...)` for dialects where
    ///   `convert_type_before_value()` is true (delegated to
    ///   `parse_mssql_convert`),
    /// - charset conversion `CONVERT(<expr> USING <charset>)`,
    /// - `CONVERT(<expr>, <type> [CHARACTER SET <charset>])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // Charset conversion form: `CONVERT(expr USING utf8mb4)`.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        // Optional `CHARACTER SET <name>` suffix after the data type.
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2841
    /// Parses a `CAST`-family expression after the keyword:
    /// `(<expr> AS <data type> [ARRAY] [FORMAT <fmt>])`.
    ///
    /// `kind` distinguishes the cast variant (e.g. `CAST` vs `TRY_CAST`) that
    /// the caller already consumed.
    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let data_type = self.parse_data_type()?;
        // Optional trailing `ARRAY` marker on the target type.
        let array = self.parse_keyword(Keyword::ARRAY);
        // Optional `FORMAT <value>` clause.
        let format = self.parse_optional_cast_format()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Cast {
            kind,
            expr: Box::new(expr),
            data_type,
            array,
            format,
        })
    }
2859
2860 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2862 self.expect_token(&Token::LParen)?;
2863 let exists_node = Expr::Exists {
2864 negated,
2865 subquery: self.parse_query()?,
2866 };
2867 self.expect_token(&Token::RParen)?;
2868 Ok(exists_node)
2869 }
2870
    /// Parses an `EXTRACT` expression after the keyword:
    /// `(<field> FROM <expr>)` or, for dialects supporting it,
    /// `(<field>, <expr>)`.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        // Record which separator syntax was used so the AST can round-trip it.
        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
        {
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2895
2896 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2898 self.expect_token(&Token::LParen)?;
2899 let expr = self.parse_expr()?;
2900 let field = if self.parse_keyword(Keyword::TO) {
2902 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2904 } else if self.consume_token(&Token::Comma) {
2905 let v = self.parse_value()?;
2907 if matches!(v.value, Value::Number(_, _)) {
2908 CeilFloorKind::Scale(v)
2909 } else {
2910 return Err(ParserError::ParserError(
2911 "Scale field can only be of number type".to_string(),
2912 ));
2913 }
2914 } else {
2915 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2916 };
2917 self.expect_token(&Token::RParen)?;
2918 if is_ceil {
2919 Ok(Expr::Ceil {
2920 expr: Box::new(expr),
2921 field,
2922 })
2923 } else {
2924 Ok(Expr::Floor {
2925 expr: Box::new(expr),
2926 field,
2927 })
2928 }
2929 }
2930
    /// Parses `POSITION(<expr> IN <expr>)`, falling back to an ordinary
    /// function call when the `IN` form does not match.
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        let between_prec = self.dialect.prec_value(Precedence::Between);
        // Speculatively parse the `POSITION(expr IN expr)` form; on failure
        // `maybe_parse` rolls the parser position back.
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            // Parse the subexpression below `BETWEEN` precedence so the `IN`
            // keyword is left for us to consume rather than being parsed as an
            // IN-list operator.
            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword_is(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            // Otherwise treat POSITION as a regular function name.
            None => self.parse_function(ObjectName::from(vec![ident])),
        }
    }
2955
    /// Parses `SUBSTRING(expr [FROM start] [FOR len])` and the comma-separated
    /// variant `SUBSTR(expr, start, len)`.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        // `shorthand` records whether the `SUBSTR` spelling was used so the
        // AST can round-trip the original keyword.
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            // `expect_one_of_keywords` only returns keywords from the list
            // above; this arm is defensive.
            _ => {
                self.prev_token();
                return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // `special` marks the comma-separated argument form.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2988
    /// Parses `(<expr> PLACING <expr> FROM <expr> [FOR <expr>])` following the
    /// `OVERLAY` keyword.
    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        self.expect_keyword_is(Keyword::PLACING)?;
        let what_expr = self.parse_expr()?;
        self.expect_keyword_is(Keyword::FROM)?;
        let from_expr = self.parse_expr()?;
        // Optional `FOR <length>` clause.
        let mut for_expr = None;
        if self.parse_keyword(Keyword::FOR) {
            for_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Overlay {
            expr: Box::new(expr),
            overlay_what: Box::new(what_expr),
            overlay_from: Box::new(from_expr),
            overlay_for: for_expr.map(Box::new),
        })
    }
3013
    /// Parses a `TRIM` call after the keyword. Supported forms:
    /// - `TRIM([BOTH|LEADING|TRAILING] <what> FROM <expr>)`
    /// - `TRIM([BOTH|LEADING|TRAILING] <expr>)`
    /// - `TRIM(<expr>, <characters>...)` for dialects that support the
    ///   comma-separated form.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional leading position keyword (BOTH / LEADING / TRAILING).
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // In the `FROM` form the first expression is the characters to
            // trim and the expression after FROM is the string being trimmed.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3058
3059 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3063 let next_token = self.next_token();
3064 match &next_token.token {
3065 Token::Word(w) => match w.keyword {
3066 Keyword::BOTH => Ok(TrimWhereField::Both),
3067 Keyword::LEADING => Ok(TrimWhereField::Leading),
3068 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3069 _ => self.expected("trim_where field", next_token)?,
3070 },
3071 _ => self.expected("trim_where field", next_token),
3072 }
3073 }
3074
3075 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3078 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3079 self.expect_token(&Token::RBracket)?;
3080 Ok(Expr::Array(Array { elem: exprs, named }))
3081 }
3082
    /// Parses the optional `ON OVERFLOW` clause of `LISTAGG`:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler>] WITH|WITHOUT COUNT`.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler string literal is optional; WITH/WITHOUT may
                // follow TRUNCATE immediately.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH / WITHOUT must precede COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3121
    /// Parses a date/time field keyword (e.g. `YEAR`, `SECOND`,
    /// `TIMEZONE_HOUR`) as used by `EXTRACT`, `CEIL`/`FLOOR ... TO`, and
    /// interval qualifiers.
    ///
    /// Dialects may additionally allow arbitrary identifiers
    /// (`allow_extract_custom`) or single-quoted strings
    /// (`allow_extract_single_quotes`) as custom fields.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery/Generic allow a weekday argument:
                    // `WEEK(MONDAY)`.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Unknown word: accept as a custom field where the dialect
                // allows it, re-parsing the word as an identifier.
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            // Quoted custom field, e.g. EXTRACT('field' FROM ...), where the
            // dialect allows it.
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3200
3201 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3205 match &self.peek_token_ref().token {
3206 Token::Word(w) => match w.keyword {
3207 Keyword::EXISTS => {
3208 let negated = true;
3209 let _ = self.parse_keyword(Keyword::EXISTS);
3210 self.parse_exists_expr(negated)
3211 }
3212 _ => Ok(Expr::UnaryOp {
3213 op: UnaryOperator::Not,
3214 expr: Box::new(
3215 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3216 ),
3217 }),
3218 },
3219 _ => Ok(Expr::UnaryOp {
3220 op: UnaryOperator::Not,
3221 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3222 }),
3223 }
3224 }
3225
3226 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3236 let token = self.expect_token(&Token::LBrace)?;
3237
3238 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3239 self.expect_token(&Token::RBrace)?;
3240 return Ok(fn_expr);
3241 }
3242
3243 if self.dialect.supports_dictionary_syntax() {
3244 self.prev_token(); return self.parse_dictionary();
3246 }
3247
3248 self.expected("an expression", token)
3249 }
3250
    /// Parses a full-text search expression
    /// `(col, ...) AGAINST (<value> [search modifier])` following the `MATCH`
    /// keyword.
    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;

        self.expect_keyword_is(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        // The search pattern value.
        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        // Optional search modifier; `IN NATURAL LANGUAGE MODE` may itself be
        // followed by `WITH QUERY EXPANSION`.
        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }
3299
    /// Parses an `INTERVAL` literal; the `INTERVAL` keyword has already been
    /// consumed.
    ///
    /// Handles qualified intervals (`INTERVAL '1' DAY`, optionally with a
    /// precision and a `TO <field>` range) and, for dialects that do not
    /// require a qualifier, unqualified ones (`INTERVAL '1 day'`).
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a trailing unit can safely parse a full
        // expression for the value; otherwise only a prefix expression is
        // parsed so that a following unit keyword is not swallowed.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Optional leading unit, e.g. the `DAY` in `INTERVAL '1' DAY`.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                // `SECOND [(p [, s])]` — an optional second value is the
                // fractional-seconds precision; no `TO` range follows.
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    // Only `... TO SECOND` may carry a fractional precision.
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3382
    /// Returns true when the next token is a keyword naming a temporal unit
    /// (e.g. `DAY`, `MINUTES`, `TIMEZONE_HOUR`), without consuming it.
    ///
    /// Used by `parse_interval` to decide whether a unit qualifier follows
    /// the interval value.
    pub fn next_token_is_temporal_unit(&mut self) -> bool {
        if let Token::Word(word) = &self.peek_token_ref().token {
            matches!(
                word.keyword,
                Keyword::YEAR
                    | Keyword::YEARS
                    | Keyword::MONTH
                    | Keyword::MONTHS
                    | Keyword::WEEK
                    | Keyword::WEEKS
                    | Keyword::DAY
                    | Keyword::DAYS
                    | Keyword::HOUR
                    | Keyword::HOURS
                    | Keyword::MINUTE
                    | Keyword::MINUTES
                    | Keyword::SECOND
                    | Keyword::SECONDS
                    | Keyword::CENTURY
                    | Keyword::DECADE
                    | Keyword::DOW
                    | Keyword::DOY
                    | Keyword::EPOCH
                    | Keyword::ISODOW
                    | Keyword::ISOYEAR
                    | Keyword::JULIAN
                    | Keyword::MICROSECOND
                    | Keyword::MICROSECONDS
                    | Keyword::MILLENIUM
                    | Keyword::MILLENNIUM
                    | Keyword::MILLISECOND
                    | Keyword::MILLISECONDS
                    | Keyword::NANOSECOND
                    | Keyword::NANOSECONDS
                    | Keyword::QUARTER
                    | Keyword::TIMEZONE
                    | Keyword::TIMEZONE_HOUR
                    | Keyword::TIMEZONE_MINUTE
            )
        } else {
            false
        }
    }
3428
    /// Parses a struct literal such as `STRUCT<a INT>(1)` or `STRUCT(1 AS a)`.
    ///
    /// Called with the `STRUCT` keyword already consumed; it is pushed back
    /// first so the type-definition parser can re-read it.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // Step back so `parse_struct_type_def` sees the STRUCT keyword.
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        // A leftover `>` from a `>>` token cannot be matched here.
        if trailing_bracket.0 {
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token_ref().span.start
            );
        }

        // Parenthesized field values; `AS`-naming of values is only allowed
        // when no explicit field types were given.
        self.expect_token(&Token::LParen)?;
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3456
3457 fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3471 let expr = self.parse_expr()?;
3472 if self.parse_keyword(Keyword::AS) {
3473 if typed_syntax {
3474 return parser_err!("Typed syntax does not allow AS", {
3475 self.prev_token();
3476 self.peek_token_ref().span.start
3477 });
3478 }
3479 let field_name = self.parse_identifier()?;
3480 Ok(Expr::Named {
3481 expr: expr.into(),
3482 name: field_name,
3483 })
3484 } else {
3485 Ok(expr)
3486 }
3487 }
3488
    /// Parses the `STRUCT<field, ...>` portion of a struct type.
    ///
    /// `elem_parser` parses one field definition and reports whether it
    /// already consumed a `>>` token that closes this bracket too. The
    /// returned flag indicates whether an extra `>` (from a `>>`) is left
    /// over for an enclosing angle-bracket pair.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // A bare `STRUCT` with no `<...>` type list yields no fields.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // Stop when the element already consumed the closing bracket
            // (via `>>`) or no comma follows.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3531
    /// Parses a DuckDB-style struct type: `STRUCT(name type, ...)`.
    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
        self.expect_keyword_is(Keyword::STRUCT)?;
        self.expect_token(&Token::LParen)?;
        let struct_body = self.parse_comma_separated(|parser| {
            // Every field is named: `<identifier> <data type>`.
            let field_name = parser.parse_identifier()?;
            let field_type = parser.parse_data_type()?;

            Ok(StructField {
                field_name: Some(field_name),
                field_type,
                options: None,
            })
        });
        self.expect_token(&Token::RParen)?;
        struct_body
    }
3549
    /// Parses a single field inside a `STRUCT<...>` type definition.
    ///
    /// Fields may be named (`name type` or `name: type`) or anonymous (a bare
    /// type). The returned flag reports whether parsing the type consumed a
    /// `>>` that also closes the enclosing struct.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // Two leading words (`name type`) or `name:` indicate a named field.
        let is_named_field = matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
        );

        let field_name = if is_named_field {
            let name = self.parse_identifier()?;
            // The colon separator between name and type is optional.
            let _ = self.consume_token(&Token::Colon);
            Some(name)
        } else {
            None
        };

        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        // Optional `OPTIONS (...)` clause on the field.
        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3591
    /// Parses a `UNION(name type, ...)` type definition.
    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
        self.expect_keyword_is(Keyword::UNION)?;

        self.expect_token(&Token::LParen)?;

        // Each alternative is `<identifier> <data type>`.
        let fields = self.parse_comma_separated(|p| {
            Ok(UnionField {
                field_name: p.parse_identifier()?,
                field_type: p.parse_data_type()?,
            })
        })?;

        self.expect_token(&Token::RParen)?;

        Ok(fields)
    }
3617
    /// Parses a dictionary literal `{key: value, ...}`, including the
    /// surrounding braces. An empty dictionary `{}` is allowed.
    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LBrace)?;

        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;

        self.expect_token(&Token::RBrace)?;

        Ok(Expr::Dictionary(fields))
    }
3637
    /// Parses one `key: value` entry of a dictionary literal; keys are
    /// identifiers, values arbitrary expressions.
    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
        let key = self.parse_identifier()?;

        self.expect_token(&Token::Colon)?;

        let expr = self.parse_expr()?;

        Ok(DictionaryField {
            key,
            value: Box::new(expr),
        })
    }
3660
    /// Parses a map literal `{key: value, ...}` (DuckDB-style), including the
    /// surrounding braces. An empty map `{}` is allowed.
    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LBrace)?;
        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
        self.expect_token(&Token::RBrace)?;
        Ok(Expr::Map(Map { entries: fields }))
    }
3676
    /// Parses one `key: value` entry of a map literal; unlike dictionary
    /// fields, both key and value are expressions.
    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
        // Parse the key below `:` precedence so the separating colon is left
        // for us to consume.
        let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;

        self.expect_token(&Token::Colon)?;

        let value = self.parse_expr()?;

        Ok(MapEntry {
            key: Box::new(key),
            value: Box::new(value),
        })
    }
3699
    /// Parses a ClickHouse `Map(key_type, value_type)` type definition,
    /// returning the key and value data types.
    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
        self.expect_keyword_is(Keyword::MAP)?;
        self.expect_token(&Token::LParen)?;
        let key_data_type = self.parse_data_type()?;
        self.expect_token(&Token::Comma)?;
        let value_data_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        Ok((key_data_type, value_data_type))
    }
3719
    /// Parses a ClickHouse `Tuple(field, ...)` type definition, reusing the
    /// struct field parser for each element.
    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
        self.expect_keyword_is(Keyword::TUPLE)?;
        self.expect_token(&Token::LParen)?;
        let mut field_defs = vec![];
        loop {
            // The trailing-bracket flag is irrelevant inside parentheses.
            let (def, _) = self.parse_struct_field_def()?;
            field_defs.push(def);
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }
        self.expect_token(&Token::RParen)?;

        Ok(field_defs)
    }
3744
3745 fn expect_closing_angle_bracket(
3750 &mut self,
3751 trailing_bracket: MatchedTrailingBracket,
3752 ) -> Result<MatchedTrailingBracket, ParserError> {
3753 let trailing_bracket = if !trailing_bracket.0 {
3754 match &self.peek_token_ref().token {
3755 Token::Gt => {
3756 self.next_token();
3757 false.into()
3758 }
3759 Token::ShiftRight => {
3760 self.next_token();
3761 true.into()
3762 }
3763 _ => return self.expected_ref(">", self.peek_token_ref()),
3764 }
3765 } else {
3766 false.into()
3767 };
3768
3769 Ok(trailing_bracket)
3770 }
3771
3772 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3774 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3776 return infix;
3777 }
3778
3779 let dialect = self.dialect;
3780
3781 self.advance_token();
3782 let tok = self.get_current_token();
3783 debug!("infix: {tok:?}");
3784 let tok_index = self.get_current_index();
3785 let span = tok.span;
3786 let regular_binary_operator = match &tok.token {
3787 Token::Spaceship => Some(BinaryOperator::Spaceship),
3788 Token::DoubleEq => Some(BinaryOperator::Eq),
3789 Token::Assignment => Some(BinaryOperator::Assignment),
3790 Token::Eq => Some(BinaryOperator::Eq),
3791 Token::Neq => Some(BinaryOperator::NotEq),
3792 Token::Gt => Some(BinaryOperator::Gt),
3793 Token::GtEq => Some(BinaryOperator::GtEq),
3794 Token::Lt => Some(BinaryOperator::Lt),
3795 Token::LtEq => Some(BinaryOperator::LtEq),
3796 Token::Plus => Some(BinaryOperator::Plus),
3797 Token::Minus => Some(BinaryOperator::Minus),
3798 Token::Mul => Some(BinaryOperator::Multiply),
3799 Token::Mod => Some(BinaryOperator::Modulo),
3800 Token::StringConcat => Some(BinaryOperator::StringConcat),
3801 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3802 Token::Caret => {
3803 if dialect_is!(dialect is PostgreSqlDialect) {
3806 Some(BinaryOperator::PGExp)
3807 } else {
3808 Some(BinaryOperator::BitwiseXor)
3809 }
3810 }
3811 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3812 Token::Div => Some(BinaryOperator::Divide),
3813 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3814 Some(BinaryOperator::DuckIntegerDivide)
3815 }
3816 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3817 Some(BinaryOperator::PGBitwiseShiftLeft)
3818 }
3819 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3820 Some(BinaryOperator::PGBitwiseShiftRight)
3821 }
3822 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3823 Some(BinaryOperator::PGBitwiseXor)
3824 }
3825 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3826 Some(BinaryOperator::PGOverlap)
3827 }
3828 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3829 Some(BinaryOperator::PGOverlap)
3830 }
3831 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3832 Some(BinaryOperator::And)
3833 }
3834 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3835 Some(BinaryOperator::PGStartsWith)
3836 }
3837 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3838 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3839 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3840 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3841 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3842 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3843 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3844 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3845 Token::Arrow => Some(BinaryOperator::Arrow),
3846 Token::LongArrow => Some(BinaryOperator::LongArrow),
3847 Token::HashArrow => Some(BinaryOperator::HashArrow),
3848 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3849 Token::AtArrow => Some(BinaryOperator::AtArrow),
3850 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3851 Token::HashMinus => Some(BinaryOperator::HashMinus),
3852 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3853 Token::AtAt => Some(BinaryOperator::AtAt),
3854 Token::Question => Some(BinaryOperator::Question),
3855 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3856 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3857 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3858 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3859 Some(BinaryOperator::DoubleHash)
3860 }
3861
3862 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3863 Some(BinaryOperator::AndLt)
3864 }
3865 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3866 Some(BinaryOperator::AndGt)
3867 }
3868 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3869 Some(BinaryOperator::QuestionDash)
3870 }
3871 Token::AmpersandLeftAngleBracketVerticalBar
3872 if self.dialect.supports_geometric_types() =>
3873 {
3874 Some(BinaryOperator::AndLtPipe)
3875 }
3876 Token::VerticalBarAmpersandRightAngleBracket
3877 if self.dialect.supports_geometric_types() =>
3878 {
3879 Some(BinaryOperator::PipeAndGt)
3880 }
3881 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3882 Some(BinaryOperator::LtDashGt)
3883 }
3884 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3885 Some(BinaryOperator::LtCaret)
3886 }
3887 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3888 Some(BinaryOperator::GtCaret)
3889 }
3890 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3891 Some(BinaryOperator::QuestionHash)
3892 }
3893 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3894 Some(BinaryOperator::QuestionDoublePipe)
3895 }
3896 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3897 Some(BinaryOperator::QuestionDashPipe)
3898 }
3899 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3900 Some(BinaryOperator::TildeEq)
3901 }
3902 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3903 Some(BinaryOperator::LtLtPipe)
3904 }
3905 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3906 Some(BinaryOperator::PipeGtGt)
3907 }
3908 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3909
3910 Token::Word(w) => match w.keyword {
3911 Keyword::AND => Some(BinaryOperator::And),
3912 Keyword::OR => Some(BinaryOperator::Or),
3913 Keyword::XOR => Some(BinaryOperator::Xor),
3914 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3915 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3916 self.expect_token(&Token::LParen)?;
3917 let mut idents = vec![];
3922 loop {
3923 self.advance_token();
3924 idents.push(self.get_current_token().to_string());
3925 if !self.consume_token(&Token::Period) {
3926 break;
3927 }
3928 }
3929 self.expect_token(&Token::RParen)?;
3930 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3931 }
3932 _ => None,
3933 },
3934 _ => None,
3935 };
3936
3937 let tok = self.token_at(tok_index);
3938 if let Some(op) = regular_binary_operator {
3939 if let Some(keyword) =
3940 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3941 {
3942 self.expect_token(&Token::LParen)?;
3943 let right = if self.peek_sub_query() {
3944 self.prev_token(); self.parse_subexpr(precedence)?
3948 } else {
3949 let right = self.parse_subexpr(precedence)?;
3951 self.expect_token(&Token::RParen)?;
3952 right
3953 };
3954
3955 if !matches!(
3956 op,
3957 BinaryOperator::Gt
3958 | BinaryOperator::Lt
3959 | BinaryOperator::GtEq
3960 | BinaryOperator::LtEq
3961 | BinaryOperator::Eq
3962 | BinaryOperator::NotEq
3963 | BinaryOperator::PGRegexMatch
3964 | BinaryOperator::PGRegexIMatch
3965 | BinaryOperator::PGRegexNotMatch
3966 | BinaryOperator::PGRegexNotIMatch
3967 | BinaryOperator::PGLikeMatch
3968 | BinaryOperator::PGILikeMatch
3969 | BinaryOperator::PGNotLikeMatch
3970 | BinaryOperator::PGNotILikeMatch
3971 ) {
3972 return parser_err!(
3973 format!(
3974 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3975 ),
3976 span.start
3977 );
3978 };
3979
3980 Ok(match keyword {
3981 Keyword::ALL => Expr::AllOp {
3982 left: Box::new(expr),
3983 compare_op: op,
3984 right: Box::new(right),
3985 },
3986 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3987 left: Box::new(expr),
3988 compare_op: op,
3989 right: Box::new(right),
3990 is_some: keyword == Keyword::SOME,
3991 },
3992 unexpected_keyword => return Err(ParserError::ParserError(
3993 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3994 )),
3995 })
3996 } else {
3997 Ok(Expr::BinaryOp {
3998 left: Box::new(expr),
3999 op,
4000 right: Box::new(self.parse_subexpr(precedence)?),
4001 })
4002 }
4003 } else if let Token::Word(w) = &tok.token {
4004 match w.keyword {
4005 Keyword::IS => {
4006 if self.parse_keyword(Keyword::NULL) {
4007 Ok(Expr::IsNull(Box::new(expr)))
4008 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
4009 Ok(Expr::IsNotNull(Box::new(expr)))
4010 } else if self.parse_keywords(&[Keyword::TRUE]) {
4011 Ok(Expr::IsTrue(Box::new(expr)))
4012 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
4013 Ok(Expr::IsNotTrue(Box::new(expr)))
4014 } else if self.parse_keywords(&[Keyword::FALSE]) {
4015 Ok(Expr::IsFalse(Box::new(expr)))
4016 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
4017 Ok(Expr::IsNotFalse(Box::new(expr)))
4018 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
4019 Ok(Expr::IsUnknown(Box::new(expr)))
4020 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
4021 Ok(Expr::IsNotUnknown(Box::new(expr)))
4022 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
4023 let expr2 = self.parse_expr()?;
4024 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
4025 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
4026 {
4027 let expr2 = self.parse_expr()?;
4028 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
4029 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
4030 Ok(is_normalized)
4031 } else {
4032 self.expected_ref(
4033 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
4034 self.peek_token_ref(),
4035 )
4036 }
4037 }
4038 Keyword::AT => {
4039 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4040 Ok(Expr::AtTimeZone {
4041 timestamp: Box::new(expr),
4042 time_zone: Box::new(self.parse_subexpr(precedence)?),
4043 })
4044 }
4045 Keyword::NOT
4046 | Keyword::IN
4047 | Keyword::BETWEEN
4048 | Keyword::LIKE
4049 | Keyword::ILIKE
4050 | Keyword::SIMILAR
4051 | Keyword::REGEXP
4052 | Keyword::RLIKE => {
4053 self.prev_token();
4054 let negated = self.parse_keyword(Keyword::NOT);
4055 let regexp = self.parse_keyword(Keyword::REGEXP);
4056 let rlike = self.parse_keyword(Keyword::RLIKE);
4057 let null = if !self.in_column_definition_state() {
4058 self.parse_keyword(Keyword::NULL)
4059 } else {
4060 false
4061 };
4062 if regexp || rlike {
4063 Ok(Expr::RLike {
4064 negated,
4065 expr: Box::new(expr),
4066 pattern: Box::new(
4067 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4068 ),
4069 regexp,
4070 })
4071 } else if negated && null {
4072 Ok(Expr::IsNotNull(Box::new(expr)))
4073 } else if self.parse_keyword(Keyword::IN) {
4074 self.parse_in(expr, negated)
4075 } else if self.parse_keyword(Keyword::BETWEEN) {
4076 self.parse_between(expr, negated)
4077 } else if self.parse_keyword(Keyword::LIKE) {
4078 Ok(Expr::Like {
4079 negated,
4080 any: self.parse_keyword(Keyword::ANY),
4081 expr: Box::new(expr),
4082 pattern: Box::new(
4083 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4084 ),
4085 escape_char: self.parse_escape_char()?,
4086 })
4087 } else if self.parse_keyword(Keyword::ILIKE) {
4088 Ok(Expr::ILike {
4089 negated,
4090 any: self.parse_keyword(Keyword::ANY),
4091 expr: Box::new(expr),
4092 pattern: Box::new(
4093 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4094 ),
4095 escape_char: self.parse_escape_char()?,
4096 })
4097 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4098 Ok(Expr::SimilarTo {
4099 negated,
4100 expr: Box::new(expr),
4101 pattern: Box::new(
4102 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4103 ),
4104 escape_char: self.parse_escape_char()?,
4105 })
4106 } else {
4107 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4108 }
4109 }
4110 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4111 Ok(Expr::IsNotNull(Box::new(expr)))
4112 }
4113 Keyword::MEMBER => {
4114 if self.parse_keyword(Keyword::OF) {
4115 self.expect_token(&Token::LParen)?;
4116 let array = self.parse_expr()?;
4117 self.expect_token(&Token::RParen)?;
4118 Ok(Expr::MemberOf(MemberOf {
4119 value: Box::new(expr),
4120 array: Box::new(array),
4121 }))
4122 } else {
4123 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4124 }
4125 }
4126 _ => parser_err!(
4128 format!("No infix parser for token {:?}", tok.token),
4129 tok.span.start
4130 ),
4131 }
4132 } else if Token::DoubleColon == *tok {
4133 Ok(Expr::Cast {
4134 kind: CastKind::DoubleColon,
4135 expr: Box::new(expr),
4136 data_type: self.parse_data_type()?,
4137 array: false,
4138 format: None,
4139 })
4140 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4141 Ok(Expr::UnaryOp {
4142 op: UnaryOperator::PGPostfixFactorial,
4143 expr: Box::new(expr),
4144 })
4145 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4146 || (Token::Colon == *tok)
4147 {
4148 self.prev_token();
4149 self.parse_json_access(expr)
4150 } else {
4151 parser_err!(
4153 format!("No infix parser for token {:?}", tok.token),
4154 tok.span.start
4155 )
4156 }
4157 }
4158
4159 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4161 if self.parse_keyword(Keyword::ESCAPE) {
4162 Ok(Some(self.parse_value()?))
4163 } else {
4164 Ok(None)
4165 }
4166 }
4167
    /// Parses the interior of a `[...]` subscript after the opening `[` has
    /// already been consumed: either a plain index (`[expr]`) or a slice
    /// (`[lower:upper]` / `[lower:upper:stride]`, each bound optional).
    ///
    /// Consumes tokens up to and including the closing `]`.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound is omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[expr]` is a plain index; `[:` followed directly by `]` is an
        // empty slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // A lower bound was parsed and `]` has not appeared, so the next
        // token must be the `:` separating lower and upper bounds.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lower:]` — upper bound omitted.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[lower:upper]` — no stride component.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // Third component: `[lower:upper:stride]` (stride itself optional).
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride expression was parsed, the `]` is still pending.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4242
4243 pub fn parse_multi_dim_subscript(
4245 &mut self,
4246 chain: &mut Vec<AccessExpr>,
4247 ) -> Result<(), ParserError> {
4248 while self.consume_token(&Token::LBracket) {
4249 self.parse_subscript(chain)?;
4250 }
4251 Ok(())
4252 }
4253
4254 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4258 let subscript = self.parse_subscript_inner()?;
4259 chain.push(AccessExpr::Subscript(subscript));
4260 Ok(())
4261 }
4262
4263 fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
4264 let token = self.next_token();
4265 match token.token {
4266 Token::Word(Word {
4267 value,
4268 quote_style: quote_style @ (Some('"') | Some('`') | None),
4271 keyword: _,
4274 }) => Ok(JsonPathElem::Dot {
4275 key: value,
4276 quoted: quote_style.is_some(),
4277 }),
4278
4279 Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
4283
4284 _ => self.expected("variant object key name", token),
4285 }
4286 }
4287
4288 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4289 let path = self.parse_json_path()?;
4290 Ok(Expr::JsonAccess {
4291 value: Box::new(expr),
4292 path,
4293 })
4294 }
4295
    /// Parses a JSON path: a leading `:` element (colon-dot or
    /// colon-bracket form) followed by any number of `.key` and `[expr]`
    /// elements. Stops (and rewinds one token) at the first token that does
    /// not continue the path.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:[expr]` — only valid as the very first element.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // `:key` — only valid as the very first element.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.key` — only valid after at least one element.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; put the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this after seeing a path-starting token, so
        // at least one element must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4328
    /// Parses the right-hand side of `[NOT] IN`, after the `IN` keyword has
    /// been consumed: `IN UNNEST(expr)`, `IN (subquery)`, or
    /// `IN (expr, ...)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array_expression)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; `maybe_parse` rewinds the token stream on
        // failure so the list branch re-parses from the `(`'s interior.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects permit `IN ()` with no elements.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4363
4364 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4366 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4369 self.expect_keyword_is(Keyword::AND)?;
4370 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4371 Ok(Expr::Between {
4372 expr: Box::new(expr),
4373 negated,
4374 low: Box::new(low),
4375 high: Box::new(high),
4376 })
4377 }
4378
4379 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4381 Ok(Expr::Cast {
4382 kind: CastKind::DoubleColon,
4383 expr: Box::new(expr),
4384 data_type: self.parse_data_type()?,
4385 array: false,
4386 format: None,
4387 })
4388 }
4389
    /// Returns the precedence of the next (peeked) token, delegating to the
    /// dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4394
    /// Returns the token at absolute position `index` in the token stream,
    /// or the EOF sentinel when the index is out of bounds.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4400
    /// Returns a clone of the next non-whitespace token without advancing
    /// the parser.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4408
    /// Returns a reference to the next non-whitespace token without
    /// advancing the parser (allocation-free variant of [`Self::peek_token`]).
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4414
4415 pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4438 self.peek_tokens_with_location()
4439 .map(|with_loc| with_loc.token)
4440 }
4441
4442 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4447 let mut index = self.index;
4448 core::array::from_fn(|_| loop {
4449 let token = self.tokens.get(index);
4450 index += 1;
4451 if let Some(TokenWithSpan {
4452 token: Token::Whitespace(_),
4453 span: _,
4454 }) = token
4455 {
4456 continue;
4457 }
4458 break token.cloned().unwrap_or(TokenWithSpan {
4459 token: Token::EOF,
4460 span: Span::empty(),
4461 });
4462 })
4463 }
4464
4465 pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4470 let mut index = self.index;
4471 core::array::from_fn(|_| loop {
4472 let token = self.tokens.get(index);
4473 index += 1;
4474 if let Some(TokenWithSpan {
4475 token: Token::Whitespace(_),
4476 span: _,
4477 }) = token
4478 {
4479 continue;
4480 }
4481 break token.unwrap_or(&EOF_TOKEN);
4482 })
4483 }
4484
    /// Returns a clone of the `n`th upcoming non-whitespace token without
    /// advancing the parser.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4489
4490 pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4492 let mut index = self.index;
4493 loop {
4494 index += 1;
4495 match self.tokens.get(index - 1) {
4496 Some(TokenWithSpan {
4497 token: Token::Whitespace(_),
4498 span: _,
4499 }) => continue,
4500 non_whitespace => {
4501 if n == 0 {
4502 return non_whitespace.unwrap_or(&EOF_TOKEN);
4503 }
4504 n -= 1;
4505 }
4506 }
4507 }
4508 }
4509
    /// Returns a clone of the very next raw token — whitespace included —
    /// without advancing the parser.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4515
4516 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4518 self.tokens
4519 .get(self.index + n)
4520 .cloned()
4521 .unwrap_or(TokenWithSpan {
4522 token: Token::EOF,
4523 span: Span::empty(),
4524 })
4525 }
4526
    /// Returns a reference to the raw token `n` positions ahead —
    /// whitespace included — or the EOF sentinel when out of range.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4531
4532 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4536 let index = self.index;
4537 let matched = self.parse_keywords(expected);
4538 self.index = index;
4539 matched
4540 }
4541
    /// Advances past any whitespace and returns a clone of the new current
    /// (non-whitespace) token.
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4550
    /// Returns the absolute index of the current token (the one most
    /// recently consumed); saturates at 0 before any token is consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4558
4559 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4561 self.index += 1;
4562 self.tokens.get(self.index - 1)
4563 }
4564
4565 pub fn advance_token(&mut self) {
4569 loop {
4570 self.index += 1;
4571 match self.tokens.get(self.index - 1) {
4572 Some(TokenWithSpan {
4573 token: Token::Whitespace(_),
4574 span: _,
4575 }) => continue,
4576 _ => break,
4577 }
4578 }
4579 }
4580
    /// Returns the current token (the one most recently consumed), or EOF
    /// before any token has been consumed.
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4587
    /// Returns the raw token immediately before the current one (may be
    /// whitespace), or EOF when there is none.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4594
    /// Returns the raw token immediately after the current one (may be
    /// whitespace), or EOF at the end of the stream.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4601
4602 pub fn prev_token(&mut self) {
4609 loop {
4610 assert!(self.index > 0);
4611 self.index -= 1;
4612 if let Some(TokenWithSpan {
4613 token: Token::Whitespace(_),
4614 span: _,
4615 }) = self.tokens.get(self.index)
4616 {
4617 continue;
4618 }
4619 return;
4620 }
4621 }
4622
    /// Builds a standard `Expected: X, found: Y` parser error, located at
    /// the offending token's span (owned-token variant).
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4630
    /// Builds a standard `Expected: X, found: Y` parser error, located at
    /// the offending token's span (borrowed-token variant).
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4638
    /// Builds a standard `Expected: X, found: Y` parser error for the token
    /// at absolute `index` (EOF when out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4647
4648 #[must_use]
4651 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4652 if self.peek_keyword(expected) {
4653 self.advance_token();
4654 true
4655 } else {
4656 false
4657 }
4658 }
4659
    /// Returns true when the next non-whitespace token is a word whose
    /// keyword equals `expected`, without consuming it.
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4667
    /// Consumes `expected` followed by the exact token sequence `tokens`;
    /// returns true only when the whole pattern matched (and was consumed).
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4678
    /// Like [`Self::parse_keyword_with_tokens`] but never consumes: returns
    /// whether the keyword+tokens pattern is next in the stream.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4686
4687 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4688 match &self.peek_token_ref().token {
4689 Token::Word(w) if expected == w.keyword => {
4690 for (idx, token) in tokens.iter().enumerate() {
4691 if self.peek_nth_token_ref(idx + 1).token != *token {
4692 return false;
4693 }
4694 }
4695
4696 if consume {
4697 for _ in 0..(tokens.len() + 1) {
4698 self.advance_token();
4699 }
4700 }
4701
4702 true
4703 }
4704 _ => false,
4705 }
4706 }
4707
    /// Consumes the exact sequence `keywords` when all are present in
    /// order; otherwise leaves the parser position unchanged.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4715
    /// Consumes the exact sequence `keywords` and returns the token index
    /// of the first keyword; on any mismatch the index is rolled back to
    /// the starting position and `None` is returned.
    ///
    /// Note: an empty `keywords` slice also returns `None`.
    #[must_use]
    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
        let start_index = self.index;
        let mut first_keyword_index = None;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // Roll back so partially-matched prefixes consume nothing.
                self.index = start_index;
                return None;
            }
            if first_keyword_index.is_none() {
                // `index` now points past the keyword just consumed.
                first_keyword_index = Some(self.index.saturating_sub(1));
            }
        }
        first_keyword_index
    }
4733
4734 #[must_use]
4737 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4738 for keyword in keywords {
4739 if self.peek_keyword(*keyword) {
4740 return Some(*keyword);
4741 }
4742 }
4743 None
4744 }
4745
4746 #[must_use]
4750 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4751 match &self.peek_token_ref().token {
4752 Token::Word(w) => {
4753 keywords
4754 .iter()
4755 .find(|keyword| **keyword == w.keyword)
4756 .map(|keyword| {
4757 self.advance_token();
4758 *keyword
4759 })
4760 }
4761 _ => None,
4762 }
4763 }
4764
4765 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4768 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4769 Ok(keyword)
4770 } else {
4771 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4772 self.expected_ref(
4773 &format!("one of {}", keywords.join(" or ")),
4774 self.peek_token_ref(),
4775 )
4776 }
4777 }
4778
4779 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4784 if self.parse_keyword(expected) {
4785 Ok(self.get_current_token().clone())
4786 } else {
4787 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4788 }
4789 }
4790
4791 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4797 if self.parse_keyword(expected) {
4798 Ok(())
4799 } else {
4800 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4801 }
4802 }
4803
4804 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4807 for &kw in expected {
4808 self.expect_keyword_is(kw)?;
4809 }
4810 Ok(())
4811 }
4812
4813 #[must_use]
4817 pub fn consume_token(&mut self, expected: &Token) -> bool {
4818 if self.peek_token_ref() == expected {
4819 self.advance_token();
4820 true
4821 } else {
4822 false
4823 }
4824 }
4825
4826 #[must_use]
4830 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4831 let index = self.index;
4832 for token in tokens {
4833 if !self.consume_token(token) {
4834 self.index = index;
4835 return false;
4836 }
4837 }
4838 true
4839 }
4840
4841 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4843 if self.peek_token_ref() == expected {
4844 Ok(self.next_token())
4845 } else {
4846 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4847 }
4848 }
4849
4850 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4851 where
4852 <T as FromStr>::Err: Display,
4853 {
4854 s.parse::<T>().map_err(|e| {
4855 ParserError::ParserError(format!(
4856 "Could not parse '{s}' as {}: {e}{loc}",
4857 core::any::type_name::<T>()
4858 ))
4859 })
4860 }
4861
4862 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4864 let trailing_commas =
4870 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4871
4872 self.parse_comma_separated_with_trailing_commas(
4873 |p| p.parse_select_item(),
4874 trailing_commas,
4875 Self::is_reserved_for_column_alias,
4876 )
4877 }
4878
    /// Parses a comma-separated list of `GRANT`/`REVOKE` permissions.
    ///
    /// Implements its own trailing-comma tolerance (rather than the shared
    /// comma-list helper) because the list is terminated by `ON` in
    /// addition to the usual closing tokens.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // A comma was consumed; if the next token ends the list this
                // was a (permitted) trailing comma.
                match &self.peek_token_ref().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4902
    /// Parses the comma-separated relations of a `FROM` clause, each with
    /// its attached joins. Trailing commas are tolerated only when the
    /// dialect allows them, and a keyword terminates the list when the
    /// dialect says it cannot begin a table factor.
    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
        let trailing_commas = self.dialect.supports_from_trailing_commas();

        self.parse_comma_separated_with_trailing_commas(
            Parser::parse_table_and_joins,
            trailing_commas,
            |kw, parser| !self.dialect.is_table_factor(kw, parser),
        )
    }
4913
    /// Decides whether a comma-separated list has ended at the current
    /// position, consuming the separating comma when the list continues.
    ///
    /// With `trailing_commas` enabled, a comma followed by a reserved
    /// keyword (per `is_reserved_keyword`) or a closing token also counts
    /// as the end of the list; the peeked token is pushed back either way.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            // No comma: the list is over.
            true
        } else if trailing_commas {
            // Comma consumed; look one token ahead to see whether it was a
            // trailing comma before a terminator.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            // Put the peeked token back regardless of the verdict.
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4946
    /// Comma-list end test using the parser's configured trailing-comma
    /// option and the default column-alias reserved-keyword rule.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4955
    /// Parses one or more items produced by `f`, separated by commas, using
    /// the parser's configured trailing-comma option and the default
    /// column-alias reserved-keyword rule.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4967
4968 fn parse_comma_separated_with_trailing_commas<T, F, R>(
4973 &mut self,
4974 mut f: F,
4975 trailing_commas: bool,
4976 is_reserved_keyword: R,
4977 ) -> Result<Vec<T>, ParserError>
4978 where
4979 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4980 R: Fn(&Keyword, &mut Parser) -> bool,
4981 {
4982 let mut values = vec![];
4983 loop {
4984 values.push(f(self)?);
4985 if self.is_parse_comma_separated_end_with_trailing_commas(
4986 trailing_commas,
4987 &is_reserved_keyword,
4988 ) {
4989 break;
4990 }
4991 }
4992 Ok(values)
4993 }
4994
4995 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4997 where
4998 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4999 {
5000 let mut values = vec![];
5001 loop {
5002 values.push(f(self)?);
5003 if !self.consume_token(&Token::Period) {
5004 break;
5005 }
5006 }
5007 Ok(values)
5008 }
5009
5010 pub fn parse_keyword_separated<T, F>(
5012 &mut self,
5013 keyword: Keyword,
5014 mut f: F,
5015 ) -> Result<Vec<T>, ParserError>
5016 where
5017 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
5018 {
5019 let mut values = vec![];
5020 loop {
5021 values.push(f(self)?);
5022 if !self.parse_keyword(keyword) {
5023 break;
5024 }
5025 }
5026 Ok(values)
5027 }
5028
    /// Runs `f` between a required `(` and `)` pair, returning its result.
    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.expect_token(&Token::LParen)?;
        let res = f(self)?;
        self.expect_token(&Token::RParen)?;
        Ok(res)
    }
5039
    /// Like [`Self::parse_comma_separated`] but also accepts an empty list:
    /// returns `vec![]` when `end_token` (or, with trailing commas enabled,
    /// a lone comma before `end_token`) is next. Does not consume
    /// `end_token` itself.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // Empty list: the terminator is already next.
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        // Empty list written as `,<end>` when trailing commas are allowed;
        // only the comma is consumed.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5061
    /// Parses a sequence of semicolon-terminated statements, stopping at
    /// EOF or at any unquoted word listed in `terminal_keywords` (which is
    /// left unconsumed). Every parsed statement must be followed by `;`.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                // Quoted words are identifiers, never terminators.
                Token::Word(w)
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) =>
                {
                    break;
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5086
    /// Returns true when `kw` cannot serve as a column alias in the current
    /// dialect — i.e. it terminates a projection-style comma list.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5093
5094 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
5098 where
5099 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5100 {
5101 match self.try_parse(f) {
5102 Ok(t) => Ok(Some(t)),
5103 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
5104 _ => Ok(None),
5105 }
5106 }
5107
    /// Runs `f`, rewinding the token position to where it started whenever
    /// `f` fails, and propagating the error unchanged.
    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        let index = self.index;
        match f(self) {
            Ok(t) => Ok(t),
            Err(e) => {
                // Undo any tokens `f` consumed before failing.
                self.index = index;
                Err(e)
            }
        }
    }
5123
    /// Parses an optional `ALL` / `DISTINCT` / `DISTINCT ON (exprs)` set
    /// quantifier. Returns `Ok(None)` when neither keyword is present, and
    /// rejects contradictory `ALL DISTINCT` / `DISTINCT ALL` combinations.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Capture the location up front for error reporting.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            // Unreachable in practice: only ALL/DISTINCT can be returned.
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only DISTINCT may carry an ON (...) suffix.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // `DISTINCT ON ()` — rewind so the final expect consumes the `)`.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5162
    /// Parses a `CREATE ...` statement: consumes the leading modifiers
    /// (`OR REPLACE`, `OR ALTER`, `LOCAL`/`GLOBAL`, `TRANSIENT`,
    /// `TEMP[ORARY]`, DuckDB `PERSISTENT`, view params) and then dispatches on
    /// the object-type keyword(s) that follow.
    ///
    /// NOTE: branch order is significant. Multi-keyword forms (e.g.
    /// `UNIQUE INDEX`, `DEFAULT CONVERSION`, `CONSTRAINT TRIGGER`) rely on the
    /// earlier single-keyword checks failing without consuming any input.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Fold LOCAL/GLOBAL into one tri-state flag: Some(true)=GLOBAL,
        // Some(false)=LOCAL, None=neither.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            if self.parse_keyword(Keyword::MAPPING) {
                self.parse_create_user_mapping().map(Into::into)
            } else {
                self.parse_create_user(or_replace).map(Into::into)
            }
        } else if self.parse_keyword(Keyword::AGGREGATE) {
            self.parse_create_aggregate(or_replace).map(Into::into)
        } else if self.peek_keyword(Keyword::TRUSTED)
            || self.peek_keyword(Keyword::PROCEDURAL)
            || self.peek_keyword(Keyword::LANGUAGE)
        {
            // `CREATE [TRUSTED] [PROCEDURAL] LANGUAGE <name>`.
            let trusted = self.parse_keyword(Keyword::TRUSTED);
            let procedural = self.parse_keyword(Keyword::PROCEDURAL);
            if self.parse_keyword(Keyword::LANGUAGE) {
                self.parse_create_language(or_replace, trusted, procedural)
                    .map(Into::into)
            } else {
                self.expected_ref(
                    "LANGUAGE after TRUSTED or PROCEDURAL",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keyword(Keyword::TRANSFORM) {
            self.parse_create_transform(or_replace).map(Into::into)
        } else if or_replace {
            // OR REPLACE is only valid for the object types handled above.
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::CAST) {
            self.parse_create_cast().map(Into::into)
        } else if self.parse_keyword(Keyword::CONVERSION) {
            self.parse_create_conversion(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CONVERSION]) {
            self.parse_create_conversion(true).map(Into::into)
        } else if self.parse_keyword(Keyword::RULE) {
            self.parse_create_rule().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // `OPERATOR FAMILY` / `OPERATOR CLASS` / plain `OPERATOR`.
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else if self.parse_keyword(Keyword::FOREIGN) {
            if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
                self.parse_create_foreign_data_wrapper().map(Into::into)
            } else if self.parse_keyword(Keyword::TABLE) {
                self.parse_create_foreign_table().map(Into::into)
            } else {
                self.expected_ref(
                    "DATA WRAPPER or TABLE after CREATE FOREIGN",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
            self.parse_create_text_search()
        } else if self.parse_keyword(Keyword::PUBLICATION) {
            self.parse_create_publication().map(Into::into)
        } else if self.parse_keyword(Keyword::SUBSCRIPTION) {
            self.parse_create_subscription().map(Into::into)
        } else if self.parse_keyword(Keyword::STATISTICS) {
            self.parse_create_statistics().map(Into::into)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::METHOD]) {
            self.parse_create_access_method().map(Into::into)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::TRIGGER]) {
            self.parse_create_event_trigger().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLESPACE) {
            self.parse_create_tablespace().map(Into::into)
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5315
5316 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5317 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5318 let name = self.parse_identifier()?;
5319 let options = self
5320 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5321 .options;
5322 let with_tags = self.parse_keyword(Keyword::WITH);
5323 let tags = if self.parse_keyword(Keyword::TAG) {
5324 self.parse_key_value_options(true, &[])?.options
5325 } else {
5326 vec![]
5327 };
5328 Ok(CreateUser {
5329 or_replace,
5330 if_not_exists,
5331 name,
5332 options: KeyValueOptions {
5333 options,
5334 delimiter: KeyValueOptionsDelimiter::Space,
5335 },
5336 with_tags,
5337 tags: KeyValueOptions {
5338 options: tags,
5339 delimiter: KeyValueOptionsDelimiter::Comma,
5340 },
5341 })
5342 }
5343
    /// Parses a `CREATE SECRET` statement (the tokens after `SECRET`):
    /// `[IF NOT EXISTS] [<name>] [IN <storage>] ( TYPE <type> [, <key> <value>]... )`.
    ///
    /// `temporary`/`persistent` reflect modifiers already consumed by the
    /// caller and are mutually exclusive.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        // Before the `(` there may be `IN <storage>`, a bare `<name>`, or
        // `<name> IN <storage>` — all optional.
        if self.peek_token_ref().token != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN <storage>`.
            if storage_specifier.is_none()
                && self.peek_token_ref().token != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        // The option list is mandatory and must start with `TYPE <type>`.
        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        // Remaining options are comma-separated `<key> <value>` pairs.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT cannot both be specified.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
5402
5403 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5405 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5406 if self.parse_keyword(Keyword::TABLE) {
5407 let table_name = self.parse_object_name(false)?;
5408 if self.peek_token_ref().token != Token::EOF {
5409 if let Token::Word(word) = &self.peek_token_ref().token {
5410 if word.keyword == Keyword::OPTIONS {
5411 options = self.parse_options(Keyword::OPTIONS)?
5412 }
5413 };
5414
5415 if self.peek_token_ref().token != Token::EOF {
5416 let (a, q) = self.parse_as_query()?;
5417 has_as = a;
5418 query = Some(q);
5419 }
5420
5421 Ok(Statement::Cache {
5422 table_flag,
5423 table_name,
5424 has_as,
5425 options,
5426 query,
5427 })
5428 } else {
5429 Ok(Statement::Cache {
5430 table_flag,
5431 table_name,
5432 has_as,
5433 options,
5434 query,
5435 })
5436 }
5437 } else {
5438 table_flag = Some(self.parse_object_name(false)?);
5439 if self.parse_keyword(Keyword::TABLE) {
5440 let table_name = self.parse_object_name(false)?;
5441 if self.peek_token_ref().token != Token::EOF {
5442 if let Token::Word(word) = &self.peek_token_ref().token {
5443 if word.keyword == Keyword::OPTIONS {
5444 options = self.parse_options(Keyword::OPTIONS)?
5445 }
5446 };
5447
5448 if self.peek_token_ref().token != Token::EOF {
5449 let (a, q) = self.parse_as_query()?;
5450 has_as = a;
5451 query = Some(q);
5452 }
5453
5454 Ok(Statement::Cache {
5455 table_flag,
5456 table_name,
5457 has_as,
5458 options,
5459 query,
5460 })
5461 } else {
5462 Ok(Statement::Cache {
5463 table_flag,
5464 table_name,
5465 has_as,
5466 options,
5467 query,
5468 })
5469 }
5470 } else {
5471 if self.peek_token_ref().token == Token::EOF {
5472 self.prev_token();
5473 }
5474 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5475 }
5476 }
5477 }
5478
5479 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5481 match &self.peek_token_ref().token {
5482 Token::Word(word) => match word.keyword {
5483 Keyword::AS => {
5484 self.next_token();
5485 Ok((true, self.parse_query()?))
5486 }
5487 _ => Ok((false, self.parse_query()?)),
5488 },
5489 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5490 }
5491 }
5492
5493 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5495 self.expect_keyword_is(Keyword::TABLE)?;
5496 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5497 let table_name = self.parse_object_name(false)?;
5498 Ok(Statement::UNCache {
5499 table_name,
5500 if_exists,
5501 })
5502 }
5503
5504 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5506 self.expect_keyword_is(Keyword::TABLE)?;
5507 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5508 let table_name = self.parse_object_name(false)?;
5509 self.expect_keyword_is(Keyword::USING)?;
5510 let module_name = self.parse_identifier()?;
5511 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5516 Ok(Statement::CreateVirtualTable {
5517 name: table_name,
5518 if_not_exists,
5519 module_name,
5520 module_args,
5521 })
5522 }
5523
5524 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5526 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5527
5528 let schema_name = self.parse_schema_name()?;
5529
5530 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5531 Some(self.parse_expr()?)
5532 } else {
5533 None
5534 };
5535
5536 let with = if self.peek_keyword(Keyword::WITH) {
5537 Some(self.parse_options(Keyword::WITH)?)
5538 } else {
5539 None
5540 };
5541
5542 let options = if self.peek_keyword(Keyword::OPTIONS) {
5543 Some(self.parse_options(Keyword::OPTIONS)?)
5544 } else {
5545 None
5546 };
5547
5548 let clone = if self.parse_keyword(Keyword::CLONE) {
5549 Some(self.parse_object_name(false)?)
5550 } else {
5551 None
5552 };
5553
5554 Ok(Statement::CreateSchema {
5555 schema_name,
5556 if_not_exists,
5557 with,
5558 options,
5559 default_collate_spec,
5560 clone,
5561 })
5562 }
5563
5564 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5565 if self.parse_keyword(Keyword::AUTHORIZATION) {
5566 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5567 } else {
5568 let name = self.parse_object_name(false)?;
5569
5570 if self.parse_keyword(Keyword::AUTHORIZATION) {
5571 Ok(SchemaName::NamedAuthorization(
5572 name,
5573 self.parse_identifier()?,
5574 ))
5575 } else {
5576 Ok(SchemaName::Simple(name))
5577 }
5578 }
5579 }
5580
    /// Parses a `CREATE DATABASE` statement after the `DATABASE` keyword:
    /// `[IF NOT EXISTS] <name> [LOCATION '<..>'] [MANAGEDLOCATION '<..>']
    /// [CLONE <src>] [[DEFAULT] CHARACTER SET|CHARSET [=] <id>]
    /// [[DEFAULT] COLLATE [=] <id>]`.
    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let db_name = self.parse_object_name(false)?;
        let mut location = None;
        let mut managed_location = None;
        // LOCATION / MANAGEDLOCATION may appear in either order.
        loop {
            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
                Some(Keyword::MANAGEDLOCATION) => {
                    managed_location = Some(self.parse_literal_string()?)
                }
                _ => break,
            }
        }
        let clone = if self.parse_keyword(Keyword::CLONE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // Trailing character-set / collation options, each optionally
        // prefixed with DEFAULT, accepted in any order.
        let mut default_charset = None;
        let mut default_collation = None;
        loop {
            let has_default = self.parse_keyword(Keyword::DEFAULT);
            // NOTE(review): `&&` binds tighter than `||`, so the `is_none()`
            // guard covers only the `CHARACTER SET` spelling; a repeated
            // `CHARSET` clause would overwrite an earlier value, and COLLATE
            // has no such guard at all. Confirm whether this asymmetry is
            // intentional.
            if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
                || self.parse_keyword(Keyword::CHARSET)
            {
                let _ = self.consume_token(&Token::Eq);
                default_charset = Some(self.parse_identifier()?.value);
            } else if self.parse_keyword(Keyword::COLLATE) {
                let _ = self.consume_token(&Token::Eq);
                default_collation = Some(self.parse_identifier()?.value);
            } else if has_default {
                // A DEFAULT was consumed but nothing recognizable followed;
                // push it back before leaving the loop.
                self.prev_token();
                break;
            } else {
                break;
            }
        }

        Ok(Statement::CreateDatabase {
            db_name,
            if_not_exists: ine,
            location,
            managed_location,
            or_replace: false,
            transient: false,
            clone,
            data_retention_time_in_days: None,
            max_data_extension_time_in_days: None,
            external_volume: None,
            catalog: None,
            replace_invalid_characters: None,
            default_ddl_collation: None,
            storage_serialization_policy: None,
            comment: None,
            default_charset,
            default_collation,
            catalog_sync: None,
            catalog_sync_namespace_mode: None,
            catalog_sync_namespace_flatten_delimiter: None,
            with_tags: None,
            with_contacts: None,
        })
    }
5655
5656 pub fn parse_optional_create_function_using(
5658 &mut self,
5659 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5660 if !self.parse_keyword(Keyword::USING) {
5661 return Ok(None);
5662 };
5663 let keyword =
5664 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5665
5666 let uri = self.parse_literal_string()?;
5667
5668 match keyword {
5669 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5670 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5671 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5672 _ => self.expected(
5673 "JAR, FILE or ARCHIVE, got {:?}",
5674 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5675 ),
5676 }
5677 }
5678
    /// Dispatches `CREATE ... FUNCTION` to the dialect-specific parser; the
    /// `FUNCTION` keyword has already been consumed by the caller.
    ///
    /// `or_alter`/`or_replace`/`temporary` are the modifiers the caller parsed;
    /// not every dialect path consumes all of them.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB treats CREATE FUNCTION as a macro definition.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
                .map(Into::into)
        } else {
            // Rewind so the error points at the `FUNCTION` keyword itself.
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5705
    /// Parses the PostgreSQL flavor of `CREATE FUNCTION`:
    /// `<name>(<args>) [RETURNS <type>] <attribute>...`, where the attributes
    /// (AS body, LANGUAGE, behavior, null handling, PARALLEL, SECURITY, SET,
    /// RETURN) may appear in any order, each at most once.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Accumulates the order-independent, single-use function attributes.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a duplicate occurrence of a single-use attribute.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // `SET <param> { FROM CURRENT | { = | TO } { DEFAULT | <values> } }`;
                // unlike the attributes above, SET may appear multiple times.
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                break;
            }
        }

        Ok(CreateFunction {
            // This code path never receives OR ALTER, so it is fixed to false.
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5860
5861 fn parse_hive_create_function(
5865 &mut self,
5866 or_replace: bool,
5867 temporary: bool,
5868 ) -> Result<CreateFunction, ParserError> {
5869 let name = self.parse_object_name(false)?;
5870 self.expect_keyword_is(Keyword::AS)?;
5871
5872 let body = self.parse_create_function_body_string()?;
5873 let using = self.parse_optional_create_function_using()?;
5874
5875 Ok(CreateFunction {
5876 or_alter: false,
5877 or_replace,
5878 temporary,
5879 name,
5880 function_body: Some(body),
5881 using,
5882 if_not_exists: false,
5883 args: None,
5884 return_type: None,
5885 behavior: None,
5886 called_on_null: None,
5887 parallel: None,
5888 security: None,
5889 set_params: vec![],
5890 language: None,
5891 determinism_specifier: None,
5892 options: None,
5893 remote_connection: None,
5894 })
5895 }
5896
    /// Parses the BigQuery flavor of `CREATE FUNCTION`:
    /// `[IF NOT EXISTS] <name>(<params>) [RETURNS <type>]
    /// [DETERMINISTIC | NOT DETERMINISTIC] [LANGUAGE <lang>]
    /// [REMOTE WITH CONNECTION <conn>] [OPTIONS(...)] [AS <expr> [OPTIONS(...)]]`.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS(...) may appear before or after the AS body; which side it
        // was found on selects the AST variant below.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions carry no AS body; all others require one.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS seen yet, so any list must follow the body:
                // AS came before OPTIONS.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5976
    /// Parses the MS SQL Server flavor of `CREATE FUNCTION`, which requires a
    /// `RETURNS` clause (a scalar type or a `<var> TABLE (...)` definition)
    /// and one of three body forms: `BEGIN ... END`, `RETURN (<expr>)`, or
    /// `RETURN SELECT ...`.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Speculatively parse the table-valued form `RETURNS <name> TABLE (...)`;
        // on failure maybe_parse rewinds and we fall back to a scalar type.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            // Confirm TABLE follows, then step back so parse_data_type itself
            // consumes the TABLE keyword.
            p.expect_keyword_is(Keyword::TABLE)?;
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // The AS keyword before the body is optional.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            // Multi-statement body: BEGIN <statements> END, tokens retained.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token_ref().token == Token::LParen {
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
6067
6068 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
6069 if self.parse_keyword(Keyword::SETOF) {
6070 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
6071 } else {
6072 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
6073 }
6074 }
6075
6076 fn parse_create_function_name_and_params(
6077 &mut self,
6078 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
6079 let name = self.parse_object_name(false)?;
6080 let parse_function_param =
6081 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6082 let name = parser.parse_identifier()?;
6083 let data_type = parser.parse_data_type()?;
6084 let default_expr = if parser.consume_token(&Token::Eq) {
6085 Some(parser.parse_expr()?)
6086 } else {
6087 None
6088 };
6089
6090 Ok(OperateFunctionArg {
6091 mode: None,
6092 name: Some(name),
6093 data_type,
6094 default_expr,
6095 })
6096 };
6097 self.expect_token(&Token::LParen)?;
6098 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6099 self.expect_token(&Token::RParen)?;
6100 Ok((name, args))
6101 }
6102
    /// Parses one `CREATE FUNCTION` argument:
    /// `[IN|OUT|INOUT|VARIADIC] [<name>] <type> [{DEFAULT | =} <expr>]`.
    ///
    /// The argument name is optional and disambiguated by lookahead: the first
    /// item is parsed as a data type, and if a second data type follows, the
    /// first one is reinterpreted as the argument name.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        let mut name = None;
        // May actually be the argument name; resolved below.
        let mut data_type = self.parse_data_type()?;

        // Remember where the first data type ended so its token can be
        // recovered if it turns out to be the name.
        let data_type_idx = self.get_current_index();

        // DEFAULT can never begin a type; failing early here makes the
        // speculative maybe_parse below rewind instead of mis-parsing.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // If a second data type parses, the first parse was really
        // `<name> <type>`.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // Only a single bare word can serve as an argument name.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6163
    /// Parses one argument in a `CREATE AGGREGATE` signature.
    ///
    /// Only the `IN` mode is permitted (`OUT`/`INOUT`/`VARIADIC` are rejected
    /// with an error) and default values are not allowed. As in
    /// `parse_function_arg`, an optional argument name is disambiguated from
    /// the type by speculatively parsing a second data type.
    fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else {
            // Other modes are syntactically invalid here; report them
            // explicitly instead of failing later with a confusing message.
            if self
                .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
                .is_some()
            {
                return self.expected_ref(
                    "IN or argument type in aggregate signature",
                    self.peek_token_ref(),
                );
            }
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        // Where the first data-type parse ended; re-read as the name below if
        // a second type follows.
        let data_type_idx = self.get_current_index();

        // Like `parse_data_type`, but bails out on tokens that terminate an
        // aggregate argument (DEFAULT, ORDER, `,`, `)`), so `maybe_parse`
        // rewinds instead of consuming them as a type.
        fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT)
                || parser.peek_keyword(Keyword::ORDER)
                || parser.peek_token_ref().token == Token::Comma
                || parser.peek_token_ref().token == Token::RParen
            {
                parser_err!(
                    "The current token cannot start an aggregate argument type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
            let token = self.token_at(data_type_idx);
            // The earlier parse must have been a bare word to serve as a name.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Aggregate arguments cannot carry defaults; reject them up front.
        if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
            return self.expected_ref(
                "',' or ')' or ORDER BY after aggregate argument type",
                self.peek_token_ref(),
            );
        }

        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr: None,
        })
    }
6226
6227 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6233 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6234 {
6235 self.prev_token();
6236 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6237 }
6238 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6239 let trigger_name = self.parse_object_name(false)?;
6240 let table_name = if self.parse_keyword(Keyword::ON) {
6241 Some(self.parse_object_name(false)?)
6242 } else {
6243 None
6244 };
6245 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6246 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6247 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6248 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6249 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6250 )),
6251 None => None,
6252 };
6253 Ok(DropTrigger {
6254 if_exists,
6255 trigger_name,
6256 table_name,
6257 option,
6258 })
6259 }
6260
    /// Parses the body of a `CREATE [TEMPORARY] [CONSTRAINT] TRIGGER`
    /// statement, after the leading `CREATE` modifiers have already been
    /// consumed (they arrive as the boolean flags).
    ///
    /// Clause order follows the source text: name, optional period
    /// (BEFORE/AFTER/INSTEAD OF/FOR), events joined by OR, `ON table`,
    /// optional `FROM table`, constraint characteristics, `REFERENCING`
    /// clauses, `FOR [EACH] ROW|STATEMENT`, `WHEN`, and finally either an
    /// `EXECUTE` body or a statement block terminated by `END`.
    pub fn parse_create_trigger(
        &mut self,
        temporary: bool,
        or_alter: bool,
        or_replace: bool,
        is_constraint: bool,
    ) -> Result<CreateTrigger, ParserError> {
        // Triggers are only recognized for these dialects; otherwise back up
        // and report the generic CREATE error.
        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
        {
            self.prev_token();
            return self.expected_ref("an object type after CREATE", self.peek_token_ref());
        }

        let name = self.parse_object_name(false)?;
        // The period is optional; `maybe_parse` rewinds if none is present.
        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;

        // One or more events separated by OR, e.g. `INSERT OR UPDATE`.
        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `FROM table`; a parse failure here is deliberately
        // swallowed via `.ok()` (best-effort), leaving `None`.
        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
            self.parse_object_name(true).ok()
        } else {
            None
        };

        let characteristics = self.parse_constraint_characteristics()?;

        // Zero or more `REFERENCING OLD/NEW TABLE [AS] name` entries; the
        // loop ends when `parse_trigger_referencing` yields `None`.
        let mut referencing = vec![];
        if self.parse_keyword(Keyword::REFERENCING) {
            while let Some(refer) = self.parse_trigger_referencing()? {
                referencing.push(refer);
            }
        }

        // `FOR [EACH] ROW|STATEMENT`; EACH is optional and tracked in the
        // resulting kind.
        let trigger_object = if self.parse_keyword(Keyword::FOR) {
            let include_each = self.parse_keyword(Keyword::EACH);
            let trigger_object =
                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
                    Keyword::ROW => TriggerObject::Row,
                    Keyword::STATEMENT => TriggerObject::Statement,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
                    )),
                };

            Some(if include_each {
                TriggerObjectKind::ForEach(trigger_object)
            } else {
                TriggerObjectKind::For(trigger_object)
            })
        } else {
            // NOTE(review): this call is a no-op — the `if` above just
            // established that FOR is not at the cursor, and `parse_keyword`
            // does not advance on failure. Candidate for removal.
            let _ = self.parse_keyword(Keyword::FOR);

            None
        };

        // Optional `WHEN <condition>` guard.
        let condition = self
            .parse_keyword(Keyword::WHEN)
            .then(|| self.parse_expr())
            .transpose()?;

        // The trigger action: either `EXECUTE FUNCTION/PROCEDURE ...` or an
        // inline statement block ending at END. Exactly one of the two
        // options is populated.
        let mut exec_body = None;
        let mut statements = None;
        if self.parse_keyword(Keyword::EXECUTE) {
            exec_body = Some(self.parse_trigger_exec_body()?);
        } else {
            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
        }

        Ok(CreateTrigger {
            or_alter,
            temporary,
            or_replace,
            is_constraint,
            name,
            period,
            period_before_table: true,
            events,
            table_name,
            referenced_table_name,
            referencing,
            trigger_object,
            condition,
            exec_body,
            statements_as: false,
            statements,
            characteristics,
        })
    }
6352
6353 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6355 Ok(
6356 match self.expect_one_of_keywords(&[
6357 Keyword::FOR,
6358 Keyword::BEFORE,
6359 Keyword::AFTER,
6360 Keyword::INSTEAD,
6361 ])? {
6362 Keyword::FOR => TriggerPeriod::For,
6363 Keyword::BEFORE => TriggerPeriod::Before,
6364 Keyword::AFTER => TriggerPeriod::After,
6365 Keyword::INSTEAD => self
6366 .expect_keyword_is(Keyword::OF)
6367 .map(|_| TriggerPeriod::InsteadOf)?,
6368 unexpected_keyword => return Err(ParserError::ParserError(
6369 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6370 )),
6371 },
6372 )
6373 }
6374
6375 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6377 Ok(
6378 match self.expect_one_of_keywords(&[
6379 Keyword::INSERT,
6380 Keyword::UPDATE,
6381 Keyword::DELETE,
6382 Keyword::TRUNCATE,
6383 ])? {
6384 Keyword::INSERT => TriggerEvent::Insert,
6385 Keyword::UPDATE => {
6386 if self.parse_keyword(Keyword::OF) {
6387 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6388 TriggerEvent::Update(cols)
6389 } else {
6390 TriggerEvent::Update(vec![])
6391 }
6392 }
6393 Keyword::DELETE => TriggerEvent::Delete,
6394 Keyword::TRUNCATE => TriggerEvent::Truncate,
6395 unexpected_keyword => return Err(ParserError::ParserError(
6396 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6397 )),
6398 },
6399 )
6400 }
6401
6402 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6404 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6405 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6406 TriggerReferencingType::OldTable
6407 }
6408 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6409 TriggerReferencingType::NewTable
6410 }
6411 _ => {
6412 return Ok(None);
6413 }
6414 };
6415
6416 let is_as = self.parse_keyword(Keyword::AS);
6417 let transition_relation_name = self.parse_object_name(false)?;
6418 Ok(Some(TriggerReferencing {
6419 refer_type,
6420 is_as,
6421 transition_relation_name,
6422 }))
6423 }
6424
6425 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6432 let exec_type = match self
6433 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6434 {
6435 Keyword::FUNCTION => TriggerExecBodyType::Function,
6436 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6437 unexpected_keyword => {
6438 return Err(ParserError::ParserError(format!(
6439 "Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"
6440 )))
6441 }
6442 };
6443
6444 let func_name = self.parse_object_name(false)?;
6445
6446 let args = if self.consume_token(&Token::LParen) {
6447 if self.consume_token(&Token::RParen) {
6448 Some(vec![])
6449 } else {
6450 let exprs = self.parse_comma_separated(Parser::parse_expr)?;
6451 self.expect_token(&Token::RParen)?;
6452 Some(exprs)
6453 }
6454 } else {
6455 None
6456 };
6457
6458 Ok(TriggerExecBody {
6459 exec_type,
6460 func_name,
6461 args,
6462 })
6463 }
6464
6465 pub fn parse_create_macro(
6467 &mut self,
6468 or_replace: bool,
6469 temporary: bool,
6470 ) -> Result<Statement, ParserError> {
6471 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6472 let name = self.parse_object_name(false)?;
6473 self.expect_token(&Token::LParen)?;
6474 let args = if self.consume_token(&Token::RParen) {
6475 self.prev_token();
6476 None
6477 } else {
6478 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6479 };
6480
6481 self.expect_token(&Token::RParen)?;
6482 self.expect_keyword_is(Keyword::AS)?;
6483
6484 Ok(Statement::CreateMacro {
6485 or_replace,
6486 temporary,
6487 name,
6488 args,
6489 definition: if self.parse_keyword(Keyword::TABLE) {
6490 MacroDefinition::Table(self.parse_query()?)
6491 } else {
6492 MacroDefinition::Expr(self.parse_expr()?)
6493 },
6494 })
6495 } else {
6496 self.prev_token();
6497 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6498 }
6499 }
6500
6501 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6502 let name = self.parse_identifier()?;
6503
6504 let default_expr =
6505 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6506 Some(self.parse_expr()?)
6507 } else {
6508 None
6509 };
6510 Ok(MacroArg { name, default_expr })
6511 }
6512
6513 pub fn parse_create_external_table(
6515 &mut self,
6516 or_replace: bool,
6517 ) -> Result<CreateTable, ParserError> {
6518 self.expect_keyword_is(Keyword::TABLE)?;
6519 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6520 let table_name = self.parse_object_name(false)?;
6521 let (columns, constraints) = self.parse_columns()?;
6522
6523 let hive_distribution = self.parse_hive_distribution()?;
6524 let hive_formats = self.parse_hive_formats()?;
6525
6526 let file_format = if let Some(ref hf) = hive_formats {
6527 if let Some(ref ff) = hf.storage {
6528 match ff {
6529 HiveIOFormat::FileFormat { format } => Some(*format),
6530 _ => None,
6531 }
6532 } else {
6533 None
6534 }
6535 } else {
6536 None
6537 };
6538 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6539 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6540 let table_options = if !table_properties.is_empty() {
6541 CreateTableOptions::TableProperties(table_properties)
6542 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6543 CreateTableOptions::Options(options)
6544 } else {
6545 CreateTableOptions::None
6546 };
6547 Ok(CreateTableBuilder::new(table_name)
6548 .columns(columns)
6549 .constraints(constraints)
6550 .hive_distribution(hive_distribution)
6551 .hive_formats(hive_formats)
6552 .table_options(table_options)
6553 .or_replace(or_replace)
6554 .if_not_exists(if_not_exists)
6555 .external(true)
6556 .file_format(file_format)
6557 .location(location)
6558 .build())
6559 }
6560
6561 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6565 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6566 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6567 let table_name = self.parse_object_name(true)?;
6568
6569 self.expect_keyword_is(Keyword::CLONE)?;
6570 let clone = Some(self.parse_object_name(true)?);
6571
6572 let version =
6573 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6574 {
6575 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6576 } else {
6577 None
6578 };
6579
6580 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6581 CreateTableOptions::Options(options)
6582 } else {
6583 CreateTableOptions::None
6584 };
6585
6586 Ok(CreateTableBuilder::new(table_name)
6587 .snapshot(true)
6588 .if_not_exists(if_not_exists)
6589 .clone_clause(clone)
6590 .version(version)
6591 .table_options(table_options)
6592 .build())
6593 }
6594
6595 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6597 let next_token = self.next_token();
6598 match &next_token.token {
6599 Token::Word(w) => match w.keyword {
6600 Keyword::AVRO => Ok(FileFormat::AVRO),
6601 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6602 Keyword::ORC => Ok(FileFormat::ORC),
6603 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6604 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6605 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6606 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6607 _ => self.expected("fileformat", next_token),
6608 },
6609 _ => self.expected("fileformat", next_token),
6610 }
6611 }
6612
6613 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6614 if self.consume_token(&Token::Eq) {
6615 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6616 } else {
6617 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6618 }
6619 }
6620
6621 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6623 let next_token = self.next_token();
6624 match &next_token.token {
6625 Token::Word(w) => match w.keyword {
6626 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6627 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6628 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6629 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6630 _ => self.expected("fileformat", next_token),
6631 },
6632 _ => self.expected("fileformat", next_token),
6633 }
6634 }
6635
    /// Parses the body of a `CREATE ... VIEW` statement, after the leading
    /// `CREATE` modifiers (OR ALTER / OR REPLACE / TEMPORARY and any
    /// dialect-specific view params) have been consumed by the caller.
    ///
    /// Handles several dialect-specific extensions, each gated either on
    /// `dialect_of!` or a dialect capability method; see the inline comments.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery allows unquoted hyphens inside view names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // IF NOT EXISTS may appear either before or after the view name;
        // both positions are accepted and which one was used is recorded so
        // the statement can be round-tripped faithfully.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        // `WITH (...)` options; an empty list leaves `options` as None.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style OPTIONS(...) — a non-empty list overrides any
        // `WITH (...)` options captured above.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse-style `TO <table>` target.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '<text>'`, only for dialects that support it.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift-style `WITH NO SCHEMA BINDING`, which follows the query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        // `WITH [NO] DATA` is only meaningful for materialized views;
        // Some(true) = WITH DATA, Some(false) = WITH NO DATA.
        let with_data = if materialized && self.parse_keyword(Keyword::WITH) {
            if self.parse_keyword(Keyword::NO) {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(false)
            } else {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(true)
            }
        } else {
            None
        };

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
            with_data,
        })
    }
6745
    /// Parses optional CREATE VIEW modifiers: `ALGORITHM = UNDEFINED|MERGE|
    /// TEMPTABLE`, `DEFINER = <grantee>`, and `SQL SECURITY DEFINER|INVOKER`.
    ///
    /// Returns `Ok(None)` when none of the three clauses is present, so the
    /// caller can omit the params node entirely.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Defensive: `expect_one_of_keywords` should only return
                    // the keywords listed above. Back up so the error points
                    // at the offending token.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Defensive, as above.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only build a params node if at least one clause was present.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6803
    /// Parses a `CREATE ROLE` statement: one or more role names followed by
    /// an optional `WITH` and a sequence of dialect-specific role options.
    ///
    /// Each option may appear at most once; a duplicate produces an error
    /// carrying the source location of the offending keyword. The set of
    /// recognized option keywords depends on the dialect (MsSql only allows
    /// AUTHORIZATION; PostgreSQL allows the full list; others allow none).
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // The WITH keyword before the option list is optional noise.
        let _ = self.parse_keyword(Keyword::WITH);
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Accumulators for every possible option; paired positive/negative
        // keywords (e.g. LOGIN/NOLOGIN) collapse into an Option<bool>.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, used in duplicate-option
            // errors; falls back to 0:0 if the token index is unavailable.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            // Every arm evaluates to Result<(), ParserError>; the trailing `?`
            // on the match propagates duplicate-option errors.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    // CONNECTION must be followed by LIMIT <number>.
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    // VALID must be followed by UNTIL <value>.
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // IN introduces either `IN ROLE ...` or `IN GROUP ...`.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Any keyword outside the option set ends the option loop.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
7028
7029 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
7031 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
7032 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
7033 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
7034 Some(Keyword::SESSION_USER) => Owner::SessionUser,
7035 Some(unexpected_keyword) => return Err(ParserError::ParserError(
7036 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
7037 )),
7038 None => {
7039 match self.parse_identifier() {
7040 Ok(ident) => Owner::Ident(ident),
7041 Err(e) => {
7042 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
7043 }
7044 }
7045 }
7046 };
7047 Ok(owner)
7048 }
7049
7050 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
7052 let name = self.parse_object_name(false)?;
7053 self.expect_keyword_is(Keyword::AS)?;
7054 let data_type = self.parse_data_type()?;
7055 let collation = if self.parse_keyword(Keyword::COLLATE) {
7056 Some(self.parse_identifier()?)
7057 } else {
7058 None
7059 };
7060 let default = if self.parse_keyword(Keyword::DEFAULT) {
7061 Some(self.parse_expr()?)
7062 } else {
7063 None
7064 };
7065 let mut constraints = Vec::new();
7066 while let Some(constraint) = self.parse_optional_table_constraint()? {
7067 constraints.push(constraint);
7068 }
7069
7070 Ok(CreateDomain {
7071 name,
7072 data_type,
7073 collation,
7074 default,
7075 constraints,
7076 })
7077 }
7078
7079 pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
7089 let name = self.parse_identifier()?;
7090 self.expect_keyword_is(Keyword::ON)?;
7091 let table_name = self.parse_object_name(false)?;
7092
7093 let policy_type = if self.parse_keyword(Keyword::AS) {
7094 let keyword =
7095 self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
7096 Some(match keyword {
7097 Keyword::PERMISSIVE => CreatePolicyType::Permissive,
7098 Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
7099 unexpected_keyword => return Err(ParserError::ParserError(
7100 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
7101 )),
7102 })
7103 } else {
7104 None
7105 };
7106
7107 let command = if self.parse_keyword(Keyword::FOR) {
7108 let keyword = self.expect_one_of_keywords(&[
7109 Keyword::ALL,
7110 Keyword::SELECT,
7111 Keyword::INSERT,
7112 Keyword::UPDATE,
7113 Keyword::DELETE,
7114 ])?;
7115 Some(match keyword {
7116 Keyword::ALL => CreatePolicyCommand::All,
7117 Keyword::SELECT => CreatePolicyCommand::Select,
7118 Keyword::INSERT => CreatePolicyCommand::Insert,
7119 Keyword::UPDATE => CreatePolicyCommand::Update,
7120 Keyword::DELETE => CreatePolicyCommand::Delete,
7121 unexpected_keyword => return Err(ParserError::ParserError(
7122 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
7123 )),
7124 })
7125 } else {
7126 None
7127 };
7128
7129 let to = if self.parse_keyword(Keyword::TO) {
7130 Some(self.parse_comma_separated(|p| p.parse_owner())?)
7131 } else {
7132 None
7133 };
7134
7135 let using = if self.parse_keyword(Keyword::USING) {
7136 self.expect_token(&Token::LParen)?;
7137 let expr = self.parse_expr()?;
7138 self.expect_token(&Token::RParen)?;
7139 Some(expr)
7140 } else {
7141 None
7142 };
7143
7144 let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
7145 self.expect_token(&Token::LParen)?;
7146 let expr = self.parse_expr()?;
7147 self.expect_token(&Token::RParen)?;
7148 Some(expr)
7149 } else {
7150 None
7151 };
7152
7153 Ok(CreatePolicy {
7154 name,
7155 table_name,
7156 policy_type,
7157 command,
7158 to,
7159 using,
7160 with_check,
7161 })
7162 }
7163
7164 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7174 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7175 let name = self.parse_identifier()?;
7176
7177 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7178 Some(self.parse_literal_string()?)
7179 } else {
7180 None
7181 };
7182
7183 let url = if self.parse_keyword(Keyword::URL) {
7184 Some(self.parse_literal_string()?)
7185 } else {
7186 None
7187 };
7188
7189 let comment = self.parse_optional_inline_comment()?;
7190
7191 let with_dcproperties =
7192 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7193 properties if !properties.is_empty() => Some(properties),
7194 _ => None,
7195 };
7196
7197 Ok(CreateConnector {
7198 name,
7199 if_not_exists,
7200 connector_type,
7201 url,
7202 comment,
7203 with_dcproperties,
7204 })
7205 }
7206
7207 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7213 let mut parts = vec![];
7214 loop {
7215 parts.push(ObjectNamePart::Identifier(Ident::new(
7216 self.next_token().to_string(),
7217 )));
7218 if !self.consume_token(&Token::Period) {
7219 break;
7220 }
7221 }
7222 Ok(ObjectName(parts))
7223 }
7224
    /// Parses a `CREATE OPERATOR name ( ... )` statement body.
    ///
    /// The parenthesized list is a comma-separated sequence of
    /// `KEYWORD [= value]` entries. Each keyword may appear at most once:
    /// the match guards below only accept a keyword while its slot is still
    /// unset, so a duplicate falls through to the catch-all error arm.
    /// A FUNCTION (or PROCEDURE) entry is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        // Slots for the recognized parameters; `options` collects the
        // remaining optional entries in source order.
        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Bare flags, no `= value`.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE share the `function` slot; which
                // keyword was used is remembered in `is_procedure`.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or
                // the `OPERATOR(<name>)` wrapper form.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a keyword repeats (its guard above failed).
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            // Entries are comma-separated; no comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION is the only mandatory parameter.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7346
7347 pub fn parse_create_aggregate(
7351 &mut self,
7352 or_replace: bool,
7353 ) -> Result<CreateAggregate, ParserError> {
7354 let name = self.parse_object_name(false)?;
7355
7356 self.expect_token(&Token::LParen)?;
7358 let args = if self.consume_token(&Token::Mul) {
7359 vec![]
7361 } else if self.consume_token(&Token::RParen) {
7362 self.prev_token();
7363 vec![]
7364 } else {
7365 self.parse_comma_separated(|p| p.parse_data_type())?
7366 };
7367 self.expect_token(&Token::RParen)?;
7368
7369 self.expect_token(&Token::LParen)?;
7371 let mut options: Vec<CreateAggregateOption> = Vec::new();
7372 loop {
7373 let token = self.next_token();
7374 match &token.token {
7375 Token::RParen => break,
7376 Token::Comma => continue,
7377 Token::Word(word) => {
7378 let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
7379 options.push(option);
7380 }
7381 other => {
7382 return Err(ParserError::ParserError(format!(
7383 "Unexpected token in CREATE AGGREGATE options: {other:?}"
7384 )));
7385 }
7386 }
7387 }
7388
7389 Ok(CreateAggregate {
7390 or_replace,
7391 name,
7392 args,
7393 options,
7394 })
7395 }
7396
    /// Parses a single `key = value` option inside `CREATE AGGREGATE (...)`.
    ///
    /// `key` is the already-consumed option name, upper-cased by the caller.
    /// Most options require `= value`; FINALFUNC_EXTRA, MFINALFUNC_EXTRA and
    /// HYPOTHETICAL are bare flags with no value.
    fn parse_create_aggregate_option(
        &mut self,
        key: &str,
    ) -> Result<CreateAggregateOption, ParserError> {
        match key {
            // Core state-transition machinery.
            "SFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sfunc(self.parse_object_name(false)?))
            }
            "STYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
            }
            "SSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Sspace(size))
            }
            "FINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Finalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` follows.
            "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
            "FINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::FinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            // Parallel/partial-aggregation support functions.
            "COMBINEFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Combinefunc(
                    self.parse_object_name(false)?,
                ))
            }
            "SERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Serialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "DESERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Deserialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "INITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
            }
            // Moving-aggregate (`M*`) counterparts of the options above.
            "MSFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Msfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MINVFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minvfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MSTYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
            }
            "MSSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Msspace(size))
            }
            "MFINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mfinalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` follows.
            "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
            "MFINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::MfinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "MINITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
            }
            "SORTOP" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sortop(
                    self.parse_object_name(false)?,
                ))
            }
            "PARALLEL" => {
                self.expect_token(&Token::Eq)?;
                let parallel = match self.expect_one_of_keywords(&[
                    Keyword::SAFE,
                    Keyword::RESTRICTED,
                    Keyword::UNSAFE,
                ])? {
                    Keyword::SAFE => FunctionParallel::Safe,
                    Keyword::RESTRICTED => FunctionParallel::Restricted,
                    Keyword::UNSAFE => FunctionParallel::Unsafe,
                    // Unreachable: expect_one_of_keywords only returns listed
                    // keywords, but keep the arm so the match is total.
                    other => {
                        return Err(ParserError::ParserError(format!(
                            "Internal parser error: unexpected keyword `{other}` for PARALLEL"
                        )))
                    }
                };
                Ok(CreateAggregateOption::Parallel(parallel))
            }
            // Bare flag: no `= value` follows.
            "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
            other => Err(ParserError::ParserError(format!(
                "Unknown CREATE AGGREGATE option: {other}"
            ))),
        }
    }
7518
7519 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7520 let token = self.next_token();
7521 match &token.token {
7522 Token::Word(word) => match word.value.to_uppercase().as_str() {
7523 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7524 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7525 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7526 other => Err(ParserError::ParserError(format!(
7527 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7528 ))),
7529 },
7530 other => Err(ParserError::ParserError(format!(
7531 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7532 ))),
7533 }
7534 }
7535
7536 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7540 let name = self.parse_object_name(false)?;
7541 self.expect_keyword(Keyword::USING)?;
7542 let using = self.parse_identifier()?;
7543
7544 Ok(CreateOperatorFamily { name, using })
7545 }
7546
    /// Parses the body of a PostgreSQL `CREATE OPERATOR CLASS` statement:
    /// `name [DEFAULT] FOR TYPE t USING method [FAMILY f] AS item [, ...]`
    /// where each item is `OPERATOR n op [...]`, `FUNCTION n fn [...]`, or
    /// `STORAGE type`.
    pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
        let name = self.parse_object_name(false)?;
        let default = self.parse_keyword(Keyword::DEFAULT);
        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
        let for_type = self.parse_data_type()?;
        self.expect_keyword(Keyword::USING)?;
        let using = self.parse_identifier()?;

        let family = if self.parse_keyword(Keyword::FAMILY) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        self.expect_keyword(Keyword::AS)?;

        let mut items = vec![];
        loop {
            if self.parse_keyword(Keyword::OPERATOR) {
                // OPERATOR strategy_number op [(left, right)] [FOR SEARCH | FOR ORDER BY family]
                let strategy_number = self.parse_literal_uint()?;
                let operator_name = self.parse_operator_name()?;

                // Optional explicit operand types `(left_type, right_type)`.
                let op_types = if self.consume_token(&Token::LParen) {
                    let left = self.parse_data_type()?;
                    self.expect_token(&Token::Comma)?;
                    let right = self.parse_data_type()?;
                    self.expect_token(&Token::RParen)?;
                    Some(OperatorArgTypes { left, right })
                } else {
                    None
                };

                // Optional purpose clause; FOR must be followed by SEARCH or
                // ORDER BY — anything else is an error.
                let purpose = if self.parse_keyword(Keyword::FOR) {
                    if self.parse_keyword(Keyword::SEARCH) {
                        Some(OperatorPurpose::ForSearch)
                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                        let sort_family = self.parse_object_name(false)?;
                        Some(OperatorPurpose::ForOrderBy { sort_family })
                    } else {
                        return self
                            .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
                    }
                } else {
                    None
                };

                items.push(OperatorClassItem::Operator {
                    strategy_number,
                    operator_name,
                    op_types,
                    purpose,
                });
            } else if self.parse_keyword(Keyword::FUNCTION) {
                // FUNCTION support_number [(op_types)] fn_name [(arg_types)]
                let support_number = self.parse_literal_uint()?;

                // Optional operand-type list before the function name. A
                // non-empty `( t [, ...] )` and an empty `()` are parsed by
                // separate branches so `Some(vec![])` is preserved for `()`.
                let op_types = if self.consume_token(&Token::LParen)
                    && self.peek_token_ref().token != Token::RParen
                {
                    let mut types = vec![];
                    loop {
                        types.push(self.parse_data_type()?);
                        if !self.consume_token(&Token::Comma) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    Some(types)
                } else if self.consume_token(&Token::LParen) {
                    // NOTE: reached when the first branch consumed `(` but saw
                    // `)` next is impossible — the first consume_token already
                    // ate the `(`; this arm handles `()` only when the peek in
                    // the first condition short-circuited it. Order matters.
                    self.expect_token(&Token::RParen)?;
                    Some(vec![])
                } else {
                    None
                };

                let function_name = self.parse_object_name(false)?;

                // Optional argument-type list after the function name; empty
                // list is allowed and yields `vec![]`.
                let argument_types = if self.consume_token(&Token::LParen) {
                    let mut types = vec![];
                    loop {
                        if self.peek_token_ref().token == Token::RParen {
                            break;
                        }
                        types.push(self.parse_data_type()?);
                        if !self.consume_token(&Token::Comma) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    types
                } else {
                    vec![]
                };

                items.push(OperatorClassItem::Function {
                    support_number,
                    op_types,
                    function_name,
                    argument_types,
                });
            } else if self.parse_keyword(Keyword::STORAGE) {
                let storage_type = self.parse_data_type()?;
                items.push(OperatorClassItem::Storage { storage_type });
            } else {
                // No recognized item keyword: end of the AS list.
                break;
            }

            // Items are comma-separated; a missing comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(CreateOperatorClass {
            name,
            default,
            for_type,
            using,
            family,
            items,
        })
    }
7675
7676 pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
7678 let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
7680 && self.parse_keyword(Keyword::TEMPORARY);
7681 let persistent = dialect_of!(self is DuckDbDialect)
7682 && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
7683
7684 let object_type = if self.parse_keyword(Keyword::TABLE) {
7685 ObjectType::Table
7686 } else if self.parse_keyword(Keyword::COLLATION) {
7687 ObjectType::Collation
7688 } else if self.parse_keyword(Keyword::VIEW) {
7689 ObjectType::View
7690 } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
7691 ObjectType::MaterializedView
7692 } else if self.parse_keyword(Keyword::INDEX) {
7693 ObjectType::Index
7694 } else if self.parse_keyword(Keyword::ROLE) {
7695 ObjectType::Role
7696 } else if self.parse_keyword(Keyword::SCHEMA) {
7697 ObjectType::Schema
7698 } else if self.parse_keyword(Keyword::DATABASE) {
7699 ObjectType::Database
7700 } else if self.parse_keyword(Keyword::SEQUENCE) {
7701 ObjectType::Sequence
7702 } else if self.parse_keyword(Keyword::STAGE) {
7703 ObjectType::Stage
7704 } else if self.parse_keyword(Keyword::TYPE) {
7705 ObjectType::Type
7706 } else if self.parse_keyword(Keyword::USER) {
7707 ObjectType::User
7708 } else if self.parse_keyword(Keyword::STREAM) {
7709 ObjectType::Stream
7710 } else if self.parse_keyword(Keyword::FUNCTION) {
7711 return self.parse_drop_function().map(Into::into);
7712 } else if self.parse_keyword(Keyword::POLICY) {
7713 return self.parse_drop_policy().map(Into::into);
7714 } else if self.parse_keyword(Keyword::CONNECTOR) {
7715 return self.parse_drop_connector();
7716 } else if self.parse_keyword(Keyword::DOMAIN) {
7717 return self.parse_drop_domain().map(Into::into);
7718 } else if self.parse_keyword(Keyword::PROCEDURE) {
7719 return self.parse_drop_procedure();
7720 } else if self.parse_keyword(Keyword::SECRET) {
7721 return self.parse_drop_secret(temporary, persistent);
7722 } else if self.parse_keyword(Keyword::TRIGGER) {
7723 return self.parse_drop_trigger().map(Into::into);
7724 } else if self.parse_keyword(Keyword::EXTENSION) {
7725 return self.parse_drop_extension();
7726 } else if self.parse_keyword(Keyword::OPERATOR) {
7727 return if self.parse_keyword(Keyword::FAMILY) {
7729 self.parse_drop_operator_family()
7730 } else if self.parse_keyword(Keyword::CLASS) {
7731 self.parse_drop_operator_class()
7732 } else {
7733 self.parse_drop_operator()
7734 };
7735 } else {
7736 return self.expected_ref(
7737 "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
7738 self.peek_token_ref(),
7739 );
7740 };
7741 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7744 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7745
7746 let loc = self.peek_token_ref().span.start;
7747 let cascade = self.parse_keyword(Keyword::CASCADE);
7748 let restrict = self.parse_keyword(Keyword::RESTRICT);
7749 let purge = self.parse_keyword(Keyword::PURGE);
7750 if cascade && restrict {
7751 return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
7752 }
7753 if object_type == ObjectType::Role && (cascade || restrict || purge) {
7754 return parser_err!(
7755 "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
7756 loc
7757 );
7758 }
7759 let table = if self.parse_keyword(Keyword::ON) {
7760 Some(self.parse_object_name(false)?)
7761 } else {
7762 None
7763 };
7764 Ok(Statement::Drop {
7765 object_type,
7766 if_exists,
7767 names,
7768 cascade,
7769 restrict,
7770 purge,
7771 temporary,
7772 table,
7773 })
7774 }
7775
7776 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7777 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7778 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7779 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7780 _ => None,
7781 }
7782 }
7783
7784 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7789 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7790 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7791 let drop_behavior = self.parse_optional_drop_behavior();
7792 Ok(DropFunction {
7793 if_exists,
7794 func_desc,
7795 drop_behavior,
7796 })
7797 }
7798
7799 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7805 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7806 let name = self.parse_identifier()?;
7807 self.expect_keyword_is(Keyword::ON)?;
7808 let table_name = self.parse_object_name(false)?;
7809 let drop_behavior = self.parse_optional_drop_behavior();
7810 Ok(DropPolicy {
7811 if_exists,
7812 name,
7813 table_name,
7814 drop_behavior,
7815 })
7816 }
7817 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7823 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7824 let name = self.parse_identifier()?;
7825 Ok(Statement::DropConnector { if_exists, name })
7826 }
7827
7828 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7832 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7833 let name = self.parse_object_name(false)?;
7834 let drop_behavior = self.parse_optional_drop_behavior();
7835 Ok(DropDomain {
7836 if_exists,
7837 name,
7838 drop_behavior,
7839 })
7840 }
7841
7842 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7847 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7848 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7849 let drop_behavior = self.parse_optional_drop_behavior();
7850 Ok(Statement::DropProcedure {
7851 if_exists,
7852 proc_desc,
7853 drop_behavior,
7854 })
7855 }
7856
7857 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7858 let name = self.parse_object_name(false)?;
7859
7860 let args = if self.consume_token(&Token::LParen) {
7861 if self.consume_token(&Token::RParen) {
7862 Some(vec![])
7863 } else {
7864 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7865 self.expect_token(&Token::RParen)?;
7866 Some(args)
7867 }
7868 } else {
7869 None
7870 };
7871
7872 Ok(FunctionDesc { name, args })
7873 }
7874
    /// Parses the tail of a DuckDB `DROP [TEMPORARY | PERSISTENT] SECRET`
    /// statement. `temporary`/`persistent` report which modifier (if any) the
    /// caller already consumed; both being set is rejected.
    fn parse_drop_secret(
        &mut self,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
            // NOTE(review): `.ok()` silently drops an identifier parse error
            // after FROM, yielding `None` instead of failing — presumably a
            // deliberate best-effort, but confirm before relying on it.
            self.parse_identifier().ok()
        } else {
            None
        };
        // Fold the two mutually exclusive modifiers into one Option<bool>:
        // Some(true) = TEMPORARY, Some(false) = PERSISTENT, None = neither.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::DropSecret {
            if_exists,
            temporary: temp,
            name,
            storage_specifier,
        })
    }
7902
    /// Parses a `DECLARE` statement. BigQuery, Snowflake, and MsSql have
    /// their own grammars and are dispatched to dedicated helpers; all other
    /// dialects are parsed as a PostgreSQL-style cursor declaration:
    /// `DECLARE name [BINARY] [INSENSITIVE | ASENSITIVE]
    ///  [[NO] SCROLL] CURSOR [WITH | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // BINARY is always recorded (as Some(bool)) for cursor declarations.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        // Some(true) = INSENSITIVE, Some(false) = ASENSITIVE, None = neither.
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        // Some(true) = SCROLL, Some(false) = NO SCROLL, None = unspecified.
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD` clause.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // Unreachable: parse_one_of_keywords only returns keywords
                    // from the supplied list.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7977
7978 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7986 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7987
7988 let data_type = match &self.peek_token_ref().token {
7989 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7990 _ => Some(self.parse_data_type()?),
7991 };
7992
7993 let expr = if data_type.is_some() {
7994 if self.parse_keyword(Keyword::DEFAULT) {
7995 Some(self.parse_expr()?)
7996 } else {
7997 None
7998 }
7999 } else {
8000 self.expect_keyword_is(Keyword::DEFAULT)?;
8003 Some(self.parse_expr()?)
8004 };
8005
8006 Ok(Statement::Declare {
8007 stmts: vec![Declare {
8008 names,
8009 data_type,
8010 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
8011 declare_type: None,
8012 binary: None,
8013 sensitive: None,
8014 scroll: None,
8015 hold: None,
8016 for_query: None,
8017 }],
8018 })
8019 }
8020
    /// Parses a Snowflake `DECLARE` block, which may declare several
    /// semicolon-separated items: cursors (`name CURSOR FOR ...`), result
    /// sets (`name RESULTSET [:= ...]`), exceptions (`name EXCEPTION [...]`),
    /// and plain variables (`name [type] [DEFAULT | := expr]`).
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            // Each branch yields the 4-tuple
            // (declare_type, for_query, assignment, data_type).
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` carries a query; otherwise the
                    // FOR target is an arbitrary expression (e.g. a variable).
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // An initializer is present unless the declaration ends
                    // immediately with `;`.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // `EXCEPTION (code, 'message')` — the parenthesized tuple
                    // is parsed as an expression when present.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: either an immediate initializer, or a
                    // data type optionally followed by an initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            // After `;`, another declaration follows only if the next word is
            // NOT a keyword (a keyword means the DECLARE block has ended and
            // the `;` belongs between statements — push it back).
            if self.consume_token(&Token::SemiColon) {
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8136
8137 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8149 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8150
8151 Ok(Statement::Declare { stmts })
8152 }
8153
    /// Parses one item of a MsSql `DECLARE` list:
    /// `@name [AS] type [= expr]` or `name CURSOR [FOR query]`.
    ///
    /// Names must start with `@` unless the declaration is a cursor.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            // A non-`@` name is only legal when followed by CURSOR.
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): reported as `TokenizerError` although raised
                // by the parser — likely should be `ParserError`; confirm
                // before changing, callers may match on the variant.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // CURSOR has no data type; otherwise an optional AS precedes the type.
        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // `FOR query` (cursor form) and `= expr` (variable form) are
        // mutually exclusive.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
8217
8218 pub fn parse_snowflake_variable_declaration_expression(
8226 &mut self,
8227 ) -> Result<Option<DeclareAssignment>, ParserError> {
8228 Ok(match &self.peek_token_ref().token {
8229 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8230 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8232 }
8233 Token::Assignment => {
8234 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8236 self.parse_expr()?,
8237 )))
8238 }
8239 _ => None,
8240 })
8241 }
8242
8243 pub fn parse_mssql_variable_declaration_expression(
8250 &mut self,
8251 ) -> Result<Option<DeclareAssignment>, ParserError> {
8252 Ok(match &self.peek_token_ref().token {
8253 Token::Eq => {
8254 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8256 self.parse_expr()?,
8257 )))
8258 }
8259 _ => None,
8260 })
8261 }
8262
8263 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
8265 let direction = if self.parse_keyword(Keyword::NEXT) {
8266 FetchDirection::Next
8267 } else if self.parse_keyword(Keyword::PRIOR) {
8268 FetchDirection::Prior
8269 } else if self.parse_keyword(Keyword::FIRST) {
8270 FetchDirection::First
8271 } else if self.parse_keyword(Keyword::LAST) {
8272 FetchDirection::Last
8273 } else if self.parse_keyword(Keyword::ABSOLUTE) {
8274 FetchDirection::Absolute {
8275 limit: self.parse_number_value()?,
8276 }
8277 } else if self.parse_keyword(Keyword::RELATIVE) {
8278 FetchDirection::Relative {
8279 limit: self.parse_number_value()?,
8280 }
8281 } else if self.parse_keyword(Keyword::FORWARD) {
8282 if self.parse_keyword(Keyword::ALL) {
8283 FetchDirection::ForwardAll
8284 } else {
8285 FetchDirection::Forward {
8286 limit: Some(self.parse_number_value()?),
8288 }
8289 }
8290 } else if self.parse_keyword(Keyword::BACKWARD) {
8291 if self.parse_keyword(Keyword::ALL) {
8292 FetchDirection::BackwardAll
8293 } else {
8294 FetchDirection::Backward {
8295 limit: Some(self.parse_number_value()?),
8297 }
8298 }
8299 } else if self.parse_keyword(Keyword::ALL) {
8300 FetchDirection::All
8301 } else {
8302 FetchDirection::Count {
8303 limit: self.parse_number_value()?,
8304 }
8305 };
8306
8307 let position = if self.peek_keyword(Keyword::FROM) {
8308 self.expect_keyword(Keyword::FROM)?;
8309 FetchPosition::From
8310 } else if self.peek_keyword(Keyword::IN) {
8311 self.expect_keyword(Keyword::IN)?;
8312 FetchPosition::In
8313 } else {
8314 return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
8315 };
8316
8317 let name = self.parse_identifier()?;
8318
8319 let into = if self.parse_keyword(Keyword::INTO) {
8320 Some(self.parse_object_name(false)?)
8321 } else {
8322 None
8323 };
8324
8325 Ok(Statement::Fetch {
8326 name,
8327 direction,
8328 position,
8329 into,
8330 })
8331 }
8332
8333 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8335 let object_type = if self.parse_keyword(Keyword::ALL) {
8336 DiscardObject::ALL
8337 } else if self.parse_keyword(Keyword::PLANS) {
8338 DiscardObject::PLANS
8339 } else if self.parse_keyword(Keyword::SEQUENCES) {
8340 DiscardObject::SEQUENCES
8341 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8342 DiscardObject::TEMP
8343 } else {
8344 return self.expected_ref(
8345 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8346 self.peek_token_ref(),
8347 );
8348 };
8349 Ok(Statement::Discard { object_type })
8350 }
8351
    /// Parses the tail of a `CREATE [UNIQUE] INDEX` statement, after
    /// `CREATE [UNIQUE] INDEX` has been consumed (`unique` records whether
    /// UNIQUE was present).
    ///
    /// The clauses are order-sensitive: `[CONCURRENTLY] [IF NOT EXISTS]
    /// [name] [USING method] ON table [USING method] (columns)
    /// [INCLUDE (...)] [NULLS [NOT] DISTINCT] [WITH (...)] [WHERE ...]`,
    /// plus trailing index options and MySQL ALGORITHM/LOCK clauses.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional: `CREATE INDEX ON t (...)` is legal.
        // With IF NOT EXISTS a name is always required.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // USING may appear either before or after ON (dialect-dependent).
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // A USING after the table name wins over one seen before ON.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Postgres `INCLUDE (col, ...)` — non-key columns stored in the index.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS DISTINCT` -> Some(true); `NULLS NOT DISTINCT` -> Some(false).
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Storage parameters, only for dialects that support WITH here.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Partial-index predicate.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // MySQL allows trailing ALGORITHM=.../LOCK=... clauses, parsed as
        // alter-table operations.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8443
8444 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8446 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8447 let name = self.parse_identifier()?;
8448
8449 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8450 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8451 Some(self.parse_identifier()?)
8452 } else {
8453 None
8454 };
8455
8456 let version = if self.parse_keyword(Keyword::VERSION) {
8457 Some(self.parse_identifier()?)
8458 } else {
8459 None
8460 };
8461
8462 let cascade = self.parse_keyword(Keyword::CASCADE);
8463
8464 (schema, version, cascade)
8465 } else {
8466 (None, None, false)
8467 };
8468
8469 Ok(CreateExtension {
8470 name,
8471 if_not_exists,
8472 schema,
8473 version,
8474 cascade,
8475 })
8476 }
8477
8478 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8480 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8481 let name = self.parse_object_name(false)?;
8482
8483 let definition = if self.parse_keyword(Keyword::FROM) {
8484 CreateCollationDefinition::From(self.parse_object_name(false)?)
8485 } else if self.consume_token(&Token::LParen) {
8486 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8487 self.expect_token(&Token::RParen)?;
8488 CreateCollationDefinition::Options(options)
8489 } else {
8490 return self.expected_ref(
8491 "FROM or parenthesized option list after CREATE COLLATION name",
8492 self.peek_token_ref(),
8493 );
8494 };
8495
8496 Ok(CreateCollation {
8497 if_not_exists,
8498 name,
8499 definition,
8500 })
8501 }
8502
8503 pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
8505 if self.parse_keyword(Keyword::CONFIGURATION) {
8506 let name = self.parse_object_name(false)?;
8507 self.expect_token(&Token::LParen)?;
8508 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8509 self.expect_token(&Token::RParen)?;
8510 Ok(Statement::CreateTextSearchConfiguration(
8511 CreateTextSearchConfiguration { name, options },
8512 ))
8513 } else if self.parse_keyword(Keyword::DICTIONARY) {
8514 let name = self.parse_object_name(false)?;
8515 self.expect_token(&Token::LParen)?;
8516 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8517 self.expect_token(&Token::RParen)?;
8518 Ok(Statement::CreateTextSearchDictionary(
8519 CreateTextSearchDictionary { name, options },
8520 ))
8521 } else if self.parse_keyword(Keyword::PARSER) {
8522 let name = self.parse_object_name(false)?;
8523 self.expect_token(&Token::LParen)?;
8524 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8525 self.expect_token(&Token::RParen)?;
8526 Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
8527 name,
8528 options,
8529 }))
8530 } else if self.parse_keyword(Keyword::TEMPLATE) {
8531 let name = self.parse_object_name(false)?;
8532 self.expect_token(&Token::LParen)?;
8533 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8534 self.expect_token(&Token::RParen)?;
8535 Ok(Statement::CreateTextSearchTemplate(
8536 CreateTextSearchTemplate { name, options },
8537 ))
8538 } else {
8539 self.expected_ref(
8540 "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
8541 self.peek_token_ref(),
8542 )
8543 }
8544 }
8545
8546 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8548 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8549 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8550 let cascade_or_restrict =
8551 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8552 Ok(Statement::DropExtension(DropExtension {
8553 names,
8554 if_exists,
8555 cascade_or_restrict: cascade_or_restrict
8556 .map(|k| match k {
8557 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8558 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8559 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8560 })
8561 .transpose()?,
8562 }))
8563 }
8564
8565 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8568 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8569 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8570 let drop_behavior = self.parse_optional_drop_behavior();
8571 Ok(Statement::DropOperator(DropOperator {
8572 if_exists,
8573 operators,
8574 drop_behavior,
8575 }))
8576 }
8577
    /// Parses a single operator signature in a `DROP OPERATOR` statement:
    /// `<name> ( { <left_type> | NONE } , <right_type> )`.
    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        // NONE in the left position denotes an operator with no left operand
        // (a prefix operator); otherwise a data type is required.
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;

        // NOTE(review): the right operand is always parsed as a data type, so
        // `NONE` (legacy postfix-operator syntax, PostgreSQL < 14) is rejected
        // here — confirm this restriction is intentional.
        let right_type = self.parse_data_type()?;

        self.expect_token(&Token::RParen)?;

        Ok(DropOperatorSignature {
            name,
            left_type,
            right_type,
        })
    }
8604
8605 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8609 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8610 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8611 self.expect_keyword(Keyword::USING)?;
8612 let using = self.parse_identifier()?;
8613 let drop_behavior = self.parse_optional_drop_behavior();
8614 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8615 if_exists,
8616 names,
8617 using,
8618 drop_behavior,
8619 }))
8620 }
8621
8622 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8626 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8627 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8628 self.expect_keyword(Keyword::USING)?;
8629 let using = self.parse_identifier()?;
8630 let drop_behavior = self.parse_optional_drop_behavior();
8631 Ok(Statement::DropOperatorClass(DropOperatorClass {
8632 if_exists,
8633 names,
8634 using,
8635 drop_behavior,
8636 }))
8637 }
8638
8639 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8643 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8644 self.expect_token(&Token::LParen)?;
8645 let columns =
8646 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8647 self.expect_token(&Token::RParen)?;
8648 Ok(HiveDistributionStyle::PARTITIONED { columns })
8649 } else {
8650 Ok(HiveDistributionStyle::NONE)
8651 }
8652 }
8653
8654 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8658 let token = self.next_token();
8659 match &token.token {
8660 Token::Word(w) => match w.keyword {
8661 Keyword::AUTO => Ok(DistStyle::Auto),
8662 Keyword::EVEN => Ok(DistStyle::Even),
8663 Keyword::KEY => Ok(DistStyle::Key),
8664 Keyword::ALL => Ok(DistStyle::All),
8665 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8666 },
8667 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8668 }
8669 }
8670
    /// Parses the Hive storage clauses of a `CREATE TABLE` statement:
    /// `ROW FORMAT`, `STORED AS`, `LOCATION`, and `WITH SERDEPROPERTIES`.
    ///
    /// Clauses may appear in any order and any number of times; a repeated
    /// clause overwrites the field set by an earlier occurrence. Returns
    /// `None` when no clause was present at all.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // STORED AS INPUTFORMAT <expr> OUTPUTFORMAT <expr>
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // STORED AS <file-format keyword>
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Rewind the consumed WITH so the options parser can match
                    // the full `WITH SERDEPROPERTIES (...)` keyword sequence.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // A WITH not followed by SERDEPROPERTIES belongs to a
                        // later clause; stop here, leaving it unconsumed.
                        break;
                    }
                }
                None => break,
                // Unreachable in practice (only the four keywords above can
                // be returned); kept for match exhaustiveness.
                _ => break,
            }
        }

        Ok(hive_format)
    }
8726
    /// Parses a Hive `ROW FORMAT` clause, assuming the leading `ROW` keyword
    /// has already been consumed.
    ///
    /// Two forms are supported:
    /// - `ROW FORMAT SERDE '<class>'`
    /// - `ROW FORMAT DELIMITED [<delimiter spec> ...]` where each spec is one
    ///   of FIELDS TERMINATED BY [ESCAPED BY], COLLECTION ITEMS TERMINATED BY,
    ///   MAP KEYS TERMINATED BY, LINES TERMINATED BY, or NULL DEFINED AS.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED: collect delimiter specifications until none of
                // the introducing keywords match.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS)
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::FieldsTerminatedBy,
                                char: self.parse_identifier()?,
                            });

                            // ESCAPED BY is only valid directly after
                            // FIELDS TERMINATED BY.
                            if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsEscapedBy,
                                    char: self.parse_identifier()?,
                                });
                            }
                        }
                        Some(Keyword::COLLECTION)
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::MAP)
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::LINES)
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::LinesTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::NULL)
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::NullDefinedAs,
                                char: self.parse_identifier()?,
                            });
                        }
                        _ => {
                            // NOTE(review): when a leading keyword matched but
                            // its guard failed (e.g. `FIELDS` without
                            // `TERMINATED BY`), that keyword has already been
                            // consumed by the time we break — confirm this is
                            // the intended recovery behavior.
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8813
8814 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8815 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8816 Ok(Some(self.parse_identifier()?))
8817 } else {
8818 Ok(None)
8819 }
8820 }
8821
    /// Parses the body of a `CREATE TABLE` statement, assuming the leading
    /// keywords (including any `OR REPLACE` / `TEMPORARY` / `GLOBAL` /
    /// `TRANSIENT` modifiers, passed in as flags) have already been consumed.
    ///
    /// Clauses are parsed in a fixed order; several are dialect-gated. The
    /// result is assembled through [`CreateTableBuilder`].
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<CreateTable, ParserError> {
        // BigQuery permits unquoted hyphens in table names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // PostgreSQL: CREATE TABLE ... PARTITION OF <parent>.
        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // Snowflake: CLONE <source>.
        // NOTE(review): `.ok()` silently discards a parse error in the clone
        // source name, leaving `clone = None` — confirm this is intentional.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        let (columns, constraints) = self.parse_columns()?;
        // Hive: a table-level COMMENT directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // PARTITION OF requires a bound spec: FOR VALUES ... or DEFAULT.
        let for_values = if partition_of.is_some() {
            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
                Some(self.parse_partition_for_values()?)
            } else {
                return self.expected_ref(
                    "FOR VALUES or DEFAULT after PARTITION OF",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        // SQLite: WITHOUT ROWID.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive-specific storage clauses.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        // INHERITS / WITH / TBLPROPERTIES / OPTIONS / PARTITION BY /
        // CLUSTER BY / plain options.
        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse: table-level PRIMARY KEY expression.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // ORDER BY: either a (possibly empty) parenthesized list or a single
        // unparenthesized expression.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token_ref().token != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite: STRICT tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // Redshift: BACKUP { YES | NO }.
        let backup = if self.parse_keyword(Keyword::BACKUP) {
            let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
            Some(keyword == Keyword::YES)
        } else {
            None
        };

        // Redshift distribution/sort options.
        let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
            Some(self.parse_dist_style()?)
        } else {
            None
        };
        let distkey = if self.parse_keyword(Keyword::DISTKEY) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };
        let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        // CTAS: `AS <query>`, or (MySQL-style) a bare SELECT without AS.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            // Rewind so the query parser sees the SELECT keyword itself.
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .backup(backup)
            .diststyle(diststyle)
            .distkey(distkey)
            .sortkey(sortkey)
            .build())
    }
9006
    /// Parses an optional `LIKE` clause of `CREATE TABLE`, in either the
    /// parenthesized form `( LIKE <table> [{INCLUDING | EXCLUDING} DEFAULTS] )`
    /// (when the dialect supports it) or the plain `LIKE`/`ILIKE <table>` form.
    ///
    /// Returns `Ok(None)` with the token stream unchanged when no LIKE clause
    /// is present.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The `(` starts a column list, not a LIKE clause: put it back.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
9044
9045 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
9046 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
9047 Ok(OnCommit::DeleteRows)
9048 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
9049 Ok(OnCommit::PreserveRows)
9050 } else if self.parse_keywords(&[Keyword::DROP]) {
9051 Ok(OnCommit::Drop)
9052 } else {
9053 parser_err!(
9054 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
9055 self.peek_token_ref()
9056 )
9057 }
9058 }
9059
    /// Parses the partition bound specification of a
    /// `CREATE TABLE ... PARTITION OF` statement (PostgreSQL): `DEFAULT`,
    /// `FOR VALUES IN (...)`, `FOR VALUES FROM (...) TO (...)`, or
    /// `FOR VALUES WITH (MODULUS n, REMAINDER r)`.
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            // List partitioning: the value list must be non-empty.
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            // Range partitioning: FROM (...) TO (...), both lists non-empty.
            // Bound values may be MINVALUE/MAXVALUE or expressions.
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            // Hash partitioning: WITH (MODULUS n, REMAINDER r).
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
        }
    }
9111
9112 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9114 if self.parse_keyword(Keyword::MINVALUE) {
9115 Ok(PartitionBoundValue::MinValue)
9116 } else if self.parse_keyword(Keyword::MAXVALUE) {
9117 Ok(PartitionBoundValue::MaxValue)
9118 } else {
9119 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9120 }
9121 }
9122
    /// Parses the optional trailing configuration clauses of `CREATE TABLE`:
    /// `INHERITS`, the table-option forms (`WITH (...)`, `TBLPROPERTIES (...)`,
    /// BigQuery `OPTIONS (...)`, or plain MySQL-style options), `PARTITION BY`,
    /// and `CLUSTER BY`.
    ///
    /// Only one option form ends up in `table_options`; a later form
    /// overwrites an earlier one in the order they are probed below.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // PostgreSQL: INHERITS ( parent_table [, ...] ).
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // WITH ( ... ) options; only recorded when non-empty.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive: TBLPROPERTIES ( ... ) — replaces a prior WITH, if any.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        // PARTITION BY <expr> (dialect-gated).
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // BigQuery: CLUSTER BY <expr list> followed by optional OPTIONS (...).
        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // Peek before committing: OPTIONS replaces any earlier option form.
            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain (MySQL-style) options when no form matched yet;
        // Hive is excluded because its options were handled above.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
9187
    /// Parses a single "plain" (mostly MySQL-style) table option such as
    /// `ENGINE = InnoDB`, `COMMENT [=] '...'`, or `AUTO_INCREMENT = n`.
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start any known
    /// option, leaving them unconsumed so the caller can stop cleanly.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // Options with special grammar come first; the ladder at the bottom
        // covers the common `KEY [=] value` shape.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // COMMENT [=] '<string>' — records whether `=` was present.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // ENGINE [=] <name> [ ( <ident>, ... ) ].
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // TABLESPACE [=] <name> [STORAGE [=] {DISK | MEMORY}].
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // UNION [=] ( <table>, ... ) — MERGE storage engine option.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Everything below is a simple `KEY [=] value` option. Multi-word
        // keys must be probed before their single-word prefixes (e.g.
        // `DEFAULT CHARSET` before `CHARSET`).
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // No known option starts here.
            return Ok(None);
        };

        // The `=` between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // The value is either a literal or a bare identifier
        // (e.g. `ROW_FORMAT = DYNAMIC`).
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9380
9381 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9383 let mut options = Vec::new();
9384
9385 while let Some(option) = self.parse_plain_option()? {
9386 options.push(option);
9387 let _ = self.consume_token(&Token::Comma);
9390 }
9391
9392 Ok(options)
9393 }
9394
9395 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9397 let comment = if self.parse_keyword(Keyword::COMMENT) {
9398 let has_eq = self.consume_token(&Token::Eq);
9399 let comment = self.parse_comment_value()?;
9400 Some(if has_eq {
9401 CommentDef::WithEq(comment)
9402 } else {
9403 CommentDef::WithoutEq(comment)
9404 })
9405 } else {
9406 None
9407 };
9408 Ok(comment)
9409 }
9410
9411 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9413 let next_token = self.next_token();
9414 let value = match next_token.token {
9415 Token::SingleQuotedString(str) => str,
9416 Token::DollarQuotedString(str) => str.value,
9417 _ => self.expected("string literal", next_token)?,
9418 };
9419 Ok(value)
9420 }
9421
    /// Parses an optional parenthesized procedure parameter list.
    ///
    /// Returns `Some(vec![])` when there is no `(` at all or the list is the
    /// empty `()`.
    ///
    /// NOTE(review): this never returns `None`; the `Option` wrapper appears
    /// to exist for caller-side interface reasons — confirm with callers.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // Only a word can start a parameter; anything else falls through
            // to the separator/terminator handling below.
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // Done; a trailing comma before `)` is tolerated.
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9447
    /// Parses the parenthesized column/constraint list of `CREATE TABLE`.
    ///
    /// Returns empty vectors when there is no `(` at all or the list is the
    /// empty `()`.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Table-level constraints are probed first; otherwise anything
            // starting with a word is treated as a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Stop at `)`; a trailing comma before it is only accepted when
            // the dialect or the parser options allow trailing commas.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9488
9489 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9491 let mode = if self.parse_keyword(Keyword::IN) {
9492 Some(ArgMode::In)
9493 } else if self.parse_keyword(Keyword::OUT) {
9494 Some(ArgMode::Out)
9495 } else if self.parse_keyword(Keyword::INOUT) {
9496 Some(ArgMode::InOut)
9497 } else {
9498 None
9499 };
9500 let name = self.parse_identifier()?;
9501 let data_type = self.parse_data_type()?;
9502 let default = if self.consume_token(&Token::Eq) {
9503 Some(self.parse_expr()?)
9504 } else {
9505 None
9506 };
9507
9508 Ok(ProcedureParam {
9509 name,
9510 data_type,
9511 mode,
9512 default,
9513 })
9514 }
9515
    /// Parses a column definition in which the data type is mandatory.
    ///
    /// Thin wrapper over [`Self::parse_column_def_inner`] with
    /// `optional_data_type = false`.
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9520
    /// Parses a column definition: name, data type (possibly omitted — see
    /// `optional_data_type` and the SQLite special case), and a sequence of
    /// column options, each optionally prefixed with `CONSTRAINT <name>`.
    fn parse_column_def_inner(
        &mut self,
        optional_data_type: bool,
    ) -> Result<ColumnDef, ParserError> {
        let col_name = self.parse_identifier()?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            // SQLite allows columns without a declared type.
            DataType::Unspecified
        } else if optional_data_type {
            // Callers such as Hive PARTITIONED BY allow the type to be
            // omitted entirely.
            self.maybe_parse(|parser| parser.parse_data_type())?
                .unwrap_or(DataType::Unspecified)
        } else {
            self.parse_data_type()?
        };
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // A named option: `CONSTRAINT <name>` must be followed by an
                // actual column option, otherwise it is an error.
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected_ref(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token_ref(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                // No further options: the column definition ends here.
                break;
            };
        }
        Ok(ColumnDef {
            name: col_name,
            data_type,
            options,
        })
    }
9558
9559 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9560 if dialect_of!(self is SQLiteDialect) {
9561 match &self.peek_token_ref().token {
9562 Token::Word(word) => matches!(
9563 word.keyword,
9564 Keyword::CONSTRAINT
9565 | Keyword::PRIMARY
9566 | Keyword::NOT
9567 | Keyword::UNIQUE
9568 | Keyword::CHECK
9569 | Keyword::DEFAULT
9570 | Keyword::COLLATE
9571 | Keyword::REFERENCES
9572 | Keyword::GENERATED
9573 | Keyword::AS
9574 ),
9575 _ => true, }
9577 } else {
9578 false
9579 }
9580 }
9581
9582 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9584 if let Some(option) = self.dialect.parse_column_option(self)? {
9585 return option;
9586 }
9587
9588 self.with_state(
9589 ColumnDefinition,
9590 |parser| -> Result<Option<ColumnOption>, ParserError> {
9591 parser.parse_optional_column_option_inner()
9592 },
9593 )
9594 }
9595
    /// Built-in parser for a single column option (NOT NULL, DEFAULT,
    /// PRIMARY KEY, REFERENCES, CHECK, GENERATED, dialect extensions, ...).
    ///
    /// Returns `Ok(None)` when the next tokens do not start a recognized
    /// option. Branch order matters: multi-keyword spellings (e.g.
    /// `NOT NULL`) are tried before their single-keyword prefixes.
    ///
    /// NOTE(review): several branches call `self.parse_keyword(..)` BEFORE
    /// the `dialect_of!` check (e.g. AUTO_INCREMENT, ASC/DESC, AS, SRID,
    /// IDENTITY), so the keyword token is consumed even when the dialect
    /// test then fails and the branch is not taken. This mirrors long-
    /// standing behavior — confirm before reordering.
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // ClickHouse EPHEMERAL may stand alone (followed by `,` or `)`)
            // or carry a default expression.
            if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            // Inline PRIMARY KEY: the column list stays empty because the
            // constraint applies to this column only.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            // `UNIQUE KEY` spelling is only consumed when the dialect
            // supports the KEY column option.
            let index_type_display =
                if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
                    KeyOrIndexDisplay::Key
                } else {
                    KeyOrIndexDisplay::None
                };
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
            // A bare `KEY` column option is treated as a primary key.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            // MATCH / ON DELETE / ON UPDATE may appear in any order, each
            // at most once.
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None, index_name: None, columns: vec![], foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The check expression is parsed in the normal state so
            // column-definition token handling does not apply inside it.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;

            let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                Some(true)
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                Some(false)
            } else {
                None
            };

            Ok(Some(
                CheckConstraint {
                    name: None, expr: Box::new(expr),
                    enforced,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // `parse_options` expects to consume the keyword itself, so
            // push OPTIONS back before delegating.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // Optional MSSQL `IDENTITY(seed, increment)` parameters.
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            // SQLite `ON CONFLICT <resolution>` column clause.
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
9831
9832 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9833 let name = self.parse_object_name(false)?;
9834 self.expect_token(&Token::Eq)?;
9835 let value = self.parse_literal_string()?;
9836
9837 Ok(Tag::new(name, value))
9838 }
9839
    /// Parses what follows a consumed `GENERATED` keyword in a column
    /// definition: either an identity clause (`ALWAYS AS IDENTITY` /
    /// `BY DEFAULT AS IDENTITY`, with optional parenthesized sequence
    /// options) or a generation expression (`ALWAYS AS (<expr>)
    /// [STORED | VIRTUAL]`). Returns `Ok(None)` when none of these forms
    /// follow (the GENERATED keyword itself stays consumed by the caller).
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            let mut sequence_options = vec![];
            // `expect_token` does not consume on failure, so `.is_ok()`
            // acts as an optional consume of the opening paren here.
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // The generation expression is parsed in the normal state so
                // column-definition-specific token handling does not apply.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // Postgres requires STORED after the expression.
                    self.expected_ref("STORED", self.peek_token_ref())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // `GENERATED ALWAYS AS` without a parenthesized expression
                // is not a generated-column clause.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
9906
9907 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9908 self.expect_token(&Token::LParen)?;
9910 let expr = self.parse_expr()?;
9911 self.expect_token(&Token::RParen)?;
9912
9913 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9914 (
9915 GeneratedAs::ExpStored,
9916 Some(GeneratedExpressionMode::Stored),
9917 )
9918 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9919 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9920 } else {
9921 (GeneratedAs::Always, None)
9922 };
9923
9924 Ok(Some(ColumnOption::Generated {
9925 generated_as: gen_as,
9926 sequence_options: None,
9927 generation_expr: Some(expr),
9928 generation_expr_mode: expr_mode,
9929 generated_keyword: false,
9930 }))
9931 }
9932
9933 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9935 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9936 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9937 {
9938 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9939
9940 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9941 self.expect_token(&Token::LParen)?;
9942 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9943 self.expect_token(&Token::RParen)?;
9944 Some(sorted_by_columns)
9945 } else {
9946 None
9947 };
9948
9949 self.expect_keyword_is(Keyword::INTO)?;
9950 let num_buckets = self.parse_number_value()?.value;
9951 self.expect_keyword_is(Keyword::BUCKETS)?;
9952 Some(ClusteredBy {
9953 columns,
9954 sorted_by,
9955 num_buckets,
9956 })
9957 } else {
9958 None
9959 };
9960 Ok(clustered_by)
9961 }
9962
9963 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9967 if self.parse_keyword(Keyword::RESTRICT) {
9968 Ok(ReferentialAction::Restrict)
9969 } else if self.parse_keyword(Keyword::CASCADE) {
9970 Ok(ReferentialAction::Cascade)
9971 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9972 Ok(ReferentialAction::SetNull)
9973 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9974 Ok(ReferentialAction::NoAction)
9975 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9976 Ok(ReferentialAction::SetDefault)
9977 } else {
9978 self.expected_ref(
9979 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9980 self.peek_token_ref(),
9981 )
9982 }
9983 }
9984
9985 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9987 if self.parse_keyword(Keyword::FULL) {
9988 Ok(ConstraintReferenceMatchKind::Full)
9989 } else if self.parse_keyword(Keyword::PARTIAL) {
9990 Ok(ConstraintReferenceMatchKind::Partial)
9991 } else if self.parse_keyword(Keyword::SIMPLE) {
9992 Ok(ConstraintReferenceMatchKind::Simple)
9993 } else {
9994 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9995 }
9996 }
9997
9998 fn parse_constraint_using_index(
10001 &mut self,
10002 name: Option<Ident>,
10003 ) -> Result<ConstraintUsingIndex, ParserError> {
10004 let index_name = self.parse_identifier()?;
10005 let characteristics = self.parse_constraint_characteristics()?;
10006 Ok(ConstraintUsingIndex {
10007 name,
10008 index_name,
10009 characteristics,
10010 })
10011 }
10012
    /// Parses optional constraint characteristics — `[NOT] DEFERRABLE`,
    /// `INITIALLY {DEFERRED | IMMEDIATE}`, and `[NOT] ENFORCED` — in any
    /// order, each clause at most once. Returns `Ok(None)` when no clause
    /// was present.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // `NOT DEFERRABLE` must be tried before bare `DEFERRABLE`;
            // `parse_keywords` backtracks on a partial match.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                // INITIALLY must be followed by DEFERRED or IMMEDIATE.
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        // Only report characteristics when at least one clause appeared.
        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
10050
    /// Parses an optional table-level constraint:
    /// `[CONSTRAINT [<name>]] {UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK |
    /// INDEX | KEY | FULLTEXT | SPATIAL | EXCLUDE} ...`.
    ///
    /// Returns `Ok(None)` — with the parser position restored — when the
    /// next tokens do not start a table constraint.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            // Some dialects allow `CONSTRAINT` to be immediately followed
            // by the constraint body, i.e. with no name.
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // FULLTEXT/SPATIAL are constraints only for MySQL-like dialects;
        // for everyone else bail out without consuming the keyword.
        if name.is_none()
            && self
                .peek_one_of_keywords(&[Keyword::FULLTEXT, Keyword::SPATIAL])
                .is_some()
            && !dialect_of!(self is GenericDialect | MySqlDialect)
        {
            return Ok(None);
        }

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX <name>` references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_type_display = self.parse_index_type_display();
                // The `UNIQUE KEY` / `UNIQUE INDEX` spellings are only
                // accepted by MySQL-like dialects.
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX <name>` references an existing
                // index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order,
                // each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // A following USING introduces the index type, not the
                // index name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // FULLTEXT/SPATIAL constraints must not carry a
                // `CONSTRAINT <name>` prefix.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // Postgres EXCLUDE [USING <method>] (<element> WITH <op>, ...)
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements = self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // A constraint name was consumed, so a body is required.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint at all: put the token back and let
                    // the caller try something else.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10339
10340 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10341 let expr = self.parse_expr()?;
10342 self.expect_keyword_is(Keyword::WITH)?;
10343 let operator_token = self.next_token();
10344 let operator = operator_token.token.to_string();
10345 Ok(ExclusionElement { expr, operator })
10346 }
10347
10348 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10349 Ok(if self.parse_keyword(Keyword::NULLS) {
10350 let not = self.parse_keyword(Keyword::NOT);
10351 self.expect_keyword_is(Keyword::DISTINCT)?;
10352 if not {
10353 NullsDistinctOption::NotDistinct
10354 } else {
10355 NullsDistinctOption::Distinct
10356 }
10357 } else {
10358 NullsDistinctOption::None
10359 })
10360 }
10361
10362 pub fn maybe_parse_options(
10364 &mut self,
10365 keyword: Keyword,
10366 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10367 if let Token::Word(word) = &self.peek_token_ref().token {
10368 if word.keyword == keyword {
10369 return Ok(Some(self.parse_options(keyword)?));
10370 }
10371 };
10372 Ok(None)
10373 }
10374
10375 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10377 if self.parse_keyword(keyword) {
10378 self.expect_token(&Token::LParen)?;
10379 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10380 self.expect_token(&Token::RParen)?;
10381 Ok(options)
10382 } else {
10383 Ok(vec![])
10384 }
10385 }
10386
10387 pub fn parse_options_with_keywords(
10389 &mut self,
10390 keywords: &[Keyword],
10391 ) -> Result<Vec<SqlOption>, ParserError> {
10392 if self.parse_keywords(keywords) {
10393 self.expect_token(&Token::LParen)?;
10394 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10395 self.expect_token(&Token::RParen)?;
10396 Ok(options)
10397 } else {
10398 Ok(vec![])
10399 }
10400 }
10401
10402 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10404 Ok(if self.parse_keyword(Keyword::BTREE) {
10405 IndexType::BTree
10406 } else if self.parse_keyword(Keyword::HASH) {
10407 IndexType::Hash
10408 } else if self.parse_keyword(Keyword::GIN) {
10409 IndexType::GIN
10410 } else if self.parse_keyword(Keyword::GIST) {
10411 IndexType::GiST
10412 } else if self.parse_keyword(Keyword::SPGIST) {
10413 IndexType::SPGiST
10414 } else if self.parse_keyword(Keyword::BRIN) {
10415 IndexType::BRIN
10416 } else if self.parse_keyword(Keyword::BLOOM) {
10417 IndexType::Bloom
10418 } else {
10419 IndexType::Custom(self.parse_identifier()?)
10420 })
10421 }
10422
10423 pub fn parse_optional_using_then_index_type(
10430 &mut self,
10431 ) -> Result<Option<IndexType>, ParserError> {
10432 if self.parse_keyword(Keyword::USING) {
10433 Ok(Some(self.parse_index_type()?))
10434 } else {
10435 Ok(None)
10436 }
10437 }
10438
    /// Parses an identifier if one is present, rolling the parser back and
    /// returning `None` otherwise.
    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
        self.maybe_parse(|parser| parser.parse_identifier())
    }
10445
10446 #[must_use]
10447 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10449 if self.parse_keyword(Keyword::KEY) {
10450 KeyOrIndexDisplay::Key
10451 } else if self.parse_keyword(Keyword::INDEX) {
10452 KeyOrIndexDisplay::Index
10453 } else {
10454 KeyOrIndexDisplay::None
10455 }
10456 }
10457
10458 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10460 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10461 Ok(Some(IndexOption::Using(index_type)))
10462 } else if self.parse_keyword(Keyword::COMMENT) {
10463 let s = self.parse_literal_string()?;
10464 Ok(Some(IndexOption::Comment(s)))
10465 } else {
10466 Ok(None)
10467 }
10468 }
10469
10470 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10472 let mut options = Vec::new();
10473
10474 loop {
10475 match self.parse_optional_index_option()? {
10476 Some(index_option) => options.push(index_option),
10477 None => return Ok(options),
10478 }
10479 }
10480 }
10481
10482 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10484 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10485
10486 match &self.peek_token_ref().token {
10487 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10488 Ok(SqlOption::Ident(self.parse_identifier()?))
10489 }
10490 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10491 self.parse_option_partition()
10492 }
10493 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10494 self.parse_option_clustered()
10495 }
10496 _ => {
10497 let name = self.parse_identifier()?;
10498 self.expect_token(&Token::Eq)?;
10499 let value = self.parse_expr()?;
10500
10501 Ok(SqlOption::KeyValue { key: name, value })
10502 }
10503 }
10504 }
10505
    /// Parses an MSSQL `CLUSTERED ...` table option. The longest keyword
    /// sequence is tried first, since `parse_keywords` backtracks on a
    /// partial match.
    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
        if self.parse_keywords(&[
            Keyword::CLUSTERED,
            Keyword::COLUMNSTORE,
            Keyword::INDEX,
            Keyword::ORDER,
        ]) {
            // CLUSTERED COLUMNSTORE INDEX ORDER (col [, ...])
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndexOrder(
                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                ),
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndex,
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
            self.expect_token(&Token::LParen)?;

            // Each indexed column may carry an ASC/DESC marker.
            let columns = self.parse_comma_separated(|p| {
                let name = p.parse_identifier()?;
                let asc = p.parse_asc_desc();

                Ok(ClusteredIndex { name, asc })
            })?;

            self.expect_token(&Token::RParen)?;

            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
        } else {
            Err(ParserError::ParserError(
                "invalid CLUSTERED sequence".to_string(),
            ))
        }
    }
10542
10543 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10545 self.expect_keyword_is(Keyword::PARTITION)?;
10546 self.expect_token(&Token::LParen)?;
10547 let column_name = self.parse_identifier()?;
10548
10549 self.expect_keyword_is(Keyword::RANGE)?;
10550 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10551 Some(PartitionRangeDirection::Left)
10552 } else if self.parse_keyword(Keyword::RIGHT) {
10553 Some(PartitionRangeDirection::Right)
10554 } else {
10555 None
10556 };
10557
10558 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10559 self.expect_token(&Token::LParen)?;
10560
10561 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10562
10563 self.expect_token(&Token::RParen)?;
10564 self.expect_token(&Token::RParen)?;
10565
10566 Ok(SqlOption::Partition {
10567 column_name,
10568 range_direction,
10569 for_values,
10570 })
10571 }
10572
10573 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10575 self.expect_token(&Token::LParen)?;
10576 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10577 self.expect_token(&Token::RParen)?;
10578 Ok(Partition::Partitions(partitions))
10579 }
10580
10581 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10583 self.expect_token(&Token::LParen)?;
10584 self.expect_keyword_is(Keyword::SELECT)?;
10585 let projection = self.parse_projection()?;
10586 let group_by = self.parse_optional_group_by()?;
10587 let order_by = self.parse_optional_order_by()?;
10588 self.expect_token(&Token::RParen)?;
10589 Ok(ProjectionSelect {
10590 projection,
10591 group_by,
10592 order_by,
10593 })
10594 }
10595 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10597 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10598 let name = self.parse_identifier()?;
10599 let query = self.parse_projection_select()?;
10600 Ok(AlterTableOperation::AddProjection {
10601 if_not_exists,
10602 name,
10603 select: query,
10604 })
10605 }
10606
10607 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10611 self.expect_keyword_is(Keyword::ALTER)?;
10612 self.expect_keyword_is(Keyword::SORTKEY)?;
10613 self.expect_token(&Token::LParen)?;
10614 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10615 self.expect_token(&Token::RParen)?;
10616 Ok(AlterTableOperation::AlterSortKey { columns })
10617 }
10618
    /// Parses a single operation of an `ALTER TABLE` statement (the part after
    /// the table name), dispatching on the leading keyword: `ADD`, `RENAME`,
    /// `DISABLE`, `ENABLE`, `DROP`, `PARTITION`, `CHANGE`, `MODIFY`, `ALTER`,
    /// `SWAP`, `LOCK`, `ALGORITHM`, `SET`, plus several dialect-gated forms.
    /// Token consumption order is significant: each `parse_keyword(s)` call
    /// commits the tokens it matches.
    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <table constraint> [NOT VALID]
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterTableOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
                && self.parse_keyword(Keyword::PROJECTION)
            {
                // ADD PROJECTION (ClickHouse / generic only).
                return self.parse_alter_table_add_projection();
            } else {
                // ADD [IF NOT EXISTS] PARTITION ..., or ADD [COLUMN] <column def>.
                let if_not_exists =
                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
                let mut new_partitions = vec![];
                loop {
                    if self.parse_keyword(Keyword::PARTITION) {
                        new_partitions.push(self.parse_partition()?);
                    } else {
                        break;
                    }
                }
                if !new_partitions.is_empty() {
                    AlterTableOperation::AddPartitions {
                        if_not_exists,
                        new_partitions,
                    }
                } else {
                    let column_keyword = self.parse_keyword(Keyword::COLUMN);

                    // These dialects also accept IF NOT EXISTS *after* COLUMN;
                    // either placement marks the add as conditional.
                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
                    {
                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
                            || if_not_exists
                    } else {
                        false
                    };

                    let column_def = self.parse_column_def()?;

                    let column_position = self.parse_column_position()?;

                    AlterTableOperation::AddColumn {
                        column_keyword,
                        if_not_exists,
                        column_def,
                        column_position,
                    }
                }
            }
        } else if self.parse_keyword(Keyword::RENAME) {
            // RENAME CONSTRAINT | RENAME TO/AS <table> | RENAME [COLUMN] <old> TO <new>
            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
                let old_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_name = self.parse_identifier()?;
                AlterTableOperation::RenameConstraint { old_name, new_name }
            } else if self.parse_keyword(Keyword::TO) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::To(table_name),
                }
            } else if self.parse_keyword(Keyword::AS) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::As(table_name),
                }
            } else {
                // The COLUMN keyword is optional here.
                let _ = self.parse_keyword(Keyword::COLUMN);
                let old_column_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_column_name = self.parse_identifier()?;
                AlterTableOperation::RenameColumn {
                    old_column_name,
                    new_column_name,
                }
            }
        } else if self.parse_keyword(Keyword::DISABLE) {
            // DISABLE ROW LEVEL SECURITY | RULE <name> | TRIGGER <name>
            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::DisableRowLevelSecurity {}
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableTrigger { name }
            } else {
                return self.expected_ref(
                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
                    self.peek_token_ref(),
                );
            }
        } else if self.parse_keyword(Keyword::ENABLE) {
            // ENABLE [ALWAYS|REPLICA] RULE/TRIGGER, or ENABLE ROW LEVEL SECURITY.
            // Multi-word forms are tried before the bare RULE/TRIGGER forms.
            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysRule { name }
            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysTrigger { name }
            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::EnableRowLevelSecurity {}
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaRule { name }
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaTrigger { name }
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableTrigger { name }
            } else {
                return self.expected_ref(
                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
                    self.peek_token_ref(),
                );
            }
        } else if self.parse_keywords(&[
            Keyword::FORCE,
            Keyword::ROW,
            Keyword::LEVEL,
            Keyword::SECURITY,
        ]) {
            AlterTableOperation::ForceRowLevelSecurity
        } else if self.parse_keywords(&[
            Keyword::NO,
            Keyword::FORCE,
            Keyword::ROW,
            Keyword::LEVEL,
            Keyword::SECURITY,
        ]) {
            AlterTableOperation::NoForceRowLevelSecurity
        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // CLEAR PROJECTION [IF EXISTS] <name> [IN PARTITION <id>]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::ClearProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // MATERIALIZE PROJECTION [IF EXISTS] <name> [IN PARTITION <id>]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::MaterializeProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keyword(Keyword::DROP) {
            // DROP PARTITION / CONSTRAINT / PRIMARY KEY / FOREIGN KEY / INDEX /
            // PROJECTION / CLUSTERING KEY, falling back to DROP [COLUMN] ...
            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: true,
                }
            } else if self.parse_keyword(Keyword::PARTITION) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: false,
                }
            } else if self.parse_keyword(Keyword::CONSTRAINT) {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropConstraint {
                    if_exists,
                    name,
                    drop_behavior,
                }
            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropPrimaryKey { drop_behavior }
            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropForeignKey {
                    name,
                    drop_behavior,
                }
            } else if self.parse_keyword(Keyword::INDEX) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DropIndex { name }
            } else if self.parse_keyword(Keyword::PROJECTION)
                && dialect_of!(self is ClickHouseDialect|GenericDialect)
            {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                AlterTableOperation::DropProjection { if_exists, name }
            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
                AlterTableOperation::DropClusteringKey
            } else {
                // DROP [COLUMN] [IF EXISTS] <name>[, ...] [<drop behavior>]
                let has_column_keyword = self.parse_keyword(Keyword::COLUMN);
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                // Some dialects allow dropping several columns in one clause.
                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
                    self.parse_comma_separated(Parser::parse_identifier)?
                } else {
                    vec![self.parse_identifier()?]
                };
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropColumn {
                    has_column_keyword,
                    column_names,
                    if_exists,
                    drop_behavior,
                }
            }
        } else if self.parse_keyword(Keyword::PARTITION) {
            // PARTITION (<exprs>) RENAME TO PARTITION (<exprs>)
            self.expect_token(&Token::LParen)?;
            let before = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::RENAME)?;
            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
            self.expect_token(&Token::LParen)?;
            let renames = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::RenamePartitions {
                old_partitions: before,
                new_partitions: renames,
            }
        } else if self.parse_keyword(Keyword::CHANGE) {
            // CHANGE [COLUMN] <old> <new> <type> [<options>] [<position>]
            let _ = self.parse_keyword(Keyword::COLUMN);
            let old_name = self.parse_identifier()?;
            let new_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ChangeColumn {
                old_name,
                new_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::MODIFY) {
            // MODIFY [COLUMN] <name> <type> [<options>] [<position>]
            let _ = self.parse_keyword(Keyword::COLUMN);
            let col_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ModifyColumn {
                col_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::ALTER) {
            // ALTER SORTKEY is handled by a dedicated helper; rewind the just
            // consumed ALTER keyword so the helper can re-consume it.
            if self.peek_keyword(Keyword::SORTKEY) {
                self.prev_token();
                return self.parse_alter_sort_key();
            }

            // ALTER [COLUMN] <name> <operation>
            let _ = self.parse_keyword(Keyword::COLUMN);
            let column_name = self.parse_identifier()?;
            let is_postgresql = dialect_of!(self is PostgreSqlDialect);

            let op: AlterColumnOperation = if self.parse_keywords(&[
                Keyword::SET,
                Keyword::NOT,
                Keyword::NULL,
            ]) {
                AlterColumnOperation::SetNotNull {}
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
                AlterColumnOperation::DropNotNull {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
                AlterColumnOperation::SetDefault {
                    value: self.parse_expr()?,
                }
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
                AlterColumnOperation::DropDefault {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
                // `had_set = true` records the long `SET DATA TYPE` spelling.
                self.parse_set_data_type(true)?
            } else if self.parse_keyword(Keyword::TYPE) {
                self.parse_set_data_type(false)?
            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
                // ADD GENERATED [ALWAYS | BY DEFAULT] AS IDENTITY [(<seq opts>)]
                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
                    Some(GeneratedAs::Always)
                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
                    Some(GeneratedAs::ByDefault)
                } else {
                    None
                };

                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;

                let mut sequence_options: Option<Vec<SequenceOptions>> = None;

                if self.peek_token_ref().token == Token::LParen {
                    self.expect_token(&Token::LParen)?;
                    sequence_options = Some(self.parse_create_sequence_options()?);
                    self.expect_token(&Token::RParen)?;
                }

                AlterColumnOperation::AddGenerated {
                    generated_as,
                    sequence_options,
                }
            } else {
                // ADD GENERATED is only mentioned for PostgreSQL.
                let message = if is_postgresql {
                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
                } else {
                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
                };

                return self.expected_ref(message, self.peek_token_ref());
            };
            AlterTableOperation::AlterColumn { column_name, op }
        } else if self.parse_keyword(Keyword::SWAP) {
            self.expect_keyword_is(Keyword::WITH)?;
            let table_name = self.parse_object_name(false)?;
            AlterTableOperation::SwapWith { table_name }
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
        {
            let new_owner = self.parse_owner()?;
            AlterTableOperation::OwnerTo { new_owner }
        } else if dialect_of!(self is PostgreSqlDialect)
            && self.parse_keywords(&[Keyword::ATTACH, Keyword::PARTITION])
        {
            let partition_name = self.parse_object_name(false)?;
            let partition_bound = self.parse_partition_for_values()?;
            AlterTableOperation::AttachPartitionOf {
                partition_name,
                partition_bound,
            }
        } else if dialect_of!(self is PostgreSqlDialect)
            && self.parse_keywords(&[Keyword::DETACH, Keyword::PARTITION])
        {
            let partition_name = self.parse_object_name(false)?;
            let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
            let finalize = self.parse_keyword(Keyword::FINALIZE);
            AlterTableOperation::DetachPartitionOf {
                partition_name,
                concurrently,
                finalize,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::ATTACH)
        {
            AlterTableOperation::AttachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::DETACH)
        {
            AlterTableOperation::DetachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::FREEZE)
        {
            // FREEZE PART/PARTITION <expr> [WITH NAME <id>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::FreezePartition {
                partition,
                with_name,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::UNFREEZE)
        {
            // UNFREEZE PART/PARTITION <expr> [WITH NAME <id>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::UnfreezePartition {
                partition,
                with_name,
            }
        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::ClusterBy { exprs }
        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
            AlterTableOperation::SuspendRecluster
        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
            AlterTableOperation::ResumeRecluster
        } else if self.parse_keyword(Keyword::LOCK) {
            // LOCK [=] DEFAULT | EXCLUSIVE | NONE | SHARED
            let equals = self.consume_token(&Token::Eq);
            let lock = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::EXCLUSIVE,
                Keyword::NONE,
                Keyword::SHARED,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableLock::Default,
                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
                Some(Keyword::NONE) => AlterTableLock::None,
                Some(Keyword::SHARED) => AlterTableLock::Shared,
                _ => self.expected_ref(
                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
                    self.peek_token_ref(),
                )?,
            };
            AlterTableOperation::Lock { equals, lock }
        } else if self.parse_keyword(Keyword::ALGORITHM) {
            // ALGORITHM [=] DEFAULT | INSTANT | INPLACE | COPY
            let equals = self.consume_token(&Token::Eq);
            let algorithm = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::INSTANT,
                Keyword::INPLACE,
                Keyword::COPY,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
                _ => self.expected_ref(
                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
                    self.peek_token_ref(),
                )?,
            };
            AlterTableOperation::Algorithm { equals, algorithm }
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            // AUTO_INCREMENT [=] <number>
            let equals = self.consume_token(&Token::Eq);
            let value = self.parse_number_value()?;
            AlterTableOperation::AutoIncrement { equals, value }
        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
            // REPLICA IDENTITY NOTHING | FULL | DEFAULT | USING INDEX <name>
            let identity = if self.parse_keyword(Keyword::NOTHING) {
                ReplicaIdentity::Nothing
            } else if self.parse_keyword(Keyword::FULL) {
                ReplicaIdentity::Full
            } else if self.parse_keyword(Keyword::DEFAULT) {
                ReplicaIdentity::Default
            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                ReplicaIdentity::Index(self.parse_identifier()?)
            } else {
                return self.expected_ref(
                    "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
                    self.peek_token_ref(),
                );
            };

            AlterTableOperation::ReplicaIdentity { identity }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterTableOperation::ValidateConstraint { name }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
            let tablespace_name = self.parse_identifier()?;
            AlterTableOperation::SetTablespace { tablespace_name }
        } else {
            // Final fallbacks: SET TBLPROPERTIES (...) then a plain SET (...)
            // options list; anything else is a parse error.
            let mut options =
                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
            if !options.is_empty() {
                AlterTableOperation::SetTblProperties {
                    table_properties: options,
                }
            } else {
                options = self.parse_options(Keyword::SET)?;
                if !options.is_empty() {
                    AlterTableOperation::SetOptionsParens { options }
                } else {
                    return self.expected_ref(
                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
                        self.peek_token_ref(),
                    );
                }
            }
        };
        Ok(operation)
    }
11119
11120 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
11121 let data_type = self.parse_data_type()?;
11122 let using = if self.dialect.supports_alter_column_type_using()
11123 && self.parse_keyword(Keyword::USING)
11124 {
11125 Some(self.parse_expr()?)
11126 } else {
11127 None
11128 };
11129 Ok(AlterColumnOperation::SetDataType {
11130 data_type,
11131 using,
11132 had_set,
11133 })
11134 }
11135
11136 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
11137 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
11138 match keyword {
11139 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
11140 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
11141 unexpected_keyword => Err(ParserError::ParserError(
11143 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
11144 )),
11145 }
11146 }
11147
    /// Parses a statement beginning with `ALTER` (the `ALTER` keyword itself
    /// has already been consumed), dispatching on the object-type keyword
    /// that follows.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
            Keyword::DOMAIN,
            Keyword::TRIGGER,
            Keyword::EXTENSION,
            Keyword::PROCEDURE,
            Keyword::DEFAULT,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // Rewind two tokens (SCHEMA and the token before it —
                // presumably the ALTER keyword; confirm at the call site) so
                // `parse_alter_schema` can re-parse from the start.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // ALTER ICEBERG TABLE ...: reuse the table path with the
                // iceberg flag set.
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::DEFAULT => self.parse_alter_default_privileges().map(Into::into),
            Keyword::INDEX => {
                // ALTER INDEX <name> RENAME TO <new> | SET TABLESPACE <name>
                let index_name = self.parse_object_name(false)?;
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
                    let tablespace_name = self.parse_identifier()?;
                    AlterIndexOperation::SetTablespace { tablespace_name }
                } else {
                    return self.expected_ref(
                        "RENAME or SET TABLESPACE after ALTER INDEX",
                        self.peek_token_ref(),
                    );
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            // FUNCTION / AGGREGATE / PROCEDURE share one parser, distinguished
            // by the kind flag.
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::PROCEDURE => self.parse_alter_function(AlterFunctionKind::Procedure),
            Keyword::OPERATOR => {
                // OPERATOR FAMILY / OPERATOR CLASS / plain OPERATOR.
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            Keyword::DOMAIN => self.parse_alter_domain(),
            Keyword::TRIGGER => self.parse_alter_trigger(),
            Keyword::EXTENSION => self.parse_alter_extension(),
            // Unreachable in practice: `expect_one_of_keywords` only returns
            // keywords from the list above.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR, DOMAIN, TRIGGER, EXTENSION, PROCEDURE, DEFAULT}}, got {unexpected_keyword:?}"),
            )),
        }
    }
11235
    /// Parses the name-and-argument signature of an `ALTER AGGREGATE` target:
    /// `name(*)`, `name([args] [ORDER BY args])`, or `name()`.
    ///
    /// Returns `(desc, star, order_by)`, where `star` is `true` only for the
    /// `name(*)` form (in which case `order_by` is `None`), and `order_by`
    /// holds the arguments following `ORDER BY` when present.
    fn parse_alter_aggregate_signature(
        &mut self,
    ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_token(&Token::LParen)?;

        // `name(*)` — star form: empty arg list plus the star flag.
        if self.consume_token(&Token::Mul) {
            self.expect_token(&Token::RParen)?;
            return Ok((
                FunctionDesc {
                    name,
                    args: Some(vec![]),
                },
                true,
                None,
            ));
        }

        // Regular arguments; the list is empty when the next token is already
        // ORDER (bare `ORDER BY` signature) or the closing paren.
        let args =
            if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
                vec![]
            } else {
                self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
            };

        // Optional `ORDER BY <args>` inside the parentheses.
        let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;
        Ok((
            FunctionDesc {
                name,
                args: Some(args),
            },
            false,
            aggregate_order_by,
        ))
    }
11277
    /// Tries to parse one `ALTER FUNCTION` action (behavior, security,
    /// parallel, cost, SET/RESET, ...). Returns `Ok(None)` when the upcoming
    /// tokens do not start any known action, consuming nothing in that case.
    /// Multi-keyword forms are tried before their shorter prefixes.
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // NOT LEAKPROOF
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // EXTERNAL SECURITY DEFINER|INVOKER
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            // SECURITY DEFINER|INVOKER (without EXTERNAL)
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            // PARALLEL UNSAFE|RESTRICTED|SAFE
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // SET <param> FROM CURRENT | SET <param> {= | TO} DEFAULT | <values>
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // Either `=` or `TO` must separate the name from its value.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // RESET ALL | RESET <param>
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // No action keyword matched; nothing was consumed.
            None
        };

        Ok(action)
    }
11387
11388 fn parse_alter_function_actions(
11389 &mut self,
11390 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11391 let mut actions = vec![];
11392 while let Some(action) = self.parse_alter_function_action()? {
11393 actions.push(action);
11394 }
11395 if actions.is_empty() {
11396 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11397 }
11398 let restrict = self.parse_keyword(Keyword::RESTRICT);
11399 Ok((actions, restrict))
11400 }
11401
    /// Parses `ALTER FUNCTION`, `ALTER AGGREGATE`, or `ALTER PROCEDURE`
    /// (the object-type keyword has already been consumed; `kind` records
    /// which one it was).
    pub fn parse_alter_function(
        &mut self,
        kind: AlterFunctionKind,
    ) -> Result<Statement, ParserError> {
        // Aggregates have a richer signature (star form, ORDER BY args);
        // functions and procedures use the plain function descriptor.
        let (function, aggregate_star, aggregate_order_by) = match kind {
            AlterFunctionKind::Function | AlterFunctionKind::Procedure => {
                (self.parse_function_desc()?, false, None)
            }
            AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
        };

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterFunctionOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterFunctionOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterFunctionOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) && self.parse_keyword(Keyword::NO)
        {
            // [NO] DEPENDS ON EXTENSION — not available for aggregates.
            if !self.parse_keyword(Keyword::DEPENDS) {
                return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
            }
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: true,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) && self.parse_keyword(Keyword::DEPENDS)
        {
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: false,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) {
            // Fallback for functions/procedures: one or more actions.
            let (actions, restrict) = self.parse_alter_function_actions()?;
            AlterFunctionOperation::Actions { actions, restrict }
        } else {
            // Aggregates only support the three operations above.
            return self.expected_ref(
                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterFunction(AlterFunction {
            kind,
            function,
            aggregate_order_by,
            aggregate_star,
            operation,
        }))
    }
11467
    /// Parses an `ALTER DOMAIN <name> <operation>` statement, dispatching on
    /// the keyword(s) following the domain name.
    pub fn parse_alter_domain(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <constraint> [NOT VALID]
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterDomainOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else {
                return self.expected_ref("constraint after ADD", self.peek_token_ref());
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::CONSTRAINT]) {
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterDomainOperation::DropConstraint {
                if_exists,
                name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
            AlterDomainOperation::DropDefault
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::CONSTRAINT]) {
            let old_name = self.parse_identifier()?;
            self.expect_keyword_is(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameConstraint { old_name, new_name }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterDomainOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterDomainOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
            AlterDomainOperation::SetDefault {
                default: self.parse_expr()?,
            }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterDomainOperation::ValidateConstraint { name }
        } else {
            return self.expected_ref(
                "ADD, DROP, RENAME, OWNER TO, SET, VALIDATE after ALTER DOMAIN",
                self.peek_token_ref(),
            );
        };

        Ok(AlterDomain { name, operation }.into())
    }
11523
11524 pub fn parse_alter_trigger(&mut self) -> Result<Statement, ParserError> {
11526 let name = self.parse_identifier()?;
11527 self.expect_keyword_is(Keyword::ON)?;
11528 let table_name = self.parse_object_name(false)?;
11529
11530 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11531 let new_name = self.parse_identifier()?;
11532 AlterTriggerOperation::RenameTo { new_name }
11533 } else {
11534 return self.expected_ref(
11535 "RENAME TO after ALTER TRIGGER ... ON ...",
11536 self.peek_token_ref(),
11537 );
11538 };
11539
11540 Ok(AlterTrigger {
11541 name,
11542 table_name,
11543 operation,
11544 }
11545 .into())
11546 }
11547
11548 pub fn parse_alter_extension(&mut self) -> Result<Statement, ParserError> {
11550 let name = self.parse_identifier()?;
11551
11552 let operation = if self.parse_keyword(Keyword::UPDATE) {
11553 let version = if self.parse_keyword(Keyword::TO) {
11554 Some(self.parse_identifier()?)
11555 } else {
11556 None
11557 };
11558 AlterExtensionOperation::UpdateTo { version }
11559 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11560 AlterExtensionOperation::SetSchema {
11561 schema_name: self.parse_object_name(false)?,
11562 }
11563 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11564 AlterExtensionOperation::OwnerTo(self.parse_owner()?)
11565 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11566 let new_name = self.parse_identifier()?;
11567 AlterExtensionOperation::RenameTo { new_name }
11568 } else {
11569 return self.expected_ref(
11570 "UPDATE, SET SCHEMA, OWNER TO, or RENAME TO after ALTER EXTENSION",
11571 self.peek_token_ref(),
11572 );
11573 };
11574
11575 Ok(AlterExtension { name, operation }.into())
11576 }
11577
11578 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11580 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11581 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11583 let on_cluster = self.parse_optional_on_cluster()?;
11584 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11585
11586 let mut location = None;
11587 if self.parse_keyword(Keyword::LOCATION) {
11588 location = Some(HiveSetLocation {
11589 has_set: false,
11590 location: self.parse_identifier()?,
11591 });
11592 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11593 location = Some(HiveSetLocation {
11594 has_set: true,
11595 location: self.parse_identifier()?,
11596 });
11597 }
11598
11599 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11600 self.peek_token_ref().clone()
11601 } else {
11602 self.get_current_token().clone()
11603 };
11604
11605 Ok(AlterTable {
11606 name: table_name,
11607 if_exists,
11608 only,
11609 operations,
11610 location,
11611 on_cluster,
11612 table_type: if iceberg {
11613 Some(AlterTableType::Iceberg)
11614 } else {
11615 None
11616 },
11617 end_token: AttachedToken(end_token),
11618 }
11619 .into())
11620 }
11621
11622 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11624 let name = self.parse_object_name(false)?;
11625 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11626
11627 let with_options = self.parse_options(Keyword::WITH)?;
11628
11629 self.expect_keyword_is(Keyword::AS)?;
11630 let query = self.parse_query()?;
11631
11632 Ok(Statement::AlterView {
11633 name,
11634 columns,
11635 query,
11636 with_options,
11637 })
11638 }
11639
    /// Parse the body of an `ALTER TYPE` statement; the `ALTER TYPE` keywords
    /// are expected to have been consumed already.
    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        // Each probe consumes its keyword(s) only on a full match, so the
        // multi-word prefixes (RENAME TO / RENAME VALUE / ...) can be tried
        // in sequence without corrupting the token stream.
        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            // RENAME TO <new_name>
            let new_name = self.parse_identifier()?;
            AlterTypeOperation::Rename(AlterTypeRename { new_name })
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
            // RENAME VALUE <existing> TO <new>
            let existing_enum_value = self.parse_identifier()?;
            self.expect_keyword(Keyword::TO)?;
            let new_enum_value = self.parse_identifier()?;
            AlterTypeOperation::RenameValue(AlterTypeRenameValue {
                from: existing_enum_value,
                to: new_enum_value,
            })
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::ATTRIBUTE]) {
            // RENAME ATTRIBUTE <old> TO <new> [<drop behavior>]
            let old_name = self.parse_identifier()?;
            self.expect_keyword(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::RenameAttribute {
                old_name,
                new_name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
            // ADD VALUE [IF NOT EXISTS] <value> [BEFORE <v> | AFTER <v>]
            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
            let new_enum_value = self.parse_identifier()?;
            let position = if self.parse_keyword(Keyword::BEFORE) {
                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
            } else if self.parse_keyword(Keyword::AFTER) {
                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
            } else {
                None
            };
            AlterTypeOperation::AddValue(AlterTypeAddValue {
                if_not_exists,
                value: new_enum_value,
                position,
            })
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::ATTRIBUTE]) {
            // ADD ATTRIBUTE <name> <type> [COLLATE <collation>] [<drop behavior>]
            let attr_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::AddAttribute {
                name: attr_name,
                data_type,
                collation,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::ATTRIBUTE]) {
            // DROP ATTRIBUTE [IF EXISTS] <name> [<drop behavior>]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let attr_name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::DropAttribute {
                if_exists,
                name: attr_name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::ALTER, Keyword::ATTRIBUTE]) {
            // ALTER ATTRIBUTE <name> [SET DATA] TYPE <type> [COLLATE ...] [<drop behavior>]
            let attr_name = self.parse_identifier()?;
            // The `SET DATA` prefix before TYPE is optional; discard it.
            let _ = self.parse_keywords(&[Keyword::SET, Keyword::DATA]);
            self.expect_keyword(Keyword::TYPE)?;
            let data_type = self.parse_data_type()?;
            let collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::AlterAttribute {
                name: attr_name,
                data_type,
                collation,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // OWNER TO <new_owner>
            let new_owner = self.parse_owner()?;
            AlterTypeOperation::OwnerTo { new_owner }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            // SET SCHEMA <schema>
            let new_schema = self.parse_object_name(false)?;
            AlterTypeOperation::SetSchema { new_schema }
        } else {
            return self.expected_ref(
                "{RENAME TO | RENAME VALUE | RENAME ATTRIBUTE | ADD VALUE | \
                ADD ATTRIBUTE | DROP ATTRIBUTE | ALTER ATTRIBUTE | OWNER TO | SET SCHEMA}",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterType(AlterType { name, operation }))
    }
11737
11738 pub fn parse_alter_default_privileges(
11745 &mut self,
11746 ) -> Result<AlterDefaultPrivileges, ParserError> {
11747 self.expect_keyword(Keyword::PRIVILEGES)?;
11748
11749 let for_roles = if self.parse_keyword(Keyword::FOR) {
11750 self.expect_one_of_keywords(&[Keyword::ROLE, Keyword::USER])?;
11752 self.parse_comma_separated(Parser::parse_identifier)?
11753 } else {
11754 Vec::new()
11755 };
11756
11757 let in_schemas = if self.parse_keywords(&[Keyword::IN, Keyword::SCHEMA]) {
11758 self.parse_comma_separated(Parser::parse_identifier)?
11759 } else {
11760 Vec::new()
11761 };
11762
11763 let action = self.parse_alter_default_privileges_action()?;
11764
11765 Ok(AlterDefaultPrivileges {
11766 for_roles,
11767 in_schemas,
11768 action,
11769 })
11770 }
11771
11772 fn parse_alter_default_privileges_action(
11773 &mut self,
11774 ) -> Result<AlterDefaultPrivilegesAction, ParserError> {
11775 let kw = self.expect_one_of_keywords(&[Keyword::GRANT, Keyword::REVOKE])?;
11776 match kw {
11777 Keyword::GRANT => {
11778 let privileges = self.parse_alter_default_privileges_privileges()?;
11779 self.expect_keyword(Keyword::ON)?;
11780 let object_type = self.parse_alter_default_privileges_object_type()?;
11781 self.expect_keyword(Keyword::TO)?;
11782 let grantees = self.parse_grantees()?;
11783 let with_grant_option =
11784 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
11785 Ok(AlterDefaultPrivilegesAction::Grant {
11786 privileges,
11787 object_type,
11788 grantees,
11789 with_grant_option,
11790 })
11791 }
11792 Keyword::REVOKE => {
11793 let grant_option_for =
11794 self.parse_keywords(&[Keyword::GRANT, Keyword::OPTION, Keyword::FOR]);
11795 let privileges = self.parse_alter_default_privileges_privileges()?;
11796 self.expect_keyword(Keyword::ON)?;
11797 let object_type = self.parse_alter_default_privileges_object_type()?;
11798 self.expect_keyword(Keyword::FROM)?;
11799 let grantees = self.parse_grantees()?;
11800 let cascade = self.parse_cascade_option();
11801 Ok(AlterDefaultPrivilegesAction::Revoke {
11802 grant_option_for,
11803 privileges,
11804 object_type,
11805 grantees,
11806 cascade,
11807 })
11808 }
11809 unexpected_keyword => Err(ParserError::ParserError(format!(
11810 "Internal parser error: expected GRANT or REVOKE, got {unexpected_keyword:?}"
11811 ))),
11812 }
11813 }
11814
11815 fn parse_alter_default_privileges_privileges(&mut self) -> Result<Privileges, ParserError> {
11816 if self.parse_keyword(Keyword::ALL) {
11817 Ok(Privileges::All {
11818 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
11819 })
11820 } else {
11821 Ok(Privileges::Actions(self.parse_actions_list()?))
11822 }
11823 }
11824
11825 fn parse_alter_default_privileges_object_type(
11826 &mut self,
11827 ) -> Result<AlterDefaultPrivilegesObjectType, ParserError> {
11828 let kw = self.expect_one_of_keywords(&[
11829 Keyword::TABLES,
11830 Keyword::SEQUENCES,
11831 Keyword::FUNCTIONS,
11832 Keyword::ROUTINES,
11833 Keyword::TYPES,
11834 Keyword::SCHEMAS,
11835 ])?;
11836 match kw {
11837 Keyword::TABLES => Ok(AlterDefaultPrivilegesObjectType::Tables),
11838 Keyword::SEQUENCES => Ok(AlterDefaultPrivilegesObjectType::Sequences),
11839 Keyword::FUNCTIONS => Ok(AlterDefaultPrivilegesObjectType::Functions),
11840 Keyword::ROUTINES => Ok(AlterDefaultPrivilegesObjectType::Routines),
11841 Keyword::TYPES => Ok(AlterDefaultPrivilegesObjectType::Types),
11842 Keyword::SCHEMAS => Ok(AlterDefaultPrivilegesObjectType::Schemas),
11843 unexpected_keyword => Err(ParserError::ParserError(format!(
11844 "Internal parser error: expected one of {{TABLES, SEQUENCES, FUNCTIONS, ROUTINES, TYPES, SCHEMAS}}, got {unexpected_keyword:?}"
11845 ))),
11846 }
11847 }
11848
11849 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11853 let name = self.parse_object_name(false)?;
11854 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11855 AlterCollationOperation::RenameTo {
11856 new_name: self.parse_identifier()?,
11857 }
11858 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11859 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11860 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11861 AlterCollationOperation::SetSchema {
11862 schema_name: self.parse_object_name(false)?,
11863 }
11864 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11865 AlterCollationOperation::RefreshVersion
11866 } else {
11867 return self.expected_ref(
11868 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11869 self.peek_token_ref(),
11870 );
11871 };
11872
11873 Ok(AlterCollation { name, operation })
11874 }
11875
    /// Parse `ALTER OPERATOR <name> (<left>, <right>) <operation>`; the
    /// `ALTER OPERATOR` keywords are expected to have been consumed already.
    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
        let name = self.parse_operator_name()?;

        // Mandatory operand signature: `( {NONE | <type>}, <type> )`.
        self.expect_token(&Token::LParen)?;

        // NONE means the operator has no left operand type.
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // OWNER TO { CURRENT_ROLE | CURRENT_USER | SESSION_USER | <ident> }
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            // SET SCHEMA <schema> — must be probed before the bare SET branch
            // below, otherwise SET would consume the keyword first.
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            // SET ( <option> [, ...] ) — parenthesized option list.
            self.expect_token(&Token::LParen)?;

            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    Keyword::RESTRICT => {
                        // RESTRICT = { NONE | <procedure name> }
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        // JOIN = { NONE | <procedure name> }
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    Keyword::COMMUTATOR => {
                        // COMMUTATOR = <operator name>
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        // NEGATOR = <operator name>
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    Keyword::HASHES => {
                        // HASHES and MERGES are bare flags with no `= value`.
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // Unreachable: expect_one_of_keywords only returns the
                    // keywords listed above.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                // Options are comma-separated; a missing comma ends the list.
                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        })
    }
11985
11986 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11988 let strategy_number = self.parse_literal_uint()?;
11989 let operator_name = self.parse_operator_name()?;
11990
11991 self.expect_token(&Token::LParen)?;
11993 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11994 self.expect_token(&Token::RParen)?;
11995
11996 let purpose = if self.parse_keyword(Keyword::FOR) {
11998 if self.parse_keyword(Keyword::SEARCH) {
11999 Some(OperatorPurpose::ForSearch)
12000 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12001 let sort_family = self.parse_object_name(false)?;
12002 Some(OperatorPurpose::ForOrderBy { sort_family })
12003 } else {
12004 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
12005 }
12006 } else {
12007 None
12008 };
12009
12010 Ok(OperatorFamilyItem::Operator {
12011 strategy_number,
12012 operator_name,
12013 op_types,
12014 purpose,
12015 })
12016 }
12017
12018 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
12020 let support_number = self.parse_literal_uint()?;
12021
12022 let op_types =
12024 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
12025 let types = self.parse_comma_separated(Parser::parse_data_type)?;
12026 self.expect_token(&Token::RParen)?;
12027 Some(types)
12028 } else if self.consume_token(&Token::LParen) {
12029 self.expect_token(&Token::RParen)?;
12030 Some(vec![])
12031 } else {
12032 None
12033 };
12034
12035 let function_name = self.parse_object_name(false)?;
12036
12037 let argument_types = if self.consume_token(&Token::LParen) {
12039 if self.peek_token_ref().token == Token::RParen {
12040 self.expect_token(&Token::RParen)?;
12041 vec![]
12042 } else {
12043 let types = self.parse_comma_separated(Parser::parse_data_type)?;
12044 self.expect_token(&Token::RParen)?;
12045 types
12046 }
12047 } else {
12048 vec![]
12049 };
12050
12051 Ok(OperatorFamilyItem::Function {
12052 support_number,
12053 op_types,
12054 function_name,
12055 argument_types,
12056 })
12057 }
12058
12059 fn parse_operator_family_drop_operator(
12061 &mut self,
12062 ) -> Result<OperatorFamilyDropItem, ParserError> {
12063 let strategy_number = self.parse_literal_uint()?;
12064
12065 self.expect_token(&Token::LParen)?;
12067 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
12068 self.expect_token(&Token::RParen)?;
12069
12070 Ok(OperatorFamilyDropItem::Operator {
12071 strategy_number,
12072 op_types,
12073 })
12074 }
12075
12076 fn parse_operator_family_drop_function(
12078 &mut self,
12079 ) -> Result<OperatorFamilyDropItem, ParserError> {
12080 let support_number = self.parse_literal_uint()?;
12081
12082 self.expect_token(&Token::LParen)?;
12084 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
12085 self.expect_token(&Token::RParen)?;
12086
12087 Ok(OperatorFamilyDropItem::Function {
12088 support_number,
12089 op_types,
12090 })
12091 }
12092
12093 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
12095 if self.parse_keyword(Keyword::OPERATOR) {
12096 self.parse_operator_family_add_operator()
12097 } else if self.parse_keyword(Keyword::FUNCTION) {
12098 self.parse_operator_family_add_function()
12099 } else {
12100 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
12101 }
12102 }
12103
12104 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
12106 if self.parse_keyword(Keyword::OPERATOR) {
12107 self.parse_operator_family_drop_operator()
12108 } else if self.parse_keyword(Keyword::FUNCTION) {
12109 self.parse_operator_family_drop_function()
12110 } else {
12111 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
12112 }
12113 }
12114
12115 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
12118 let name = self.parse_object_name(false)?;
12119 self.expect_keyword(Keyword::USING)?;
12120 let using = self.parse_identifier()?;
12121
12122 let operation = if self.parse_keyword(Keyword::ADD) {
12123 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
12124 AlterOperatorFamilyOperation::Add { items }
12125 } else if self.parse_keyword(Keyword::DROP) {
12126 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
12127 AlterOperatorFamilyOperation::Drop { items }
12128 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12129 let new_name = self.parse_object_name(false)?;
12130 AlterOperatorFamilyOperation::RenameTo { new_name }
12131 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12132 let owner = self.parse_owner()?;
12133 AlterOperatorFamilyOperation::OwnerTo(owner)
12134 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
12135 let schema_name = self.parse_object_name(false)?;
12136 AlterOperatorFamilyOperation::SetSchema { schema_name }
12137 } else {
12138 return self.expected_ref(
12139 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
12140 self.peek_token_ref(),
12141 );
12142 };
12143
12144 Ok(AlterOperatorFamily {
12145 name,
12146 using,
12147 operation,
12148 })
12149 }
12150
12151 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
12155 let name = self.parse_object_name(false)?;
12156 self.expect_keyword(Keyword::USING)?;
12157 let using = self.parse_identifier()?;
12158
12159 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12160 let new_name = self.parse_object_name(false)?;
12161 AlterOperatorClassOperation::RenameTo { new_name }
12162 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12163 let owner = self.parse_owner()?;
12164 AlterOperatorClassOperation::OwnerTo(owner)
12165 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
12166 let schema_name = self.parse_object_name(false)?;
12167 AlterOperatorClassOperation::SetSchema { schema_name }
12168 } else {
12169 return self.expected_ref(
12170 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
12171 self.peek_token_ref(),
12172 );
12173 };
12174
12175 Ok(AlterOperatorClass {
12176 name,
12177 using,
12178 operation,
12179 })
12180 }
12181
12182 pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
12186 self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
12187 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
12188 let name = self.parse_object_name(false)?;
12189 let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
12190 self.prev_token();
12191 let options = self.parse_options(Keyword::OPTIONS)?;
12192 AlterSchemaOperation::SetOptionsParens { options }
12193 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
12194 let collate = self.parse_expr()?;
12195 AlterSchemaOperation::SetDefaultCollate { collate }
12196 } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
12197 let replica = self.parse_identifier()?;
12198 let options = if self.peek_keyword(Keyword::OPTIONS) {
12199 Some(self.parse_options(Keyword::OPTIONS)?)
12200 } else {
12201 None
12202 };
12203 AlterSchemaOperation::AddReplica { replica, options }
12204 } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
12205 let replica = self.parse_identifier()?;
12206 AlterSchemaOperation::DropReplica { replica }
12207 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12208 let new_name = self.parse_object_name(false)?;
12209 AlterSchemaOperation::Rename { name: new_name }
12210 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12211 let owner = self.parse_owner()?;
12212 AlterSchemaOperation::OwnerTo { owner }
12213 } else {
12214 return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
12215 };
12216 Ok(Statement::AlterSchema(AlterSchema {
12217 name,
12218 if_exists,
12219 operations: vec![operation],
12220 }))
12221 }
12222
12223 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
12226 let object_name = self.parse_object_name(false)?;
12227 if self.peek_token_ref().token == Token::LParen {
12228 match self.parse_function(object_name)? {
12229 Expr::Function(f) => Ok(Statement::Call(f)),
12230 other => parser_err!(
12231 format!("Expected a simple procedure call but found: {other}"),
12232 self.peek_token_ref().span.start
12233 ),
12234 }
12235 } else {
12236 Ok(Statement::Call(Function {
12237 name: object_name,
12238 uses_odbc_syntax: false,
12239 parameters: FunctionArguments::None,
12240 args: FunctionArguments::None,
12241 over: None,
12242 filter: None,
12243 null_treatment: None,
12244 within_group: vec![],
12245 }))
12246 }
12247 }
12248
12249 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
12251 let source;
12252 if self.consume_token(&Token::LParen) {
12253 source = CopySource::Query(self.parse_query()?);
12254 self.expect_token(&Token::RParen)?;
12255 } else {
12256 let table_name = self.parse_object_name(false)?;
12257 let columns = self.parse_parenthesized_column_list(Optional, false)?;
12258 source = CopySource::Table {
12259 table_name,
12260 columns,
12261 };
12262 }
12263 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
12264 Some(Keyword::FROM) => false,
12265 Some(Keyword::TO) => true,
12266 _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
12267 };
12268 if !to {
12269 if let CopySource::Query(_) = source {
12272 return Err(ParserError::ParserError(
12273 "COPY ... FROM does not support query as a source".to_string(),
12274 ));
12275 }
12276 }
12277 let target = if self.parse_keyword(Keyword::STDIN) {
12278 CopyTarget::Stdin
12279 } else if self.parse_keyword(Keyword::STDOUT) {
12280 CopyTarget::Stdout
12281 } else if self.parse_keyword(Keyword::PROGRAM) {
12282 CopyTarget::Program {
12283 command: self.parse_literal_string()?,
12284 }
12285 } else {
12286 CopyTarget::File {
12287 filename: self.parse_literal_string()?,
12288 }
12289 };
12290 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
12292 if self.consume_token(&Token::LParen) {
12293 options = self.parse_comma_separated(Parser::parse_copy_option)?;
12294 self.expect_token(&Token::RParen)?;
12295 }
12296 let mut legacy_options = vec![];
12297 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
12298 legacy_options.push(opt);
12299 }
12300 let values =
12301 if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
12302 self.expect_token(&Token::SemiColon)?;
12303 self.parse_tsv()
12304 } else {
12305 vec![]
12306 };
12307 Ok(Statement::Copy {
12308 source,
12309 to,
12310 target,
12311 options,
12312 legacy_options,
12313 values,
12314 })
12315 }
12316
12317 fn parse_open(&mut self) -> Result<Statement, ParserError> {
12319 self.expect_keyword(Keyword::OPEN)?;
12320 Ok(Statement::Open(OpenStatement {
12321 cursor_name: self.parse_identifier()?,
12322 }))
12323 }
12324
12325 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
12327 let cursor = if self.parse_keyword(Keyword::ALL) {
12328 CloseCursor::All
12329 } else {
12330 let name = self.parse_identifier()?;
12331
12332 CloseCursor::Specific { name }
12333 };
12334
12335 Ok(Statement::Close { cursor })
12336 }
12337
    /// Parse a single option from a COPY statement's parenthesized option
    /// list, e.g. `FORMAT csv` or `FORCE_QUOTE (a, b)`.
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
            // FREEZE takes an optional boolean: the bare keyword or an
            // explicit TRUE means "on"; only an explicit FALSE means "off".
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            // HEADER uses the same optional-boolean convention as FREEZE.
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            // The FORCE_* options each take a mandatory parenthesized column list.
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected_ref("option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12379
12380 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
12381 if self.parse_keyword(Keyword::FORMAT) {
12383 let _ = self.parse_keyword(Keyword::AS);
12384 }
12385
12386 let ret = match self.parse_one_of_keywords(&[
12387 Keyword::ACCEPTANYDATE,
12388 Keyword::ACCEPTINVCHARS,
12389 Keyword::ADDQUOTES,
12390 Keyword::ALLOWOVERWRITE,
12391 Keyword::BINARY,
12392 Keyword::BLANKSASNULL,
12393 Keyword::BZIP2,
12394 Keyword::CLEANPATH,
12395 Keyword::COMPUPDATE,
12396 Keyword::CREDENTIALS,
12397 Keyword::CSV,
12398 Keyword::DATEFORMAT,
12399 Keyword::DELIMITER,
12400 Keyword::EMPTYASNULL,
12401 Keyword::ENCRYPTED,
12402 Keyword::ESCAPE,
12403 Keyword::EXTENSION,
12404 Keyword::FIXEDWIDTH,
12405 Keyword::GZIP,
12406 Keyword::HEADER,
12407 Keyword::IAM_ROLE,
12408 Keyword::IGNOREHEADER,
12409 Keyword::JSON,
12410 Keyword::MANIFEST,
12411 Keyword::MAXFILESIZE,
12412 Keyword::NULL,
12413 Keyword::PARALLEL,
12414 Keyword::PARQUET,
12415 Keyword::PARTITION,
12416 Keyword::REGION,
12417 Keyword::REMOVEQUOTES,
12418 Keyword::ROWGROUPSIZE,
12419 Keyword::STATUPDATE,
12420 Keyword::TIMEFORMAT,
12421 Keyword::TRUNCATECOLUMNS,
12422 Keyword::ZSTD,
12423 ]) {
12424 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
12425 Some(Keyword::ACCEPTINVCHARS) => {
12426 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12428 Some(self.parse_literal_string()?)
12429 } else {
12430 None
12431 };
12432 CopyLegacyOption::AcceptInvChars(ch)
12433 }
12434 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
12435 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
12436 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
12437 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
12438 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
12439 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
12440 Some(Keyword::COMPUPDATE) => {
12441 let preset = self.parse_keyword(Keyword::PRESET);
12442 let enabled = match self.parse_one_of_keywords(&[
12443 Keyword::TRUE,
12444 Keyword::FALSE,
12445 Keyword::ON,
12446 Keyword::OFF,
12447 ]) {
12448 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12449 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12450 _ => None,
12451 };
12452 CopyLegacyOption::CompUpdate { preset, enabled }
12453 }
12454 Some(Keyword::CREDENTIALS) => {
12455 CopyLegacyOption::Credentials(self.parse_literal_string()?)
12456 }
12457 Some(Keyword::CSV) => CopyLegacyOption::Csv({
12458 let mut opts = vec![];
12459 while let Some(opt) =
12460 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
12461 {
12462 opts.push(opt);
12463 }
12464 opts
12465 }),
12466 Some(Keyword::DATEFORMAT) => {
12467 let _ = self.parse_keyword(Keyword::AS);
12468 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12469 Some(self.parse_literal_string()?)
12470 } else {
12471 None
12472 };
12473 CopyLegacyOption::DateFormat(fmt)
12474 }
12475 Some(Keyword::DELIMITER) => {
12476 let _ = self.parse_keyword(Keyword::AS);
12477 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
12478 }
12479 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
12480 Some(Keyword::ENCRYPTED) => {
12481 let auto = self.parse_keyword(Keyword::AUTO);
12482 CopyLegacyOption::Encrypted { auto }
12483 }
12484 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
12485 Some(Keyword::EXTENSION) => {
12486 let ext = self.parse_literal_string()?;
12487 CopyLegacyOption::Extension(ext)
12488 }
12489 Some(Keyword::FIXEDWIDTH) => {
12490 let spec = self.parse_literal_string()?;
12491 CopyLegacyOption::FixedWidth(spec)
12492 }
12493 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
12494 Some(Keyword::HEADER) => CopyLegacyOption::Header,
12495 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
12496 Some(Keyword::IGNOREHEADER) => {
12497 let _ = self.parse_keyword(Keyword::AS);
12498 let num_rows = self.parse_literal_uint()?;
12499 CopyLegacyOption::IgnoreHeader(num_rows)
12500 }
12501 Some(Keyword::JSON) => {
12502 let _ = self.parse_keyword(Keyword::AS);
12503 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12504 Some(self.parse_literal_string()?)
12505 } else {
12506 None
12507 };
12508 CopyLegacyOption::Json(fmt)
12509 }
12510 Some(Keyword::MANIFEST) => {
12511 let verbose = self.parse_keyword(Keyword::VERBOSE);
12512 CopyLegacyOption::Manifest { verbose }
12513 }
12514 Some(Keyword::MAXFILESIZE) => {
12515 let _ = self.parse_keyword(Keyword::AS);
12516 let size = self.parse_number_value()?;
12517 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12518 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12519 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12520 _ => None,
12521 };
12522 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
12523 }
12524 Some(Keyword::NULL) => {
12525 let _ = self.parse_keyword(Keyword::AS);
12526 CopyLegacyOption::Null(self.parse_literal_string()?)
12527 }
12528 Some(Keyword::PARALLEL) => {
12529 let enabled = match self.parse_one_of_keywords(&[
12530 Keyword::TRUE,
12531 Keyword::FALSE,
12532 Keyword::ON,
12533 Keyword::OFF,
12534 ]) {
12535 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12536 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12537 _ => None,
12538 };
12539 CopyLegacyOption::Parallel(enabled)
12540 }
12541 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
12542 Some(Keyword::PARTITION) => {
12543 self.expect_keyword(Keyword::BY)?;
12544 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
12545 let include = self.parse_keyword(Keyword::INCLUDE);
12546 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
12547 }
12548 Some(Keyword::REGION) => {
12549 let _ = self.parse_keyword(Keyword::AS);
12550 let region = self.parse_literal_string()?;
12551 CopyLegacyOption::Region(region)
12552 }
12553 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
12554 Some(Keyword::ROWGROUPSIZE) => {
12555 let _ = self.parse_keyword(Keyword::AS);
12556 let file_size = self.parse_file_size()?;
12557 CopyLegacyOption::RowGroupSize(file_size)
12558 }
12559 Some(Keyword::STATUPDATE) => {
12560 let enabled = match self.parse_one_of_keywords(&[
12561 Keyword::TRUE,
12562 Keyword::FALSE,
12563 Keyword::ON,
12564 Keyword::OFF,
12565 ]) {
12566 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12567 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12568 _ => None,
12569 };
12570 CopyLegacyOption::StatUpdate(enabled)
12571 }
12572 Some(Keyword::TIMEFORMAT) => {
12573 let _ = self.parse_keyword(Keyword::AS);
12574 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12575 Some(self.parse_literal_string()?)
12576 } else {
12577 None
12578 };
12579 CopyLegacyOption::TimeFormat(fmt)
12580 }
12581 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
12582 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
12583 _ => self.expected_ref("option", self.peek_token_ref())?,
12584 };
12585 Ok(ret)
12586 }
12587
12588 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12589 let size = self.parse_number_value()?;
12590 let unit = self.maybe_parse_file_size_unit();
12591 Ok(FileSize { size, unit })
12592 }
12593
12594 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12595 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12596 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12597 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12598 _ => None,
12599 }
12600 }
12601
12602 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12603 if self.parse_keyword(Keyword::DEFAULT) {
12604 Ok(IamRoleKind::Default)
12605 } else {
12606 let arn = self.parse_literal_string()?;
12607 Ok(IamRoleKind::Arn(arn))
12608 }
12609 }
12610
12611 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
12612 let ret = match self.parse_one_of_keywords(&[
12613 Keyword::HEADER,
12614 Keyword::QUOTE,
12615 Keyword::ESCAPE,
12616 Keyword::FORCE,
12617 ]) {
12618 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
12619 Some(Keyword::QUOTE) => {
12620 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
12622 }
12623 Some(Keyword::ESCAPE) => {
12624 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
12626 }
12627 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
12628 CopyLegacyCsvOption::ForceNotNull(
12629 self.parse_comma_separated(|p| p.parse_identifier())?,
12630 )
12631 }
12632 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
12633 CopyLegacyCsvOption::ForceQuote(
12634 self.parse_comma_separated(|p| p.parse_identifier())?,
12635 )
12636 }
12637 _ => self.expected_ref("csv option", self.peek_token_ref())?,
12638 };
12639 Ok(ret)
12640 }
12641
12642 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12643 let s = self.parse_literal_string()?;
12644 if s.len() != 1 {
12645 let loc = self
12646 .tokens
12647 .get(self.index - 1)
12648 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12649 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12650 }
12651 Ok(s.chars().next().unwrap())
12652 }
12653
    /// Parses a tab-separated-values payload (e.g. the data section of
    /// a PostgreSQL `COPY ... FROM STDIN` statement); delegates to
    /// [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
12659
    /// Consumes tokens as tab-separated values until the end-of-data
    /// marker `\.` (backslash-period) or end of input.
    ///
    /// Tabs and newlines both terminate the value being accumulated;
    /// the escape `\N` pushes a SQL NULL (`None`). All other tokens
    /// are appended verbatim to the current value.
    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
        let mut values = vec![];
        let mut content = String::new();
        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
            match t {
                Token::Whitespace(Whitespace::Tab) => {
                    // Field separator: flush the buffer, reusing its
                    // allocation via `take`.
                    values.push(Some(core::mem::take(&mut content)));
                }
                Token::Whitespace(Whitespace::Newline) => {
                    // Row separator: flushed the same way as a tab.
                    values.push(Some(core::mem::take(&mut content)));
                }
                Token::Backslash => {
                    // `\.` terminates the whole data section.
                    if self.consume_token(&Token::Period) {
                        return values;
                    }
                    // `\N` is the NULL marker; any other escaped word is
                    // silently dropped. NOTE(review): `content` pending at
                    // this point is not flushed — confirm that is intended.
                    if let Token::Word(w) = self.next_token().token {
                        if w.value == "N" {
                            values.push(None);
                        }
                    }
                }
                _ => {
                    content.push_str(&t.to_string());
                }
            }
        }
        values
    }
12689
    /// Parses a literal value (number, string, boolean, `NULL`,
    /// placeholder, …), returning it together with its source span.
    ///
    /// Boolean keywords are only accepted when the dialect supports
    /// boolean literals; single/double quoted strings may be
    /// concatenated with adjacent literals per dialect rules.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Small helper so each arm below can attach the shared span.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Plain quoted strings may absorb adjacent literals when the
            // dialect supports string-literal concatenation.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the sigil must be followed
            // (with no intervening whitespace) by a word or an integer.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The resulting span covers the sigil through the name.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12807
    /// Appends adjacent string literals to `str` when the dialect
    /// supports string-literal concatenation.
    ///
    /// Two mutually exclusive modes, chosen by dialect capability:
    /// * plain concatenation — every immediately following single- or
    ///   double-quoted string is appended;
    /// * newline-separated concatenation — a following literal is only
    ///   appended when at least one newline token occurred since the
    ///   previous literal.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // Track whether a newline has been seen since the last
            // literal; only then is the next literal absorbed.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    Token::Whitespace(_) => {
                        // Other whitespace is skipped without satisfying
                        // the newline requirement.
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        // Require a fresh newline before the next literal.
                        after_newline = false;
                    }
                    _ => break,
                }
            }
        }

        str
    }
12846
12847 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12849 let value_wrapper = self.parse_value()?;
12850 match &value_wrapper.value {
12851 Value::Number(_, _) => Ok(value_wrapper),
12852 Value::Placeholder(_) => Ok(value_wrapper),
12853 _ => {
12854 self.prev_token();
12855 self.expected_ref("literal number", self.peek_token_ref())
12856 }
12857 }
12858 }
12859
12860 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12863 let next_token = self.next_token();
12864 match next_token.token {
12865 Token::Plus => Ok(Expr::UnaryOp {
12866 op: UnaryOperator::Plus,
12867 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12868 }),
12869 Token::Minus => Ok(Expr::UnaryOp {
12870 op: UnaryOperator::Minus,
12871 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12872 }),
12873 _ => {
12874 self.prev_token();
12875 Ok(Expr::Value(self.parse_number_value()?))
12876 }
12877 }
12878 }
12879
12880 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12881 let next_token = self.next_token();
12882 let span = next_token.span;
12883 match next_token.token {
12884 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12885 Value::SingleQuotedString(s.to_string()).with_span(span),
12886 )),
12887 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12888 Value::DoubleQuotedString(s.to_string()).with_span(span),
12889 )),
12890 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12891 Value::HexStringLiteral(s.to_string()).with_span(span),
12892 )),
12893 unexpected => self.expected(
12894 "a string value",
12895 TokenWithSpan {
12896 token: unexpected,
12897 span,
12898 },
12899 ),
12900 }
12901 }
12902
12903 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12905 let next_token = self.next_token();
12906 match next_token.token {
12907 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12908 _ => self.expected("literal int", next_token),
12909 }
12910 }
12911
    /// Parses the string body of `CREATE FUNCTION ... AS 'body'
    /// [, 'link_symbol']`.
    ///
    /// On PostgreSQL and the generic dialect the body may be a
    /// dollar-quoted string; otherwise a plain string literal is
    /// expected.
    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
        // Parses one string operand, preserving its span.
        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
            let peek_token = parser.peek_token();
            let span = peek_token.span;
            match peek_token.token {
                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
                {
                    parser.next_token();
                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
                }
                _ => Ok(Expr::Value(
                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
                )),
            }
        };

        Ok(CreateFunctionBody::AsBeforeOptions {
            body: parse_string_expr(self)?,
            // A second comma-separated string, when present, names the
            // link symbol (`AS 'obj_file', 'link_symbol'` form).
            link_symbol: if self.consume_token(&Token::Comma) {
                Some(parse_string_expr(self)?)
            } else {
                None
            },
        })
    }
12939
    /// Parses a string-like literal and returns its raw contents:
    /// a bare non-keyword word, a quoted string, or (per dialect)
    /// an escaped or dollar-quoted string.
    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            // A bare word is accepted only when it is not a keyword.
            Token::Word(Word {
                value,
                keyword: Keyword::NoKeyword,
                ..
            }) => Ok(value),
            Token::SingleQuotedString(s) => Ok(s),
            Token::DoubleQuotedString(s) => Ok(s),
            // `E'...'` escaped strings: PostgreSQL/generic dialects only.
            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s)
            }
            Token::UnicodeStringLiteral(s) => Ok(s),
            // `$tag$...$tag$` dollar-quoted strings: PostgreSQL/generic only.
            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s.value)
            }
            _ => self.expected("literal string", next_token),
        }
    }
12961
12962 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12964 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12965 Some(Keyword::TRUE) => Ok(true),
12966 Some(Keyword::FALSE) => Ok(false),
12967 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12968 }
12969 }
12970
    /// Parses the tail of the Unicode normalization predicate
    /// `<expr> IS [NOT] [NFC|NFD|NFKC|NFKD] NORMALIZED`.
    ///
    /// `expr` is the already-parsed left-hand expression; the caller
    /// has consumed up to (and including) `IS`.
    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        let neg = self.parse_keyword(Keyword::NOT);
        // The normalization form is optional; `maybe_parse` backtracks
        // when no form keyword follows.
        let normalized_form = self.maybe_parse(|parser| {
            match parser.parse_one_of_keywords(&[
                Keyword::NFC,
                Keyword::NFD,
                Keyword::NFKC,
                Keyword::NFKD,
            ]) {
                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
                _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
            }
        })?;
        if self.parse_keyword(Keyword::NORMALIZED) {
            return Ok(Expr::IsNormalized {
                expr: Box::new(expr),
                form: normalized_form,
                negated: neg,
            });
        }
        // `NORMALIZED` is mandatory; without it the predicate is invalid.
        self.expected_ref("unicode normalization form", self.peek_token_ref())
    }
12997
12998 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
13000 self.expect_token(&Token::LParen)?;
13001 let values = self.parse_comma_separated(|parser| {
13002 let name = parser.parse_literal_string()?;
13003 let e = if parser.consume_token(&Token::Eq) {
13004 let value = parser.parse_number()?;
13005 EnumMember::NamedValue(name, value)
13006 } else {
13007 EnumMember::Name(name)
13008 };
13009 Ok(e)
13010 })?;
13011 self.expect_token(&Token::RParen)?;
13012
13013 Ok(values)
13014 }
13015
    /// Parses a SQL data type, rejecting input that leaves an unmatched
    /// `>` after nested angle-bracket types (e.g. `ARRAY<INT>>`).
    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
        // The helper reports a dangling closing angle bracket via this flag.
        if trailing_bracket.0 {
            return parser_err!(
                format!("unmatched > after parsing data type {ty}"),
                self.peek_token_ref()
            );
        }

        Ok(ty)
    }
13028
13029 fn parse_data_type_helper(
13030 &mut self,
13031 ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
13032 let dialect = self.dialect;
13033 self.advance_token();
13034 let next_token = self.get_current_token();
13035 let next_token_index = self.get_current_index();
13036
13037 let mut trailing_bracket: MatchedTrailingBracket = false.into();
13038 let mut data = match &next_token.token {
13039 Token::Word(w) => match w.keyword {
13040 Keyword::BOOLEAN => Ok(DataType::Boolean),
13041 Keyword::BOOL => Ok(DataType::Bool),
13042 Keyword::FLOAT => {
13043 let precision = self.parse_exact_number_optional_precision_scale()?;
13044
13045 if self.parse_keyword(Keyword::UNSIGNED) {
13046 Ok(DataType::FloatUnsigned(precision))
13047 } else {
13048 Ok(DataType::Float(precision))
13049 }
13050 }
13051 Keyword::REAL => {
13052 if self.parse_keyword(Keyword::UNSIGNED) {
13053 Ok(DataType::RealUnsigned)
13054 } else {
13055 Ok(DataType::Real)
13056 }
13057 }
13058 Keyword::FLOAT4 => Ok(DataType::Float4),
13059 Keyword::FLOAT32 => Ok(DataType::Float32),
13060 Keyword::FLOAT64 => Ok(DataType::Float64),
13061 Keyword::FLOAT8 => Ok(DataType::Float8),
13062 Keyword::DOUBLE => {
13063 if self.parse_keyword(Keyword::PRECISION) {
13064 if self.parse_keyword(Keyword::UNSIGNED) {
13065 Ok(DataType::DoublePrecisionUnsigned)
13066 } else {
13067 Ok(DataType::DoublePrecision)
13068 }
13069 } else {
13070 let precision = self.parse_exact_number_optional_precision_scale()?;
13071
13072 if self.parse_keyword(Keyword::UNSIGNED) {
13073 Ok(DataType::DoubleUnsigned(precision))
13074 } else {
13075 Ok(DataType::Double(precision))
13076 }
13077 }
13078 }
13079 Keyword::TINYINT => {
13080 let optional_precision = self.parse_optional_precision();
13081 if self.parse_keyword(Keyword::UNSIGNED) {
13082 Ok(DataType::TinyIntUnsigned(optional_precision?))
13083 } else {
13084 if dialect.supports_data_type_signed_suffix() {
13085 let _ = self.parse_keyword(Keyword::SIGNED);
13086 }
13087 Ok(DataType::TinyInt(optional_precision?))
13088 }
13089 }
13090 Keyword::INT2 => {
13091 let optional_precision = self.parse_optional_precision();
13092 if self.parse_keyword(Keyword::UNSIGNED) {
13093 Ok(DataType::Int2Unsigned(optional_precision?))
13094 } else {
13095 Ok(DataType::Int2(optional_precision?))
13096 }
13097 }
13098 Keyword::SMALLINT => {
13099 let optional_precision = self.parse_optional_precision();
13100 if self.parse_keyword(Keyword::UNSIGNED) {
13101 Ok(DataType::SmallIntUnsigned(optional_precision?))
13102 } else {
13103 if dialect.supports_data_type_signed_suffix() {
13104 let _ = self.parse_keyword(Keyword::SIGNED);
13105 }
13106 Ok(DataType::SmallInt(optional_precision?))
13107 }
13108 }
13109 Keyword::MEDIUMINT => {
13110 let optional_precision = self.parse_optional_precision();
13111 if self.parse_keyword(Keyword::UNSIGNED) {
13112 Ok(DataType::MediumIntUnsigned(optional_precision?))
13113 } else {
13114 if dialect.supports_data_type_signed_suffix() {
13115 let _ = self.parse_keyword(Keyword::SIGNED);
13116 }
13117 Ok(DataType::MediumInt(optional_precision?))
13118 }
13119 }
13120 Keyword::INT => {
13121 let optional_precision = self.parse_optional_precision();
13122 if self.parse_keyword(Keyword::UNSIGNED) {
13123 Ok(DataType::IntUnsigned(optional_precision?))
13124 } else {
13125 if dialect.supports_data_type_signed_suffix() {
13126 let _ = self.parse_keyword(Keyword::SIGNED);
13127 }
13128 Ok(DataType::Int(optional_precision?))
13129 }
13130 }
13131 Keyword::INT4 => {
13132 let optional_precision = self.parse_optional_precision();
13133 if self.parse_keyword(Keyword::UNSIGNED) {
13134 Ok(DataType::Int4Unsigned(optional_precision?))
13135 } else {
13136 Ok(DataType::Int4(optional_precision?))
13137 }
13138 }
13139 Keyword::INT8 => {
13140 let optional_precision = self.parse_optional_precision();
13141 if self.parse_keyword(Keyword::UNSIGNED) {
13142 Ok(DataType::Int8Unsigned(optional_precision?))
13143 } else {
13144 Ok(DataType::Int8(optional_precision?))
13145 }
13146 }
13147 Keyword::INT16 => Ok(DataType::Int16),
13148 Keyword::INT32 => Ok(DataType::Int32),
13149 Keyword::INT64 => Ok(DataType::Int64),
13150 Keyword::INT128 => Ok(DataType::Int128),
13151 Keyword::INT256 => Ok(DataType::Int256),
13152 Keyword::INTEGER => {
13153 let optional_precision = self.parse_optional_precision();
13154 if self.parse_keyword(Keyword::UNSIGNED) {
13155 Ok(DataType::IntegerUnsigned(optional_precision?))
13156 } else {
13157 if dialect.supports_data_type_signed_suffix() {
13158 let _ = self.parse_keyword(Keyword::SIGNED);
13159 }
13160 Ok(DataType::Integer(optional_precision?))
13161 }
13162 }
13163 Keyword::BIGINT => {
13164 let optional_precision = self.parse_optional_precision();
13165 if self.parse_keyword(Keyword::UNSIGNED) {
13166 Ok(DataType::BigIntUnsigned(optional_precision?))
13167 } else {
13168 if dialect.supports_data_type_signed_suffix() {
13169 let _ = self.parse_keyword(Keyword::SIGNED);
13170 }
13171 Ok(DataType::BigInt(optional_precision?))
13172 }
13173 }
13174 Keyword::HUGEINT => Ok(DataType::HugeInt),
13175 Keyword::UBIGINT => Ok(DataType::UBigInt),
13176 Keyword::UHUGEINT => Ok(DataType::UHugeInt),
13177 Keyword::USMALLINT => Ok(DataType::USmallInt),
13178 Keyword::UTINYINT => Ok(DataType::UTinyInt),
13179 Keyword::UINT8 => Ok(DataType::UInt8),
13180 Keyword::UINT16 => Ok(DataType::UInt16),
13181 Keyword::UINT32 => Ok(DataType::UInt32),
13182 Keyword::UINT64 => Ok(DataType::UInt64),
13183 Keyword::UINT128 => Ok(DataType::UInt128),
13184 Keyword::UINT256 => Ok(DataType::UInt256),
13185 Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
13186 Keyword::NVARCHAR => {
13187 Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
13188 }
13189 Keyword::CHARACTER => {
13190 if self.parse_keyword(Keyword::VARYING) {
13191 Ok(DataType::CharacterVarying(
13192 self.parse_optional_character_length()?,
13193 ))
13194 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
13195 Ok(DataType::CharacterLargeObject(
13196 self.parse_optional_precision()?,
13197 ))
13198 } else {
13199 Ok(DataType::Character(self.parse_optional_character_length()?))
13200 }
13201 }
13202 Keyword::CHAR => {
13203 if self.parse_keyword(Keyword::VARYING) {
13204 Ok(DataType::CharVarying(
13205 self.parse_optional_character_length()?,
13206 ))
13207 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
13208 Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
13209 } else {
13210 Ok(DataType::Char(self.parse_optional_character_length()?))
13211 }
13212 }
13213 Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
13214 Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
13215 Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
13216 Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
13217 Keyword::TINYBLOB => Ok(DataType::TinyBlob),
13218 Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
13219 Keyword::LONGBLOB => Ok(DataType::LongBlob),
13220 Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
13221 Keyword::BIT => {
13222 if self.parse_keyword(Keyword::VARYING) {
13223 Ok(DataType::BitVarying(self.parse_optional_precision()?))
13224 } else {
13225 Ok(DataType::Bit(self.parse_optional_precision()?))
13226 }
13227 }
13228 Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
13229 Keyword::UUID => Ok(DataType::Uuid),
13230 Keyword::DATE => Ok(DataType::Date),
13231 Keyword::DATE32 => Ok(DataType::Date32),
13232 Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
13233 Keyword::DATETIME64 => {
13234 self.prev_token();
13235 let (precision, time_zone) = self.parse_datetime_64()?;
13236 Ok(DataType::Datetime64(precision, time_zone))
13237 }
13238 Keyword::TIMESTAMP => {
13239 let precision = self.parse_optional_precision()?;
13240 let tz = if self.parse_keyword(Keyword::WITH) {
13241 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
13242 TimezoneInfo::WithTimeZone
13243 } else if self.parse_keyword(Keyword::WITHOUT) {
13244 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
13245 TimezoneInfo::WithoutTimeZone
13246 } else {
13247 TimezoneInfo::None
13248 };
13249 Ok(DataType::Timestamp(precision, tz))
13250 }
13251 Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
13252 self.parse_optional_precision()?,
13253 TimezoneInfo::Tz,
13254 )),
13255 Keyword::TIMESTAMP_NTZ => {
13256 Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
13257 }
13258 Keyword::TIME => {
13259 let precision = self.parse_optional_precision()?;
13260 let tz = if self.parse_keyword(Keyword::WITH) {
13261 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
13262 TimezoneInfo::WithTimeZone
13263 } else if self.parse_keyword(Keyword::WITHOUT) {
13264 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
13265 TimezoneInfo::WithoutTimeZone
13266 } else {
13267 TimezoneInfo::None
13268 };
13269 Ok(DataType::Time(precision, tz))
13270 }
13271 Keyword::TIMETZ => Ok(DataType::Time(
13272 self.parse_optional_precision()?,
13273 TimezoneInfo::Tz,
13274 )),
13275 Keyword::INTERVAL => {
13276 if self.dialect.supports_interval_options() {
13277 let fields = self.maybe_parse_optional_interval_fields()?;
13278 let precision = self.parse_optional_precision()?;
13279 Ok(DataType::Interval { fields, precision })
13280 } else {
13281 Ok(DataType::Interval {
13282 fields: None,
13283 precision: None,
13284 })
13285 }
13286 }
13287 Keyword::JSON => Ok(DataType::JSON),
13288 Keyword::JSONB => Ok(DataType::JSONB),
13289 Keyword::REGCLASS => Ok(DataType::Regclass),
13290 Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
13291 Keyword::FIXEDSTRING => {
13292 self.expect_token(&Token::LParen)?;
13293 let character_length = self.parse_literal_uint()?;
13294 self.expect_token(&Token::RParen)?;
13295 Ok(DataType::FixedString(character_length))
13296 }
13297 Keyword::TEXT => Ok(DataType::Text),
13298 Keyword::TINYTEXT => Ok(DataType::TinyText),
13299 Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
13300 Keyword::LONGTEXT => Ok(DataType::LongText),
13301 Keyword::BYTEA => Ok(DataType::Bytea),
13302 Keyword::NUMERIC => Ok(DataType::Numeric(
13303 self.parse_exact_number_optional_precision_scale()?,
13304 )),
13305 Keyword::DECIMAL => {
13306 let precision = self.parse_exact_number_optional_precision_scale()?;
13307
13308 if self.parse_keyword(Keyword::UNSIGNED) {
13309 Ok(DataType::DecimalUnsigned(precision))
13310 } else {
13311 Ok(DataType::Decimal(precision))
13312 }
13313 }
13314 Keyword::DEC => {
13315 let precision = self.parse_exact_number_optional_precision_scale()?;
13316
13317 if self.parse_keyword(Keyword::UNSIGNED) {
13318 Ok(DataType::DecUnsigned(precision))
13319 } else {
13320 Ok(DataType::Dec(precision))
13321 }
13322 }
13323 Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
13324 self.parse_exact_number_optional_precision_scale()?,
13325 )),
13326 Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
13327 self.parse_exact_number_optional_precision_scale()?,
13328 )),
13329 Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
13330 Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
13331 Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
13332 Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
13333 Keyword::ARRAY => {
13334 if self.dialect.supports_array_typedef_without_element_type() {
13335 Ok(DataType::Array(ArrayElemTypeDef::None))
13336 } else if dialect_of!(self is ClickHouseDialect) {
13337 Ok(self.parse_sub_type(|internal_type| {
13338 DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
13339 })?)
13340 } else {
13341 self.expect_token(&Token::Lt)?;
13342 let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
13343 trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
13344 Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
13345 inside_type,
13346 ))))
13347 }
13348 }
13349 Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
13350 self.prev_token();
13351 let field_defs = self.parse_duckdb_struct_type_def()?;
13352 Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
13353 }
13354 Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
13355 {
13356 self.prev_token();
13357 let (field_defs, _trailing_bracket) =
13358 self.parse_struct_type_def(Self::parse_struct_field_def)?;
13359 trailing_bracket = _trailing_bracket;
13360 Ok(DataType::Struct(
13361 field_defs,
13362 StructBracketKind::AngleBrackets,
13363 ))
13364 }
13365 Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
13366 self.prev_token();
13367 let fields = self.parse_union_type_def()?;
13368 Ok(DataType::Union(fields))
13369 }
13370 Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13371 Ok(self.parse_sub_type(DataType::Nullable)?)
13372 }
13373 Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13374 Ok(self.parse_sub_type(DataType::LowCardinality)?)
13375 }
13376 Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13377 self.prev_token();
13378 let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
13379 Ok(DataType::Map(
13380 Box::new(key_data_type),
13381 Box::new(value_data_type),
13382 ))
13383 }
13384 Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13385 self.expect_token(&Token::LParen)?;
13386 let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
13387 self.expect_token(&Token::RParen)?;
13388 Ok(DataType::Nested(field_defs))
13389 }
13390 Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
13391 self.prev_token();
13392 let field_defs = self.parse_click_house_tuple_def()?;
13393 Ok(DataType::Tuple(field_defs))
13394 }
13395 Keyword::TRIGGER => Ok(DataType::Trigger),
13396 Keyword::SETOF => {
13397 let inner = self.parse_data_type()?;
13398 Ok(DataType::SetOf(Box::new(inner)))
13399 }
13400 Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
13401 let _ = self.parse_keyword(Keyword::TYPE);
13402 Ok(DataType::AnyType)
13403 }
13404 Keyword::TABLE => {
13405 if self.peek_token_ref().token == Token::LParen {
13408 let columns = self.parse_returns_table_columns()?;
13409 Ok(DataType::Table(Some(columns)))
13410 } else {
13411 Ok(DataType::Table(None))
13412 }
13413 }
13414 Keyword::SIGNED => {
13415 if self.parse_keyword(Keyword::INTEGER) {
13416 Ok(DataType::SignedInteger)
13417 } else {
13418 Ok(DataType::Signed)
13419 }
13420 }
13421 Keyword::UNSIGNED => {
13422 if self.parse_keyword(Keyword::INTEGER) {
13423 Ok(DataType::UnsignedInteger)
13424 } else {
13425 Ok(DataType::Unsigned)
13426 }
13427 }
13428 Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
13429 Ok(DataType::TsVector)
13430 }
13431 Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
13432 Ok(DataType::TsQuery)
13433 }
13434 _ => {
13435 self.prev_token();
13436 let type_name = self.parse_object_name(false)?;
13437 if let Some(modifiers) = self.parse_optional_type_modifiers()? {
13438 Ok(DataType::Custom(type_name, modifiers))
13439 } else {
13440 Ok(DataType::Custom(type_name, vec![]))
13441 }
13442 }
13443 },
13444 _ => self.expected_at("a data type name", next_token_index),
13445 }?;
13446
13447 if self.dialect.supports_array_typedef_with_brackets() {
13448 while self.consume_token(&Token::LBracket) {
13449 let size = self.maybe_parse(|p| p.parse_literal_uint())?;
13451 self.expect_token(&Token::RBracket)?;
13452 data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
13453 }
13454 }
13455 Ok((data, trailing_bracket))
13456 }
13457
    /// Parses a single column definition inside a `RETURNS TABLE (...)` clause.
    ///
    /// Currently identical to an ordinary column definition; kept as a separate
    /// entry point so the `RETURNS TABLE` grammar can diverge later without
    /// touching callers.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
13461
13462 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13463 self.expect_token(&Token::LParen)?;
13464 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13465 self.expect_token(&Token::RParen)?;
13466 Ok(columns)
13467 }
13468
13469 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13471 self.expect_token(&Token::LParen)?;
13472 let mut values = Vec::new();
13473 loop {
13474 let next_token = self.next_token();
13475 match next_token.token {
13476 Token::SingleQuotedString(value) => values.push(value),
13477 _ => self.expected("a string", next_token)?,
13478 }
13479 let next_token = self.next_token();
13480 match next_token.token {
13481 Token::Comma => (),
13482 Token::RParen => break,
13483 _ => self.expected(", or }", next_token)?,
13484 }
13485 }
13486 Ok(values)
13487 }
13488
13489 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13491 let ident = self.parse_identifier()?;
13492 self.expect_keyword_is(Keyword::AS)?;
13493 let alias = self.parse_identifier()?;
13494 Ok(IdentWithAlias { ident, alias })
13495 }
13496
13497 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13499 let ident = self.parse_identifier()?;
13500 let _after_as = self.parse_keyword(Keyword::AS);
13501 let alias = self.parse_identifier()?;
13502 Ok(IdentWithAlias { ident, alias })
13503 }
13504
13505 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13507 self.parse_comma_separated(|parser| {
13508 parser.expect_token(&Token::LParen)?;
13509 let query = parser.parse_query()?;
13510 parser.expect_token(&Token::RParen)?;
13511 Ok(*query)
13512 })
13513 }
13514
13515 fn parse_distinct_required_set_quantifier(
13517 &mut self,
13518 operator_name: &str,
13519 ) -> Result<SetQuantifier, ParserError> {
13520 let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
13521 match quantifier {
13522 SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
13523 _ => Err(ParserError::ParserError(format!(
13524 "{operator_name} pipe operator requires DISTINCT modifier",
13525 ))),
13526 }
13527 }
13528
13529 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13531 if self.parse_keyword(Keyword::AS) {
13532 Ok(Some(self.parse_identifier()?))
13533 } else {
13534 self.maybe_parse(|parser| parser.parse_identifier())
13536 }
13537 }
13538
13539 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13541 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13542 parser.dialect.is_select_item_alias(explicit, kw, parser)
13543 }
13544 self.parse_optional_alias_inner(None, validator)
13545 }
13546
13547 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
13551 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13552 parser.dialect.is_table_factor_alias(explicit, kw, parser)
13553 }
13554 let explicit = self.peek_keyword(Keyword::AS);
13555 match self.parse_optional_alias_inner(None, validator)? {
13556 Some(name) => {
13557 let columns = self.parse_table_alias_column_defs()?;
13558 Ok(Some(TableAlias {
13559 explicit,
13560 name,
13561 columns,
13562 }))
13563 }
13564 None => Ok(None),
13565 }
13566 }
13567
    /// Parses zero or more MySQL-style table index hints, e.g.
    /// `USE INDEX (i1), IGNORE KEY FOR ORDER BY (i2)`.
    ///
    /// Parsing stops at the first token that is not USE/IGNORE/FORCE.
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                // Unreachable in practice: parse_one_of_keywords only returns
                // one of the three keywords listed above.
                _ => {
                    return self.expected_ref(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token_ref(),
                    )
                }
            };
            // INDEX and KEY are synonyms, but the variant used is preserved
            // so the statement round-trips verbatim.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self
                        .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
                }
            };
            // Optional `FOR JOIN | ORDER BY | GROUP BY` scope clause.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected_ref(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token_ref(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // The index-name list may be empty, e.g. `USE INDEX ()`.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token_ref().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
13626
13627 pub fn parse_optional_alias(
13631 &mut self,
13632 reserved_kwds: &[Keyword],
13633 ) -> Result<Option<Ident>, ParserError> {
13634 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13635 false
13636 }
13637 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13638 }
13639
    /// Core implementation behind the optional-alias parsers.
    ///
    /// When `reserved_kwds` is `Some`, a bare word is accepted as an alias
    /// only if an explicit `AS` preceded it or the word is not reserved.
    /// Otherwise the dialect-supplied `validator` decides whether the word
    /// may serve as an (explicit or implicit) alias. Quoted strings are
    /// always accepted. Returns `Ok(None)` (with the token stream rewound)
    /// when no alias is present and no `AS` was seen.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        // Records whether an explicit AS keyword was consumed.
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Reserved-keyword mode: accept unless the word is reserved and
            // no explicit AS preceded it.
            Token::Word(w)
                if reserved_kwds.is_some()
                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Validator mode: defer to the dialect's judgement.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Quoted strings are always valid aliases.
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                // An explicit AS must be followed by an identifier.
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                // No alias present: rewind past the token just consumed.
                self.prev_token();
                Ok(None)
            }
        }
    }
13684
    /// Parses an optional `GROUP BY` clause, including `GROUP BY ALL`,
    /// dialect-specific `WITH ROLLUP/CUBE/TOTALS` modifiers and a trailing
    /// `GROUPING SETS (...)` modifier.
    ///
    /// Returns `Ok(None)` when the input does not start with `GROUP BY`.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` has no expression list (expressions == None).
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Any number of `WITH ROLLUP|CUBE|TOTALS` modifiers may follow.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords only returns
                        // the keywords listed above.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            // `GROUPING SETS ((a, b), c)` — each element is either a
            // parenthesized tuple or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13741
    /// Parses an optional `ORDER BY` clause, supporting `ORDER BY ALL` and a
    /// trailing ClickHouse-style `INTERPOLATE` clause where the dialect
    /// allows them.
    ///
    /// Returns `Ok(None)` when the input does not start with `ORDER BY`.
    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            let order_by =
                // `ORDER BY ALL` (DuckDB/ClickHouse-style) takes options but
                // no expression list.
                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
                    let order_by_options = self.parse_order_by_options()?;
                    OrderBy {
                        kind: OrderByKind::All(order_by_options),
                        interpolate: None,
                    }
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    let interpolate = if self.dialect.supports_interpolate() {
                        self.parse_interpolations()?
                    } else {
                        None
                    };
                    OrderBy {
                        kind: OrderByKind::Expressions(exprs),
                        interpolate,
                    }
                };
            Ok(Some(order_by))
        } else {
            Ok(None)
        }
    }
13769
    /// Parses an optional LIMIT/OFFSET clause in any of the accepted orders:
    /// `OFFSET .. LIMIT ..`, `LIMIT .. OFFSET ..`, MySQL's
    /// `LIMIT <offset>, <limit>`, and ClickHouse's `LIMIT .. BY ..`.
    ///
    /// Returns `Ok(None)` when none of these clauses are present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may come first (e.g. `OFFSET 5 LIMIT 10`).
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL `LIMIT <offset>, <limit>` — only valid when no OFFSET
            // was already parsed and the first value is present.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                // `expr.is_some()` was checked above, so this never errors;
                // the ok_or_else keeps an explicit message should that change.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // ClickHouse `LIMIT n BY expr, ...`.
            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also trail the LIMIT (e.g. `LIMIT 10 OFFSET 5`).
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit != Some(None)` filters out a bare `LIMIT` with no expression
        // and no other clause, which is not a limit clause at all.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
13821
13822 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13825 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13826 let fn_name = self.parse_object_name(false)?;
13827 self.parse_function_call(fn_name)
13828 .map(TableObject::TableFunction)
13829 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13830 self.parse_parenthesized(|p| p.parse_query())
13831 .map(TableObject::TableQuery)
13832 } else {
13833 self.parse_object_name(false).map(TableObject::TableName)
13834 }
13835 }
13836
    /// Parses a possibly-qualified, possibly-quoted object name such as
    /// `foo` or `myschema."table"`.
    ///
    /// `in_table_clause` indicates the name appears in a table position
    /// (FROM, INTO, etc.), which enables BigQuery's unquoted hyphenated
    /// names. Wildcard (`*`) parts are not permitted through this entry point.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13846
    /// Core parser for period-separated object names.
    ///
    /// Handles several dialect-specific forms: BigQuery unquoted hyphenated
    /// names in table clauses, wildcard (`*`) parts when `allow_wildcards`
    /// is set, Snowflake-style double-dot notation (`db..table`), and name
    /// parts that are actually function calls. As a final BigQuery-specific
    /// pass, quoted parts containing embedded periods (e.g. `"a.b".c`) are
    /// split into separate identifier parts.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names may contain unquoted hyphens, e.g.
            // `my-project.dataset.table`.
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // A `*` part, e.g. in `GRANT ... ON schema.*`.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    // NOTE(review): appears unreachable — the BigQuery
                    // in-table-clause case is already handled by the branch
                    // above; confirm before removing.
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // Double-dot notation (`db..table`): insert an empty part
                    // for the omitted schema; the period itself is consumed
                    // by the check at the bottom of the loop.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a name part to be a function call,
                    // e.g. Snowflake's IDENTIFIER(...).
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery accepts any number of quoted identifiers with embedded
        // periods, e.g. `"a.b.c"` ≡ `a.b.c`; split such parts apart.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13943
    /// Scans forward collecting every word token as an identifier, skipping
    /// all non-word tokens, until EOF, `=`, `;` or `|>` is reached.
    ///
    /// The terminating token is left unconsumed.
    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];
        loop {
            let token = self.peek_token_ref();
            match &token.token {
                Token::Word(w) => {
                    idents.push(w.to_ident(token.span));
                }
                // Stop at a terminator without consuming it.
                Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
                    break
                }
                // Non-word tokens between identifiers are skipped.
                _ => {}
            }
            self.advance_token();
        }
        Ok(idents)
    }
13962
    /// Parses a strictly period-separated multipart identifier
    /// (`word[.word]*`) until EOF, erroring on empty input, a trailing
    /// period, or any non-word/non-period token.
    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];

        // The first part must be a word; EOF here means empty input.
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
            Token::EOF => {
                return Err(ParserError::ParserError(
                    "Empty input when parsing identifier".to_string(),
                ))?
            }
            token => {
                return Err(ParserError::ParserError(format!(
                    "Unexpected token in identifier: {token}"
                )))?
            }
        };

        // Subsequent parts: each period must be followed by another word.
        loop {
            match self.next_token().token {
                Token::Period => {
                    let next_token = self.next_token();
                    match next_token.token {
                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
                        Token::EOF => {
                            return Err(ParserError::ParserError(
                                "Trailing period in identifier".to_string(),
                            ))?
                        }
                        token => {
                            return Err(ParserError::ParserError(format!(
                                "Unexpected token following period in identifier: {token}"
                            )))?
                        }
                    }
                }
                Token::EOF => break,
                token => {
                    return Err(ParserError::ParserError(format!(
                        "Unexpected token in identifier: {token}"
                    )))?;
                }
            }
        }

        Ok(idents)
    }
14052
14053 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
14055 let next_token = self.next_token();
14056 match next_token.token {
14057 Token::Word(w) => Ok(w.into_ident(next_token.span)),
14058 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
14059 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
14060 _ => self.expected("identifier", next_token),
14061 }
14062 }
14063
    /// Parses a BigQuery-style unquoted identifier that may contain hyphens,
    /// e.g. `my-project`, by stitching together word/number tokens separated
    /// by `-` tokens.
    ///
    /// Returns the identifier and a flag that is `true` when the identifier
    /// was terminated by a number token ending in `.` (the tokenizer folds a
    /// trailing period into the number, so the caller must treat it as a
    /// consumed `.` separator).
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted words may continue across hyphens.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // A number ending in `.` means the tokenizer
                                // absorbed the part separator, e.g. the `1.`
                                // in `proj-1.dataset`: keep only the digits
                                // and report the period as already consumed.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A trailing numeric segment must be followed
                                // by whitespace unless a `.` part separator
                                // comes next.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement recorded above so
                    // that e.g. `proj-1x` is rejected.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to the ordinary identifier parser.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
14141
14142 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
14144 if self.consume_token(&Token::LParen) {
14145 if self.peek_token_ref().token == Token::RParen {
14146 self.next_token();
14147 Ok(vec![])
14148 } else {
14149 let cols = self.parse_comma_separated_with_trailing_commas(
14150 Parser::parse_view_column,
14151 self.dialect.supports_column_definition_trailing_commas(),
14152 Self::is_reserved_for_column_alias,
14153 )?;
14154 self.expect_token(&Token::RParen)?;
14155 Ok(cols)
14156 }
14157 } else {
14158 Ok(vec![])
14159 }
14160 }
14161
14162 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
14164 let name = self.parse_identifier()?;
14165 let options = self.parse_view_column_options()?;
14166 let data_type = if dialect_of!(self is ClickHouseDialect) {
14167 Some(self.parse_data_type()?)
14168 } else {
14169 None
14170 };
14171 Ok(ViewColumnDef {
14172 name,
14173 data_type,
14174 options,
14175 })
14176 }
14177
14178 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
14179 let mut options = Vec::new();
14180 loop {
14181 let option = self.parse_optional_column_option()?;
14182 if let Some(option) = option {
14183 options.push(option);
14184 } else {
14185 break;
14186 }
14187 }
14188 if options.is_empty() {
14189 Ok(None)
14190 } else if self.dialect.supports_space_separated_column_options() {
14191 Ok(Some(ColumnOptions::SpaceSeparated(options)))
14192 } else {
14193 Ok(Some(ColumnOptions::CommaSeparated(options)))
14194 }
14195 }
14196
14197 pub fn parse_parenthesized_column_list(
14200 &mut self,
14201 optional: IsOptional,
14202 allow_empty: bool,
14203 ) -> Result<Vec<Ident>, ParserError> {
14204 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
14205 }
14206
14207 pub fn parse_parenthesized_compound_identifier_list(
14209 &mut self,
14210 optional: IsOptional,
14211 allow_empty: bool,
14212 ) -> Result<Vec<Expr>, ParserError> {
14213 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
14214 Ok(Expr::CompoundIdentifier(
14215 p.parse_period_separated(|p| p.parse_identifier())?,
14216 ))
14217 })
14218 }
14219
    /// Parses the mandatory parenthesized column list of a CREATE INDEX
    /// statement, where each element may be a full index expression
    /// (column, ordering, operator class, etc.).
    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
            p.parse_create_index_expr()
        })
    }
14227
    /// Parses a parenthesized, comma-separated list of qualified column
    /// names (object names, possibly schema-qualified).
    pub fn parse_parenthesized_qualified_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<ObjectName>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            p.parse_object_name(true)
        })
    }
14239
14240 fn parse_parenthesized_column_list_inner<F, T>(
14243 &mut self,
14244 optional: IsOptional,
14245 allow_empty: bool,
14246 mut f: F,
14247 ) -> Result<Vec<T>, ParserError>
14248 where
14249 F: FnMut(&mut Parser) -> Result<T, ParserError>,
14250 {
14251 if self.consume_token(&Token::LParen) {
14252 if allow_empty && self.peek_token_ref().token == Token::RParen {
14253 self.next_token();
14254 Ok(vec![])
14255 } else {
14256 let cols = self.parse_comma_separated(|p| f(p))?;
14257 self.expect_token(&Token::RParen)?;
14258 Ok(cols)
14259 }
14260 } else if optional == Optional {
14261 Ok(vec![])
14262 } else {
14263 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
14264 }
14265 }
14266
14267 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
14269 if self.consume_token(&Token::LParen) {
14270 let cols = self.parse_comma_separated(|p| {
14271 let name = p.parse_identifier()?;
14272 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
14273 Ok(TableAliasColumnDef { name, data_type })
14274 })?;
14275 self.expect_token(&Token::RParen)?;
14276 Ok(cols)
14277 } else {
14278 Ok(vec![])
14279 }
14280 }
14281
14282 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
14284 self.expect_token(&Token::LParen)?;
14285 let n = self.parse_literal_uint()?;
14286 self.expect_token(&Token::RParen)?;
14287 Ok(n)
14288 }
14289
14290 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
14292 if self.consume_token(&Token::LParen) {
14293 let n = self.parse_literal_uint()?;
14294 self.expect_token(&Token::RParen)?;
14295 Ok(Some(n))
14296 } else {
14297 Ok(None)
14298 }
14299 }
14300
    /// Parses the optional field qualifier of an INTERVAL type, e.g.
    /// `YEAR`, `DAY TO SECOND`, `MINUTE TO SECOND`.
    ///
    /// Returns `Ok(None)` when no interval field keyword follows; only the
    /// `TO <field>` combinations permitted by the SQL standard are accepted.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            // YEAR may only extend to MONTH.
            Some(Keyword::YEAR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            // DAY may extend to HOUR, MINUTE or SECOND.
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        // Unreachable: expect_one_of_keywords only returns
                        // the keywords listed above.
                        _ => {
                            self.prev_token();
                            self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            // HOUR may extend to MINUTE or SECOND.
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        // Unreachable, as above.
                        _ => {
                            self.prev_token();
                            self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            // MINUTE may only extend to SECOND.
            Some(Keyword::MINUTE) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            // MONTH and SECOND cannot start a range.
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            // Unreachable: parse_one_of_keywords only returns listed keywords.
            Some(_) => {
                self.prev_token();
                self.expected_ref(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token_ref(),
                )
            }
            None => Ok(None),
        }
    }
14379
14380 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
14388 self.expect_keyword_is(Keyword::DATETIME64)?;
14389 self.expect_token(&Token::LParen)?;
14390 let precision = self.parse_literal_uint()?;
14391 let time_zone = if self.consume_token(&Token::Comma) {
14392 Some(self.parse_literal_string()?)
14393 } else {
14394 None
14395 };
14396 self.expect_token(&Token::RParen)?;
14397 Ok((precision, time_zone))
14398 }
14399
14400 pub fn parse_optional_character_length(
14402 &mut self,
14403 ) -> Result<Option<CharacterLength>, ParserError> {
14404 if self.consume_token(&Token::LParen) {
14405 let character_length = self.parse_character_length()?;
14406 self.expect_token(&Token::RParen)?;
14407 Ok(Some(character_length))
14408 } else {
14409 Ok(None)
14410 }
14411 }
14412
14413 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
14415 if self.consume_token(&Token::LParen) {
14416 let binary_length = self.parse_binary_length()?;
14417 self.expect_token(&Token::RParen)?;
14418 Ok(Some(binary_length))
14419 } else {
14420 Ok(None)
14421 }
14422 }
14423
14424 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
14426 if self.parse_keyword(Keyword::MAX) {
14427 return Ok(CharacterLength::Max);
14428 }
14429 let length = self.parse_literal_uint()?;
14430 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
14431 Some(CharLengthUnits::Characters)
14432 } else if self.parse_keyword(Keyword::OCTETS) {
14433 Some(CharLengthUnits::Octets)
14434 } else {
14435 None
14436 };
14437 Ok(CharacterLength::IntegerLength { length, unit })
14438 }
14439
14440 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
14442 if self.parse_keyword(Keyword::MAX) {
14443 return Ok(BinaryLength::Max);
14444 }
14445 let length = self.parse_literal_uint()?;
14446 Ok(BinaryLength::IntegerLength { length })
14447 }
14448
14449 pub fn parse_optional_precision_scale(
14451 &mut self,
14452 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14453 if self.consume_token(&Token::LParen) {
14454 let n = self.parse_literal_uint()?;
14455 let scale = if self.consume_token(&Token::Comma) {
14456 Some(self.parse_literal_uint()?)
14457 } else {
14458 None
14459 };
14460 self.expect_token(&Token::RParen)?;
14461 Ok((Some(n), scale))
14462 } else {
14463 Ok((None, None))
14464 }
14465 }
14466
14467 pub fn parse_exact_number_optional_precision_scale(
14469 &mut self,
14470 ) -> Result<ExactNumberInfo, ParserError> {
14471 if self.consume_token(&Token::LParen) {
14472 let precision = self.parse_literal_uint()?;
14473 let scale = if self.consume_token(&Token::Comma) {
14474 Some(self.parse_signed_integer()?)
14475 } else {
14476 None
14477 };
14478
14479 self.expect_token(&Token::RParen)?;
14480
14481 match scale {
14482 None => Ok(ExactNumberInfo::Precision(precision)),
14483 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14484 }
14485 } else {
14486 Ok(ExactNumberInfo::None)
14487 }
14488 }
14489
14490 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14492 let is_negative = self.consume_token(&Token::Minus);
14493
14494 if !is_negative {
14495 let _ = self.consume_token(&Token::Plus);
14496 }
14497
14498 let current_token = self.peek_token_ref();
14499 match ¤t_token.token {
14500 Token::Number(s, _) => {
14501 let s = s.clone();
14502 let span_start = current_token.span.start;
14503 self.advance_token();
14504 let value = Self::parse::<i64>(s, span_start)?;
14505 Ok(if is_negative { -value } else { value })
14506 }
14507 _ => self.expected_ref("number", current_token),
14508 }
14509 }
14510
14511 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
14513 if self.consume_token(&Token::LParen) {
14514 let mut modifiers = Vec::new();
14515 loop {
14516 let next_token = self.next_token();
14517 match next_token.token {
14518 Token::Word(w) => modifiers.push(w.to_string()),
14519 Token::Number(n, _) => modifiers.push(n),
14520 Token::SingleQuotedString(s) => modifiers.push(s),
14521
14522 Token::Comma => {
14523 continue;
14524 }
14525 Token::RParen => {
14526 break;
14527 }
14528 _ => self.expected("type modifiers", next_token)?,
14529 }
14530 }
14531
14532 Ok(Some(modifiers))
14533 } else {
14534 Ok(None)
14535 }
14536 }
14537
14538 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14540 where
14541 F: FnOnce(Box<DataType>) -> DataType,
14542 {
14543 self.expect_token(&Token::LParen)?;
14544 let inside_type = self.parse_data_type()?;
14545 self.expect_token(&Token::RParen)?;
14546 Ok(parent_type(inside_type.into()))
14547 }
14548
    /// Parses a DELETE statement and boxes it as a `SetExpr::Delete`, for
    /// positions where DELETE can appear inside a set expression.
    fn parse_delete_setexpr_boxed(
        &mut self,
        delete_token: TokenWithSpan,
    ) -> Result<Box<SetExpr>, ParserError> {
        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
    }
14558
    /// Parses the body of a DELETE statement (the `DELETE` keyword itself has
    /// already been consumed; its token is passed in for span tracking).
    ///
    /// Supports the multi-table form `DELETE t1, t2 FROM ...`, dialects that
    /// omit `FROM` entirely (BigQuery/Oracle/Generic), and the optional
    /// OUTPUT, USING, WHERE, RETURNING, ORDER BY and LIMIT clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // `tables` is the MySQL multi-table target list before FROM;
        // `with_from_keyword` records whether FROM appeared at all.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            // BigQuery/Oracle (and Generic) allow `DELETE <table>` with no FROM.
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                // `DELETE t1, t2 FROM ...` — table names precede FROM.
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        // SQL Server-style OUTPUT clause.
        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // MySQL permits ORDER BY and LIMIT on single-table deletes.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
14623
14624 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
14627 let modifier_keyword =
14628 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
14629
14630 let id = self.parse_literal_uint()?;
14631
14632 let modifier = match modifier_keyword {
14633 Some(Keyword::CONNECTION) => Some(KillType::Connection),
14634 Some(Keyword::QUERY) => Some(KillType::Query),
14635 Some(Keyword::MUTATION) => {
14636 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
14637 Some(KillType::Mutation)
14638 } else {
14639 self.expected_ref(
14640 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
14641 self.peek_token_ref(),
14642 )?
14643 }
14644 }
14645 _ => None,
14646 };
14647
14648 Ok(Statement::Kill { modifier, id })
14649 }
14650
    /// Parse an `EXPLAIN` / `DESCRIBE` statement; `describe_alias` records
    /// which keyword introduced it.
    ///
    /// If a full statement follows, a [`Statement::Explain`] is produced;
    /// otherwise the input is treated as a table description
    /// (`DESCRIBE [EXTENDED|FORMATTED] [TABLE] <name>`).
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options, e.g. `EXPLAIN (ANALYZE, FORMAT JSON)`.
        // Only valid for EXPLAIN proper, not for DESCRIBE aliases.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            // Classic modifier sequence: [ANALYZE] [VERBOSE] [FORMAT <kind>].
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // A nested EXPLAIN/DESCRIBE is rejected: it must be the root.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            // No parseable statement follows: describe a table instead.
            _ => {
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                // Consume the optional TABLE keyword only for dialects that
                // use it; record whether it actually appeared.
                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14721
    /// Parse a query expression: an optional `WITH` clause followed by the
    /// query body and its trailing clauses (`ORDER BY`, limit, `FETCH`,
    /// locking, `SETTINGS`, `FORMAT`, pipe operators).
    ///
    /// DML statements (`INSERT`/`UPDATE`/`DELETE`/`MERGE`) are also
    /// accepted as a query body, e.g. after a `WITH` clause; those take no
    /// trailing clauses. The recursion counter bounds subquery nesting.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Decremented for the lifetime of this call; errors with
        // RecursionLimitExceeded when the depth budget is exhausted.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            // Ordinary query: body first, then the trailing clauses in
            // their fixed order.
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // `FOR` may introduce either a single FOR XML/JSON/BROWSE clause
            // (which terminates the loop) or any number of lock clauses.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // `FORMAT <ident>` / `FORMAT NULL` for dialects that support it.
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14853
    /// Parse a sequence of pipe operators (`|> <keyword> ...`), as used in
    /// BigQuery-style pipe syntax. Returns the operators in source order;
    /// stops at the first token that is not `|>`.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            // Every pipe operator starts with one of these keywords; anything
            // else is rejected here so the match below cannot see it.
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    // `LIMIT <expr> [OFFSET <expr>]`.
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // `AGGREGATE [exprs] [GROUP BY exprs]` — the leading expr
                    // list is absent when GROUP immediately follows.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::INTERSECT => {
                    // INTERSECT/EXCEPT require an explicit DISTINCT quantifier.
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    // `CALL fn(args) [alias]` — must parse to a function expr.
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // `PIVOT (aggs FOR col IN ( ANY [ORDER BY ...] | subquery
                    // | expr list )) [alias]`.
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // `UNPIVOT (value FOR name IN ( cols )) [alias]`.
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Rewind so parse_joins sees the join keyword itself.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
15072
15073 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
15074 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
15075 {
15076 let key_values = self.parse_comma_separated(|p| {
15077 let key = p.parse_identifier()?;
15078 p.expect_token(&Token::Eq)?;
15079 let value = p.parse_expr()?;
15080 Ok(Setting { key, value })
15081 })?;
15082 Some(key_values)
15083 } else {
15084 None
15085 };
15086 Ok(settings)
15087 }
15088
15089 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
15091 if self.parse_keyword(Keyword::XML) {
15092 Ok(Some(self.parse_for_xml()?))
15093 } else if self.parse_keyword(Keyword::JSON) {
15094 Ok(Some(self.parse_for_json()?))
15095 } else if self.parse_keyword(Keyword::BROWSE) {
15096 Ok(Some(ForClause::Browse))
15097 } else {
15098 Ok(None)
15099 }
15100 }
15101
    /// Parse the body of a `FOR XML` clause: the mandatory mode
    /// (`RAW [('elem')]`, `AUTO`, `EXPLICIT`, or `PATH [('elem')]`)
    /// followed by comma-separated options (`ELEMENTS`, `BINARY BASE64`,
    /// `ROOT('...')`, `TYPE`).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            // RAW takes an optional parenthesized element name.
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            // PATH takes an optional parenthesized element name, like RAW.
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        // Options are comma-separated; an unrecognized word after a comma is
        // left unconsumed and ends the loop on the next comma check.
        while self.peek_token_ref().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                self.expect_keyword_is(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
15156
15157 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
15159 let for_json = if self.parse_keyword(Keyword::AUTO) {
15160 ForJson::Auto
15161 } else if self.parse_keyword(Keyword::PATH) {
15162 ForJson::Path
15163 } else {
15164 return Err(ParserError::ParserError(
15165 "Expected FOR JSON [AUTO | PATH ]".to_string(),
15166 ));
15167 };
15168 let mut root = None;
15169 let mut include_null_values = false;
15170 let mut without_array_wrapper = false;
15171 while self.peek_token_ref().token == Token::Comma {
15172 self.next_token();
15173 if self.parse_keyword(Keyword::ROOT) {
15174 self.expect_token(&Token::LParen)?;
15175 root = Some(self.parse_literal_string()?);
15176 self.expect_token(&Token::RParen)?;
15177 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
15178 include_null_values = true;
15179 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
15180 without_array_wrapper = true;
15181 }
15182 }
15183 Ok(ForClause::Json {
15184 for_json,
15185 root,
15186 include_null_values,
15187 without_array_wrapper,
15188 })
15189 }
15190
    /// Parse a single CTE in a `WITH` list:
    /// `<name> [( columns )] AS [[NOT] MATERIALIZED] ( <query> )`,
    /// with dialect-dependent variations (optional `AS`, Postgres-only
    /// materialization hints, and an optional trailing `FROM <ident>`).
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // `AS`-less form: `<name> ( <query> )`. Attempted speculatively via
        // maybe_parse; on failure we fall through to the standard form.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Optional parenthesized column alias list before `AS`.
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // Postgres-only: `AS [NOT] MATERIALIZED ( ... )`.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        // Trailing `FROM <ident>` only for from-first dialects here.
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
15265
    /// Parse the body of a query expression: a `SELECT` (or a `FROM`-first
    /// select where supported), a parenthesized subquery, a
    /// `VALUES`/`VALUE` list, or `TABLE <name>` — then fold any trailing
    /// set operations binding at least as tightly as `precedence`.
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // Parenthesized query body, e.g. `(SELECT ...) UNION ...`.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, false)?)
        } else if self.parse_keyword(Keyword::VALUE) {
            // Singular `VALUE` keyword variant.
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, true)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected_ref(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token_ref(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
15303
15304 fn parse_remaining_set_exprs(
15308 &mut self,
15309 mut expr: SetExpr,
15310 precedence: u8,
15311 ) -> Result<Box<SetExpr>, ParserError> {
15312 loop {
15313 let op = self.parse_set_operator(&self.peek_token().token);
15315 let next_precedence = match op {
15316 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
15318 10
15319 }
15320 Some(SetOperator::Intersect) => 20,
15322 None => break,
15324 };
15325 if precedence >= next_precedence {
15326 break;
15327 }
15328 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
15330 expr = SetExpr::SetOperation {
15331 left: Box::new(expr),
15332 op: op.unwrap(),
15333 set_quantifier,
15334 right: self.parse_query_body(next_precedence)?,
15335 };
15336 }
15337
15338 Ok(expr.into())
15339 }
15340
15341 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
15343 match token {
15344 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
15345 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
15346 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
15347 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
15348 _ => None,
15349 }
15350 }
15351
15352 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
15354 match op {
15355 Some(
15356 SetOperator::Except
15357 | SetOperator::Intersect
15358 | SetOperator::Union
15359 | SetOperator::Minus,
15360 ) => {
15361 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
15362 SetQuantifier::DistinctByName
15363 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15364 SetQuantifier::ByName
15365 } else if self.parse_keyword(Keyword::ALL) {
15366 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15367 SetQuantifier::AllByName
15368 } else {
15369 SetQuantifier::All
15370 }
15371 } else if self.parse_keyword(Keyword::DISTINCT) {
15372 SetQuantifier::Distinct
15373 } else {
15374 SetQuantifier::None
15375 }
15376 }
15377 _ => SetQuantifier::None,
15378 }
15379 }
15380
    /// Parse a `SELECT` statement (the projection and all clauses up to,
    /// but not including, `ORDER BY`/limit), including the `FROM`-first
    /// variants (`FROM t SELECT ...` and `FROM t` with no `SELECT`) for
    /// dialects that support them.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // FROM-first form: parse the FROM clause up front; if no SELECT
        // follows, the statement is a bare `FROM t` with empty projection.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                return Ok(Select {
                    // No SELECT keyword exists, so the FROM token anchors
                    // the statement's span instead.
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        let value_table_mode = self.parse_value_table_mode()?;

        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // TOP may come before or after DISTINCT depending on the dialect;
        // top_before_distinct records which position was used.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // A quantifier already consumed by parse_select_modifiers wins.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Reuse the FROM clause already parsed in the FROM-first path, or
        // parse an ordinary trailing FROM now.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive-style `LATERAL VIEW [OUTER] <expr> <name> [AS] <aliases>`,
        // repeated any number of times.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order;
        // window_before_qualify records which came first.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15608
    /// Collect optimizer hints from leading comment tokens.
    ///
    /// For dialects with comment-hint support, scans whitespace and comment
    /// tokens (without the usual whitespace skipping), extracting hints from
    /// comments of the form `<prefix>+<text>` (e.g. `/*+ ... */`) and
    /// consuming the comments and whitespace either way. Stops at the first
    /// non-whitespace token. Returns an empty list — consuming nothing —
    /// when the dialect does not support comment hints.
    fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
        let supports_hints = self.dialect.supports_comment_optimizer_hint();
        if !supports_hints {
            return Ok(vec![]);
        }
        let mut hints = vec![];
        loop {
            let t = self.peek_nth_token_no_skip_ref(0);
            // Anything other than whitespace/comment ends the scan.
            let Token::Whitespace(ws) = &t.token else {
                break;
            };
            match ws {
                Whitespace::SingleLineComment { comment, prefix } => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::SingleLine {
                                prefix: prefix.clone(),
                            },
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::MultiLineComment(comment) => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::MultiLine,
                        });
                    }
                    self.next_token_no_skip();
                }
                // Plain whitespace between hints is consumed and ignored.
                Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
                    self.next_token_no_skip();
                }
            }
        }
        Ok(hints)
    }
15658
15659 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15662 let (before_plus, text) = comment.split_once('+')?;
15663 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15664 Some((before_plus.to_string(), text.to_string()))
15665 } else {
15666 None
15667 }
15668 }
15669
    /// Parse SELECT modifiers (`HIGH_PRIORITY`, `STRAIGHT_JOIN`,
    /// `SQL_SMALL_RESULT`, ...), which may be freely interleaved with
    /// `ALL` / `DISTINCT` / `DISTINCTROW`.
    ///
    /// Returns `(modifiers, distinct)`: `modifiers` is `Some` only when at
    /// least one flag was set; `distinct` holds the first quantifier seen.
    /// A second `ALL`/`DISTINCT`/`DISTINCTROW` fails the `is_none()` guards
    /// and falls through to the error arm.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    // Rewind so parse_all_or_distinct sees the keyword itself.
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                _ => {
                    // Duplicate quantifier (a guard above failed): rewind and
                    // report the offending token.
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Only report modifiers when at least one flag was actually set.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15731
15732 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
15733 if !dialect_of!(self is BigQueryDialect) {
15734 return Ok(None);
15735 }
15736
15737 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
15738 Some(ValueTableMode::DistinctAsValue)
15739 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
15740 Some(ValueTableMode::DistinctAsStruct)
15741 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
15742 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
15743 {
15744 Some(ValueTableMode::AsValue)
15745 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
15746 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
15747 {
15748 Some(ValueTableMode::AsStruct)
15749 } else if self.parse_keyword(Keyword::AS) {
15750 self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
15751 } else {
15752 None
15753 };
15754
15755 Ok(mode)
15756 }
15757
15758 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15762 where
15763 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15764 {
15765 let current_state = self.state;
15766 self.state = state;
15767 let res = f(self);
15768 self.state = current_state;
15769 res
15770 }
15771
15772 pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
15774 let mut clauses = Vec::with_capacity(2);
15775 loop {
15776 if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
15777 clauses.push(ConnectByKind::StartWith {
15778 start_token: self.token_at(idx).clone().into(),
15779 condition: self.parse_expr()?.into(),
15780 });
15781 } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
15782 {
15783 clauses.push(ConnectByKind::ConnectBy {
15784 connect_token: self.token_at(idx).clone().into(),
15785 nocycle: self.parse_keyword(Keyword::NOCYCLE),
15786 relationships: self.with_state(ParserState::ConnectBy, |parser| {
15787 parser.parse_comma_separated(Parser::parse_expr)
15788 })?,
15789 });
15790 } else {
15791 break;
15792 }
15793 }
15794 Ok(clauses)
15795 }
15796
15797 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15799 let token1 = self.next_token();
15800 let token2 = self.next_token();
15801 let token3 = self.next_token();
15802
15803 let table_name;
15804 let schema_name;
15805 if token2 == Token::Period {
15806 match token1.token {
15807 Token::Word(w) => {
15808 schema_name = w.value;
15809 }
15810 _ => {
15811 return self.expected("Schema name", token1);
15812 }
15813 }
15814 match token3.token {
15815 Token::Word(w) => {
15816 table_name = w.value;
15817 }
15818 _ => {
15819 return self.expected("Table name", token3);
15820 }
15821 }
15822 Ok(Table {
15823 table_name: Some(table_name),
15824 schema_name: Some(schema_name),
15825 })
15826 } else {
15827 match token1.token {
15828 Token::Word(w) => {
15829 table_name = w.value;
15830 }
15831 _ => {
15832 return self.expected("Table name", token1);
15833 }
15834 }
15835 Ok(Table {
15836 table_name: Some(table_name),
15837 schema_name: None,
15838 })
15839 }
15840 }
15841
15842 fn parse_set_role(
15844 &mut self,
15845 modifier: Option<ContextModifier>,
15846 ) -> Result<Statement, ParserError> {
15847 self.expect_keyword_is(Keyword::ROLE)?;
15848
15849 let role_name = if self.parse_keyword(Keyword::NONE) {
15850 None
15851 } else {
15852 Some(self.parse_identifier()?)
15853 };
15854 Ok(Statement::Set(Set::SetRole {
15855 context_modifier: modifier,
15856 role_name,
15857 }))
15858 }
15859
15860 fn parse_set_values(
15861 &mut self,
15862 parenthesized_assignment: bool,
15863 ) -> Result<Vec<Expr>, ParserError> {
15864 let mut values = vec![];
15865
15866 if parenthesized_assignment {
15867 self.expect_token(&Token::LParen)?;
15868 }
15869
15870 loop {
15871 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15872 expr
15873 } else if let Ok(expr) = self.parse_expr() {
15874 expr
15875 } else {
15876 self.expected_ref("variable value", self.peek_token_ref())?
15877 };
15878
15879 values.push(value);
15880 if self.consume_token(&Token::Comma) {
15881 continue;
15882 }
15883
15884 if parenthesized_assignment {
15885 self.expect_token(&Token::RParen)?;
15886 }
15887 return Ok(values);
15888 }
15889 }
15890
15891 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15892 let modifier =
15893 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15894
15895 Self::keyword_to_modifier(modifier)
15896 }
15897
15898 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15900 let scope = self.parse_context_modifier();
15901
15902 let name = if self.dialect.supports_parenthesized_set_variables()
15903 && self.consume_token(&Token::LParen)
15904 {
15905 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15909 } else {
15910 self.parse_object_name(false)?
15911 };
15912
15913 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15914 return self.expected_ref("assignment operator", self.peek_token_ref());
15915 }
15916
15917 let value = self.parse_expr()?;
15918
15919 Ok(SetAssignment { scope, name, value })
15920 }
15921
    /// Parses the remainder of a `SET` statement (the `SET` keyword has
    /// already been consumed by the caller).
    ///
    /// Tries, in order: HIVEVAR syntax (`SET hivevar:name = value`),
    /// `SET ROLE`, `SET TIME ZONE`/`SET TIMEZONE`, `SET NAMES`,
    /// `SET CHARACTERISTICS AS TRANSACTION`, `SET TRANSACTION`,
    /// `SET ... AUTHORIZATION`, comma-separated assignment lists,
    /// single/parenthesized variable assignments, and finally
    /// dialect-specific session parameters. Branch order is significant:
    /// each test consumes tokens as it matches.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // A SESSION/LOCAL/GLOBAL scope is only parsed when HIVEVAR is absent.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET [scope] ROLE ...` — attempted speculatively so that failure
        // rewinds the token stream.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // Assignment form, e.g. `SET TIMEZONE = 'UTC'`.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // Operator-less form, e.g. `SET TIME ZONE 'UTC'`.
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            // Optional `COLLATE <literal>` suffix.
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET CHARACTERISTICS AS TRANSACTION ...` is session-scoped.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET ... AUTHORIZATION` requires an explicit scope modifier.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            if scope.is_some() {
                // Rewind the scope keyword so parse_set_assignment can
                // re-read it as part of the first assignment.
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten into SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a single variable name or a parenthesized list of names.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        if self.dialect.supports_set_stmt_without_operator() {
            // Operator-less form (e.g. `SET NOCOUNT ON`): rewind so the
            // parameter name is re-read by parse_set_session_params.
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
16091
16092 pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
16094 if self.parse_keyword(Keyword::STATISTICS) {
16095 let topic = match self.parse_one_of_keywords(&[
16096 Keyword::IO,
16097 Keyword::PROFILE,
16098 Keyword::TIME,
16099 Keyword::XML,
16100 ]) {
16101 Some(Keyword::IO) => SessionParamStatsTopic::IO,
16102 Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
16103 Some(Keyword::TIME) => SessionParamStatsTopic::Time,
16104 Some(Keyword::XML) => SessionParamStatsTopic::Xml,
16105 _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
16106 };
16107 let value = self.parse_session_param_value()?;
16108 Ok(
16109 Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
16110 topic,
16111 value,
16112 }))
16113 .into(),
16114 )
16115 } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
16116 let obj = self.parse_object_name(false)?;
16117 let value = self.parse_session_param_value()?;
16118 Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
16119 SetSessionParamIdentityInsert { obj, value },
16120 ))
16121 .into())
16122 } else if self.parse_keyword(Keyword::OFFSETS) {
16123 let keywords = self.parse_comma_separated(|parser| {
16124 let next_token = parser.next_token();
16125 match &next_token.token {
16126 Token::Word(w) => Ok(w.to_string()),
16127 _ => parser.expected("SQL keyword", next_token),
16128 }
16129 })?;
16130 let value = self.parse_session_param_value()?;
16131 Ok(
16132 Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
16133 keywords,
16134 value,
16135 }))
16136 .into(),
16137 )
16138 } else {
16139 let names = self.parse_comma_separated(|parser| {
16140 let next_token = parser.next_token();
16141 match next_token.token {
16142 Token::Word(w) => Ok(w.to_string()),
16143 _ => parser.expected("Session param name", next_token),
16144 }
16145 })?;
16146 let value = self.parse_expr()?.to_string();
16147 Ok(
16148 Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
16149 names,
16150 value,
16151 }))
16152 .into(),
16153 )
16154 }
16155 }
16156
16157 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
16158 if self.parse_keyword(Keyword::ON) {
16159 Ok(SessionParamValue::On)
16160 } else if self.parse_keyword(Keyword::OFF) {
16161 Ok(SessionParamValue::Off)
16162 } else {
16163 self.expected_ref("ON or OFF", self.peek_token_ref())
16164 }
16165 }
16166
    /// Parses a `SHOW` statement (the `SHOW` keyword has been consumed).
    ///
    /// Leading flag keywords (TERSE/EXTENDED/FULL/SESSION/GLOBAL/EXTERNAL)
    /// are consumed first, then the statement kind is dispatched on the next
    /// keyword. Falls back to `SHOW <variable>` when nothing matches.
    /// Branch order matters because every test consumes tokens on success.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // EXTENDED/FULL only combine with the SHOW kinds tested above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): the keyword is consumed before the dialect check,
            // so for other dialects VARIABLES is eaten and parsing falls
            // through to the next branches — confirm this is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <identifier> [<identifier> ...]`.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
16230
16231 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
16232 Ok(Statement::ShowCharset(ShowCharset {
16234 is_shorthand,
16235 filter: self.parse_show_statement_filter()?,
16236 }))
16237 }
16238
16239 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
16240 let history = self.parse_keyword(Keyword::HISTORY);
16241 let show_options = self.parse_show_stmt_options()?;
16242 Ok(Statement::ShowCatalogs {
16243 terse,
16244 history,
16245 show_options,
16246 })
16247 }
16248
16249 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
16250 let history = self.parse_keyword(Keyword::HISTORY);
16251 let show_options = self.parse_show_stmt_options()?;
16252 Ok(Statement::ShowDatabases {
16253 terse,
16254 history,
16255 show_options,
16256 })
16257 }
16258
16259 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
16260 let history = self.parse_keyword(Keyword::HISTORY);
16261 let show_options = self.parse_show_stmt_options()?;
16262 Ok(Statement::ShowSchemas {
16263 terse,
16264 history,
16265 show_options,
16266 })
16267 }
16268
16269 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
16271 let obj_type = match self.expect_one_of_keywords(&[
16272 Keyword::TABLE,
16273 Keyword::TRIGGER,
16274 Keyword::FUNCTION,
16275 Keyword::PROCEDURE,
16276 Keyword::EVENT,
16277 Keyword::VIEW,
16278 ])? {
16279 Keyword::TABLE => Ok(ShowCreateObject::Table),
16280 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
16281 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
16282 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
16283 Keyword::EVENT => Ok(ShowCreateObject::Event),
16284 Keyword::VIEW => Ok(ShowCreateObject::View),
16285 keyword => Err(ParserError::ParserError(format!(
16286 "Unable to map keyword to ShowCreateObject: {keyword:?}"
16287 ))),
16288 }?;
16289
16290 let obj_name = self.parse_object_name(false)?;
16291
16292 Ok(Statement::ShowCreate { obj_type, obj_name })
16293 }
16294
16295 pub fn parse_show_columns(
16297 &mut self,
16298 extended: bool,
16299 full: bool,
16300 ) -> Result<Statement, ParserError> {
16301 let show_options = self.parse_show_stmt_options()?;
16302 Ok(Statement::ShowColumns {
16303 extended,
16304 full,
16305 show_options,
16306 })
16307 }
16308
16309 fn parse_show_tables(
16310 &mut self,
16311 terse: bool,
16312 extended: bool,
16313 full: bool,
16314 external: bool,
16315 ) -> Result<Statement, ParserError> {
16316 let history = !external && self.parse_keyword(Keyword::HISTORY);
16317 let show_options = self.parse_show_stmt_options()?;
16318 Ok(Statement::ShowTables {
16319 terse,
16320 history,
16321 extended,
16322 full,
16323 external,
16324 show_options,
16325 })
16326 }
16327
16328 fn parse_show_views(
16329 &mut self,
16330 terse: bool,
16331 materialized: bool,
16332 ) -> Result<Statement, ParserError> {
16333 let show_options = self.parse_show_stmt_options()?;
16334 Ok(Statement::ShowViews {
16335 materialized,
16336 terse,
16337 show_options,
16338 })
16339 }
16340
16341 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
16343 let filter = self.parse_show_statement_filter()?;
16344 Ok(Statement::ShowFunctions { filter })
16345 }
16346
16347 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
16349 let filter = self.parse_show_statement_filter()?;
16350 Ok(Statement::ShowCollation { filter })
16351 }
16352
16353 pub fn parse_show_statement_filter(
16355 &mut self,
16356 ) -> Result<Option<ShowStatementFilter>, ParserError> {
16357 if self.parse_keyword(Keyword::LIKE) {
16358 Ok(Some(ShowStatementFilter::Like(
16359 self.parse_literal_string()?,
16360 )))
16361 } else if self.parse_keyword(Keyword::ILIKE) {
16362 Ok(Some(ShowStatementFilter::ILike(
16363 self.parse_literal_string()?,
16364 )))
16365 } else if self.parse_keyword(Keyword::WHERE) {
16366 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
16367 } else {
16368 self.maybe_parse(|parser| -> Result<String, ParserError> {
16369 parser.parse_literal_string()
16370 })?
16371 .map_or(Ok(None), |filter| {
16372 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
16373 })
16374 }
16375 }
16376
16377 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
16379 let parsed_keyword = if dialect_of!(self is HiveDialect) {
16381 if self.parse_keyword(Keyword::DEFAULT) {
16383 return Ok(Statement::Use(Use::Default));
16384 }
16385 None } else if dialect_of!(self is DatabricksDialect) {
16387 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
16388 } else if dialect_of!(self is SnowflakeDialect) {
16389 self.parse_one_of_keywords(&[
16390 Keyword::DATABASE,
16391 Keyword::SCHEMA,
16392 Keyword::WAREHOUSE,
16393 Keyword::ROLE,
16394 Keyword::SECONDARY,
16395 ])
16396 } else {
16397 None };
16399
16400 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
16401 self.parse_secondary_roles()?
16402 } else {
16403 let obj_name = self.parse_object_name(false)?;
16404 match parsed_keyword {
16405 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
16406 Some(Keyword::DATABASE) => Use::Database(obj_name),
16407 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
16408 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
16409 Some(Keyword::ROLE) => Use::Role(obj_name),
16410 _ => Use::Object(obj_name),
16411 }
16412 };
16413
16414 Ok(Statement::Use(result))
16415 }
16416
16417 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
16418 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
16419 if self.parse_keyword(Keyword::NONE) {
16420 Ok(Use::SecondaryRoles(SecondaryRoles::None))
16421 } else if self.parse_keyword(Keyword::ALL) {
16422 Ok(Use::SecondaryRoles(SecondaryRoles::All))
16423 } else {
16424 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16425 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
16426 }
16427 }
16428
16429 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
16431 let relation = self.parse_table_factor()?;
16432 let joins = self.parse_joins()?;
16436 Ok(TableWithJoins { relation, joins })
16437 }
16438
    /// Parses zero or more join clauses following a table factor.
    ///
    /// Handles CROSS JOIN/APPLY, OUTER APPLY, ASOF JOIN, and the full
    /// NATURAL/INNER/LEFT/RIGHT/FULL/SEMI/ANTI/STRAIGHT_JOIN family. Token
    /// consumption is strictly ordered; the loop exits when the next token
    /// is not a join keyword.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix, recorded on the resulting Join.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow a constraint on CROSS JOIN; upgrade the
                // operator with it when supported.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // `OUTER APPLY` (counterpart of CROSS APPLY above).
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // `ASOF JOIN <table> MATCH_CONDITION (<expr>) [constraint]`.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm evaluates to a JoinOperator tuple-variant
                // *constructor* (fn(JoinConstraint) -> JoinOperator), which
                // is applied below once the constraint has been parsed.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the LEFT/RIGHT keyword itself.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        // Consume ANTI, then require JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        // Consume SEMI, then require JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // Consume FULL; OUTER is optional before JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        // Consume STRAIGHT_JOIN.
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: no further joins.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    // Dialect groups chained joins to the right: recurse and
                    // wrap the remainder as a nested join.
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16607
16608 fn peek_parens_less_nested_join(&self) -> bool {
16609 matches!(
16610 self.peek_token_ref().token,
16611 Token::Word(Word {
16612 keyword: Keyword::JOIN
16613 | Keyword::INNER
16614 | Keyword::LEFT
16615 | Keyword::RIGHT
16616 | Keyword::FULL,
16617 ..
16618 })
16619 )
16620 }
16621
16622 #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
16624 pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16625 let _guard = self.recursion_counter.try_decrease()?;
16626 if self.parse_keyword(Keyword::LATERAL) {
16627 if self.consume_token(&Token::LParen) {
16629 self.parse_derived_table_factor(Lateral)
16630 } else {
16631 let name = self.parse_object_name(false)?;
16632 self.expect_token(&Token::LParen)?;
16633 let args = self.parse_optional_args()?;
16634 let alias = self.maybe_parse_table_alias()?;
16635 Ok(TableFactor::Function {
16636 lateral: true,
16637 name,
16638 args,
16639 alias,
16640 })
16641 }
16642 } else if self.parse_keyword(Keyword::TABLE) {
16643 self.expect_token(&Token::LParen)?;
16645 let expr = self.parse_expr()?;
16646 self.expect_token(&Token::RParen)?;
16647 let alias = self.maybe_parse_table_alias()?;
16648 Ok(TableFactor::TableFunction { expr, alias })
16649 } else if self.consume_token(&Token::LParen) {
16650 if let Some(mut table) =
16672 self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
16673 {
16674 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
16675 {
16676 table = match kw {
16677 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
16678 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
16679 unexpected_keyword => return Err(ParserError::ParserError(
16680 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
16681 )),
16682 }
16683 }
16684 return Ok(table);
16685 }
16686
16687 let mut table_and_joins = self.parse_table_and_joins()?;
16694
16695 #[allow(clippy::if_same_then_else)]
16696 if !table_and_joins.joins.is_empty() {
16697 self.expect_token(&Token::RParen)?;
16698 let alias = self.maybe_parse_table_alias()?;
16699 Ok(TableFactor::NestedJoin {
16700 table_with_joins: Box::new(table_and_joins),
16701 alias,
16702 }) } else if let TableFactor::NestedJoin {
16704 table_with_joins: _,
16705 alias: _,
16706 } = &table_and_joins.relation
16707 {
16708 self.expect_token(&Token::RParen)?;
16711 let alias = self.maybe_parse_table_alias()?;
16712 Ok(TableFactor::NestedJoin {
16713 table_with_joins: Box::new(table_and_joins),
16714 alias,
16715 })
16716 } else if self.dialect.supports_parens_around_table_factor() {
16717 self.expect_token(&Token::RParen)?;
16724
16725 if let Some(outer_alias) = self.maybe_parse_table_alias()? {
16726 match &mut table_and_joins.relation {
16729 TableFactor::Derived { alias, .. }
16730 | TableFactor::Table { alias, .. }
16731 | TableFactor::Function { alias, .. }
16732 | TableFactor::UNNEST { alias, .. }
16733 | TableFactor::JsonTable { alias, .. }
16734 | TableFactor::XmlTable { alias, .. }
16735 | TableFactor::OpenJsonTable { alias, .. }
16736 | TableFactor::TableFunction { alias, .. }
16737 | TableFactor::Pivot { alias, .. }
16738 | TableFactor::Unpivot { alias, .. }
16739 | TableFactor::MatchRecognize { alias, .. }
16740 | TableFactor::SemanticView { alias, .. }
16741 | TableFactor::NestedJoin { alias, .. } => {
16742 if let Some(inner_alias) = alias {
16744 return Err(ParserError::ParserError(format!(
16745 "duplicate alias {inner_alias}"
16746 )));
16747 }
16748 alias.replace(outer_alias);
16752 }
16753 };
16754 }
16755 Ok(table_and_joins.relation)
16757 } else {
16758 self.expected_ref("joined table", self.peek_token_ref())
16761 }
16762 } else if self.dialect.supports_values_as_table_factor()
16763 && matches!(
16764 self.peek_tokens(),
16765 [
16766 Token::Word(Word {
16767 keyword: Keyword::VALUES,
16768 ..
16769 }),
16770 Token::LParen
16771 ]
16772 )
16773 {
16774 self.expect_keyword_is(Keyword::VALUES)?;
16775
16776 let values = SetExpr::Values(self.parse_values(false, false)?);
16780 let alias = self.maybe_parse_table_alias()?;
16781 Ok(TableFactor::Derived {
16782 lateral: false,
16783 subquery: Box::new(Query {
16784 with: None,
16785 body: Box::new(values),
16786 order_by: None,
16787 limit_clause: None,
16788 fetch: None,
16789 locks: vec![],
16790 for_clause: None,
16791 settings: None,
16792 format_clause: None,
16793 pipe_operators: vec![],
16794 }),
16795 alias,
16796 sample: None,
16797 })
16798 } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
16799 && self.parse_keyword(Keyword::UNNEST)
16800 {
16801 self.expect_token(&Token::LParen)?;
16802 let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
16803 self.expect_token(&Token::RParen)?;
16804
16805 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
16806 let alias = match self.maybe_parse_table_alias() {
16807 Ok(Some(alias)) => Some(alias),
16808 Ok(None) => None,
16809 Err(e) => return Err(e),
16810 };
16811
16812 let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
16813 Ok(()) => true,
16814 Err(_) => false,
16815 };
16816
16817 let with_offset_alias = if with_offset {
16818 match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
16819 Ok(Some(alias)) => Some(alias),
16820 Ok(None) => None,
16821 Err(e) => return Err(e),
16822 }
16823 } else {
16824 None
16825 };
16826
16827 Ok(TableFactor::UNNEST {
16828 alias,
16829 array_exprs,
16830 with_offset,
16831 with_offset_alias,
16832 with_ordinality,
16833 })
16834 } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
16835 let json_expr = self.parse_expr()?;
16836 self.expect_token(&Token::Comma)?;
16837 let json_path = self.parse_value()?;
16838 self.expect_keyword_is(Keyword::COLUMNS)?;
16839 self.expect_token(&Token::LParen)?;
16840 let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
16841 self.expect_token(&Token::RParen)?;
16842 self.expect_token(&Token::RParen)?;
16843 let alias = self.maybe_parse_table_alias()?;
16844 Ok(TableFactor::JsonTable {
16845 json_expr,
16846 json_path,
16847 columns,
16848 alias,
16849 })
16850 } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
16851 self.prev_token();
16852 self.parse_open_json_table_factor()
16853 } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
16854 self.prev_token();
16855 self.parse_xml_table_factor()
16856 } else if self.dialect.supports_semantic_view_table_factor()
16857 && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
16858 {
16859 self.parse_semantic_view_table_factor()
16860 } else if self.peek_token_ref().token == Token::AtSign {
16861 self.parse_snowflake_stage_table_factor()
16863 } else {
16864 let name = self.parse_object_name(true)?;
16865
16866 let json_path = match &self.peek_token_ref().token {
16867 Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
16868 _ => None,
16869 };
16870
16871 let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
16872 && self.parse_keyword(Keyword::PARTITION)
16873 {
16874 self.parse_parenthesized_identifiers()?
16875 } else {
16876 vec![]
16877 };
16878
16879 let version = self.maybe_parse_table_version()?;
16881
16882 let args = if self.consume_token(&Token::LParen) {
16884 Some(self.parse_table_function_args()?)
16885 } else {
16886 None
16887 };
16888
16889 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
16890
16891 let mut sample = None;
16892 if self.dialect.supports_table_sample_before_alias() {
16893 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
16894 sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
16895 }
16896 }
16897
16898 let alias = self.maybe_parse_table_alias()?;
16899
16900 let index_hints = if self.dialect.supports_table_hints() {
16902 self.maybe_parse(|p| p.parse_table_index_hints())?
16903 .unwrap_or(vec![])
16904 } else {
16905 vec![]
16906 };
16907
16908 let mut with_hints = vec![];
16910 if self.parse_keyword(Keyword::WITH) {
16911 if self.consume_token(&Token::LParen) {
16912 with_hints = self.parse_comma_separated(Parser::parse_expr)?;
16913 self.expect_token(&Token::RParen)?;
16914 } else {
16915 self.prev_token();
16917 }
16918 };
16919
16920 if !self.dialect.supports_table_sample_before_alias() {
16921 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
16922 sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
16923 }
16924 }
16925
16926 let mut table = TableFactor::Table {
16927 name,
16928 alias,
16929 args,
16930 with_hints,
16931 version,
16932 partitions,
16933 with_ordinality,
16934 json_path,
16935 sample,
16936 index_hints,
16937 };
16938
16939 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
16940 table = match kw {
16941 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
16942 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
16943 unexpected_keyword => return Err(ParserError::ParserError(
16944 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
16945 )),
16946 }
16947 }
16948
16949 if self.dialect.supports_match_recognize()
16950 && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
16951 {
16952 table = self.parse_match_recognize(table)?;
16953 }
16954
16955 Ok(table)
16956 }
16957 }
16958
16959 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16964 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16966
16967 let args = if self.consume_token(&Token::LParen) {
16969 Some(self.parse_table_function_args()?)
16970 } else {
16971 None
16972 };
16973
16974 let alias = self.maybe_parse_table_alias()?;
16975
16976 Ok(TableFactor::Table {
16977 name,
16978 alias,
16979 args,
16980 with_hints: vec![],
16981 version: None,
16982 partitions: vec![],
16983 with_ordinality: false,
16984 json_path: None,
16985 sample: None,
16986 index_hints: vec![],
16987 })
16988 }
16989
16990 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16991 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16992 TableSampleModifier::TableSample
16993 } else if self.parse_keyword(Keyword::SAMPLE) {
16994 TableSampleModifier::Sample
16995 } else {
16996 return Ok(None);
16997 };
16998 self.parse_table_sample(modifier).map(Some)
16999 }
17000
    /// Parses the body of a `TABLESAMPLE` / `SAMPLE` clause after the
    /// introducer keyword has been consumed: optional sampling method,
    /// quantity (or Hive-style `BUCKET x OUT OF y`), optional seed
    /// (`REPEATABLE` / `SEED`), and optional `OFFSET`.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method keyword immediately after the introducer.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // The quantity may appear bare or wrapped in parentheses; remember
        // which so the closing `)` is required (and round-tripped) later.
        let parenthesized = self.consume_token(&Token::LParen);

        // Hive-style `(BUCKET x OUT OF y [ON expr])` and a plain quantity
        // are mutually exclusive.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // Quantity is normally an expression; a bare word such as `100M`
            // (byte-length shorthand) is kept as a placeholder value.
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            // Optional unit qualifier for the quantity.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        // Close the parenthesis opened before the quantity/bucket spec.
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic-sampling seed; the keyword used is recorded
        // so the statement round-trips verbatim.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional trailing OFFSET expression.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
17095
17096 fn parse_table_sample_seed(
17097 &mut self,
17098 modifier: TableSampleSeedModifier,
17099 ) -> Result<TableSampleSeed, ParserError> {
17100 self.expect_token(&Token::LParen)?;
17101 let value = self.parse_number_value()?;
17102 self.expect_token(&Token::RParen)?;
17103 Ok(TableSampleSeed { modifier, value })
17104 }
17105
17106 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17109 self.expect_token(&Token::LParen)?;
17110 let json_expr = self.parse_expr()?;
17111 let json_path = if self.consume_token(&Token::Comma) {
17112 Some(self.parse_value()?)
17113 } else {
17114 None
17115 };
17116 self.expect_token(&Token::RParen)?;
17117 let columns = if self.parse_keyword(Keyword::WITH) {
17118 self.expect_token(&Token::LParen)?;
17119 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
17120 self.expect_token(&Token::RParen)?;
17121 columns
17122 } else {
17123 Vec::new()
17124 };
17125 let alias = self.maybe_parse_table_alias()?;
17126 Ok(TableFactor::OpenJsonTable {
17127 json_expr,
17128 json_path,
17129 columns,
17130 alias,
17131 })
17132 }
17133
17134 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17135 self.expect_token(&Token::LParen)?;
17136 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
17137 self.expect_token(&Token::LParen)?;
17138 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
17139 self.expect_token(&Token::RParen)?;
17140 self.expect_token(&Token::Comma)?;
17141 namespaces
17142 } else {
17143 vec![]
17144 };
17145 let row_expression = self.parse_expr()?;
17146 let passing = self.parse_xml_passing_clause()?;
17147 self.expect_keyword_is(Keyword::COLUMNS)?;
17148 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
17149 self.expect_token(&Token::RParen)?;
17150 let alias = self.maybe_parse_table_alias()?;
17151 Ok(TableFactor::XmlTable {
17152 namespaces,
17153 row_expression,
17154 passing,
17155 columns,
17156 alias,
17157 })
17158 }
17159
17160 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
17161 let uri = self.parse_expr()?;
17162 self.expect_keyword_is(Keyword::AS)?;
17163 let name = self.parse_identifier()?;
17164 Ok(XmlNamespaceDefinition { uri, name })
17165 }
17166
17167 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
17168 let name = self.parse_identifier()?;
17169
17170 let option = if self.parse_keyword(Keyword::FOR) {
17171 self.expect_keyword(Keyword::ORDINALITY)?;
17172 XmlTableColumnOption::ForOrdinality
17173 } else {
17174 let r#type = self.parse_data_type()?;
17175 let mut path = None;
17176 let mut default = None;
17177
17178 if self.parse_keyword(Keyword::PATH) {
17179 path = Some(self.parse_expr()?);
17180 }
17181
17182 if self.parse_keyword(Keyword::DEFAULT) {
17183 default = Some(self.parse_expr()?);
17184 }
17185
17186 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
17187 if !not_null {
17188 let _ = self.parse_keyword(Keyword::NULL);
17190 }
17191
17192 XmlTableColumnOption::NamedInfo {
17193 r#type,
17194 path,
17195 default,
17196 nullable: !not_null,
17197 }
17198 };
17199 Ok(XmlTableColumn { name, option })
17200 }
17201
    /// Parses an optional `PASSING [BY VALUE] <expr> [AS <alias>], ...`
    /// clause of `XMLTABLE`. Returns an empty argument list when the
    /// `PASSING` keyword is absent.
    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
        let mut arguments = vec![];
        if self.parse_keyword(Keyword::PASSING) {
            loop {
                // NOTE(review): if `BY` is present but `VALUE` is not, the
                // `expect_keyword` error is swallowed by `is_ok()` while `BY`
                // stays consumed — confirm this lenient recovery is intended.
                let by_value =
                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
                let expr = self.parse_expr()?;
                // Optional `AS <alias>` for this passing argument.
                let alias = if self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };
                arguments.push(XmlPassingArgument {
                    expr,
                    alias,
                    by_value,
                });
                // Arguments are comma-separated; stop at the first non-comma.
                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }
        }
        Ok(XmlPassingClause { arguments })
    }
17226
    /// Parses a `SEMANTIC_VIEW( <name> [DIMENSIONS ...] [METRICS ...]
    /// [FACTS ...] [WHERE ...] )` table factor. The sub-clauses may appear
    /// in any order but each at most once.
    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
        self.expect_token(&Token::LParen)?;

        let name = self.parse_object_name(true)?;

        let mut dimensions = Vec::new();
        let mut metrics = Vec::new();
        let mut facts = Vec::new();
        let mut where_clause = None;

        // Consume sub-clauses until the closing parenthesis; emptiness of
        // each collection doubles as the "seen before" flag.
        while self.peek_token_ref().token != Token::RParen {
            if self.parse_keyword(Keyword::DIMENSIONS) {
                if !dimensions.is_empty() {
                    return Err(ParserError::ParserError(
                        "DIMENSIONS clause can only be specified once".to_string(),
                    ));
                }
                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::METRICS) {
                if !metrics.is_empty() {
                    return Err(ParserError::ParserError(
                        "METRICS clause can only be specified once".to_string(),
                    ));
                }
                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::FACTS) {
                if !facts.is_empty() {
                    return Err(ParserError::ParserError(
                        "FACTS clause can only be specified once".to_string(),
                    ));
                }
                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::WHERE) {
                if where_clause.is_some() {
                    return Err(ParserError::ParserError(
                        "WHERE clause can only be specified once".to_string(),
                    ));
                }
                where_clause = Some(self.parse_expr()?);
            } else {
                // Anything else inside the parentheses is a syntax error,
                // reported at the offending token's location.
                let tok = self.peek_token_ref();
                return parser_err!(
                    format!(
                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
                        tok.token
                    ),
                    tok.span.start
                )?;
            }
        }

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::SemanticView {
            name,
            dimensions,
            metrics,
            facts,
            where_clause,
            alias,
        })
    }
17294
    /// Parses a `MATCH_RECOGNIZE (...)` clause applied to `table`. The
    /// `MATCH_RECOGNIZE` keyword has already been consumed; the clauses
    /// inside the parentheses are parsed in their fixed grammatical order:
    /// PARTITION BY, ORDER BY, MEASURES, rows-per-match, AFTER MATCH SKIP,
    /// PATTERN, DEFINE.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional PARTITION BY <expr, ...>.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional ORDER BY <ordering, ...>.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional MEASURES <expr [AS] alias, ...>; the AS keyword itself
        // is optional but the alias is mandatory.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional ONE ROW PER MATCH | ALL ROWS PER MATCH, the latter with
        // an optional empty-match mode.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional AFTER MATCH SKIP <option>; once the three leading
        // keywords are consumed one of the four options is mandatory.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory PATTERN (<row pattern>).
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory DEFINE <symbol AS expr, ...>.
        self.expect_keyword_is(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
17399
    /// Parses an atomic element of a `MATCH_RECOGNIZE` row pattern: the
    /// start/end anchors `^`/`$`, an exclusion `{- symbol -}`,
    /// `PERMUTE(sym, ...)`, a parenthesized group, or a bare symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` — match-start anchor.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` — match-end anchor (tokenized as a placeholder).
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}` — exclude the symbol's rows from the output.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // `PERMUTE(a, b, ...)` — only recognized as an unquoted word.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // `( pattern )` — grouping.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: put the token back and try a plain symbol name.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
17438
    /// Parses a base pattern followed by any number of repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`.
    /// Each quantifier wraps the pattern accumulated so far.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` is tokenized as a placeholder.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                // Braced quantifier: distinguish `{,m}`, `{n,m}`, `{n,}`
                // and `{n}` by the tokens after the opening brace.
                Token::LBrace => {
                    // Shadows the outer `token`: now the first token inside
                    // the braces, whose span is used for number parsing.
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — lower bound plus optional upper.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    // NOTE(review): both bounds report errors
                                    // at the first number's span — confirm
                                    // whether `m` should use its own span.
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: put the token back and stop looping.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17493
17494 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17495 let mut patterns = vec![self.parse_repetition_pattern()?];
17496 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17497 patterns.push(self.parse_repetition_pattern()?);
17498 }
17499 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17500 Ok([pattern]) => Ok(pattern),
17501 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17502 }
17503 }
17504
17505 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17506 let pattern = self.parse_concat_pattern()?;
17507 if self.consume_token(&Token::Pipe) {
17508 match self.parse_pattern()? {
17509 MatchRecognizePattern::Alternation(mut patterns) => {
17511 patterns.insert(0, pattern);
17512 Ok(MatchRecognizePattern::Alternation(patterns))
17513 }
17514 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17515 }
17516 } else {
17517 Ok(pattern)
17518 }
17519 }
17520
    /// Attempts to parse a temporal/versioned table clause after a table
    /// name: `FOR SYSTEM_TIME AS OF`, Snowflake `CHANGES`/`AT`/`BEFORE`,
    /// `TIMESTAMP AS OF`, or `VERSION AS OF`. Returns `Ok(None)` when the
    /// dialect does not support versioning or no clause is present.
    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
        if self.dialect.supports_table_versioning() {
            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
            {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
            } else if self.peek_keyword(Keyword::CHANGES) {
                // Snowflake change tracking: CHANGES(...) AT|BEFORE(...).
                return self.parse_table_version_changes().map(Some);
            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
                // AT(...) / BEFORE(...) are parsed as function calls.
                let func_name = self.parse_object_name(true)?;
                let func = self.parse_function(func_name)?;
                return Ok(Some(TableVersion::Function(func)));
            } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::TimestampAsOf(expr)));
            } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
                // VERSION AS OF takes a numeric version literal.
                let expr = Expr::Value(self.parse_number_value()?);
                return Ok(Some(TableVersion::VersionAsOf(expr)));
            }
        }
        Ok(None)
    }
17544
17545 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
17556 let changes_name = self.parse_object_name(true)?;
17557 let changes = self.parse_function(changes_name)?;
17558 let at_name = self.parse_object_name(true)?;
17559 let at = self.parse_function(at_name)?;
17560 let end = if self.peek_keyword(Keyword::END) {
17561 let end_name = self.parse_object_name(true)?;
17562 Some(self.parse_function(end_name)?)
17563 } else {
17564 None
17565 };
17566 Ok(TableVersion::Changes { changes, at, end })
17567 }
17568
    /// Parses one column definition in the `COLUMNS` clause of `JSON_TABLE`:
    /// a `NESTED [PATH] '<path>' COLUMNS (...)` group, a
    /// `<name> FOR ORDINALITY` counter, or a named column
    /// `<name> <type> [EXISTS] PATH '<path>' [<handling> ON EMPTY|ERROR ...]`.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional in the NESTED form.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            // Nested column groups recurse into this same parser.
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        // `<type> EXISTS PATH` asks whether the path matches at all.
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?;
        let mut on_empty = None;
        let mut on_error = None;
        // Zero or more `<handling> ON EMPTY` / `<handling> ON ERROR`
        // clauses, in either order; the helper consumes through `ON`.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
17612
17613 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17621 let name = self.parse_identifier()?;
17622 let r#type = self.parse_data_type()?;
17623 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17624 self.next_token();
17625 Some(path)
17626 } else {
17627 None
17628 };
17629 let as_json = self.parse_keyword(Keyword::AS);
17630 if as_json {
17631 self.expect_keyword_is(Keyword::JSON)?;
17632 }
17633 Ok(OpenJsonTableColumn {
17634 name,
17635 r#type,
17636 path,
17637 as_json,
17638 })
17639 }
17640
17641 fn parse_json_table_column_error_handling(
17642 &mut self,
17643 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17644 let res = if self.parse_keyword(Keyword::NULL) {
17645 JsonTableColumnErrorHandling::Null
17646 } else if self.parse_keyword(Keyword::ERROR) {
17647 JsonTableColumnErrorHandling::Error
17648 } else if self.parse_keyword(Keyword::DEFAULT) {
17649 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17650 } else {
17651 return Ok(None);
17652 };
17653 self.expect_keyword_is(Keyword::ON)?;
17654 Ok(Some(res))
17655 }
17656
17657 pub fn parse_derived_table_factor(
17659 &mut self,
17660 lateral: IsLateral,
17661 ) -> Result<TableFactor, ParserError> {
17662 let subquery = self.parse_query()?;
17663 self.expect_token(&Token::RParen)?;
17664 let alias = self.maybe_parse_table_alias()?;
17665
17666 let sample = self
17668 .maybe_parse_table_sample()?
17669 .map(TableSampleKind::AfterTableAlias);
17670
17671 Ok(TableFactor::Derived {
17672 lateral: match lateral {
17673 Lateral => true,
17674 NotLateral => false,
17675 },
17676 subquery,
17677 alias,
17678 sample,
17679 })
17680 }
17681
17682 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17705 let expr = self.parse_expr()?;
17706 let alias = if self.parse_keyword(Keyword::AS) {
17707 Some(self.parse_identifier()?)
17708 } else {
17709 None
17710 };
17711
17712 Ok(ExprWithAlias { expr, alias })
17713 }
17714
17715 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17719 let expr = self.parse_expr()?;
17720 let alias = self.parse_identifier_optional_alias()?;
17721 Ok(ExprWithAlias { expr, alias })
17722 }
17723
17724 fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
17726 let function_name = match self.next_token().token {
17727 Token::Word(w) => Ok(w.value),
17728 _ => self.expected_ref("a function identifier", self.peek_token_ref()),
17729 }?;
17730 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
17731 let alias = {
17732 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
17733 kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
17735 }
17736 self.parse_optional_alias_inner(None, validator)?
17737 };
17738 Ok(ExprWithAlias { expr, alias })
17739 }
17740
    /// Parses a `PIVOT (...)` clause applied to `table`:
    /// `PIVOT (<aggregates> FOR <column(s)> IN (<values>) [DEFAULT ON NULL (<expr>)]) [alias]`.
    /// The `PIVOT` keyword has already been consumed by the caller.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions =
            self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // The pivot column may be a single expression or a parenthesized
        // list; `Between` precedence keeps `IN` from being eaten as an
        // operator here.
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        // The IN list is either `ANY [ORDER BY ...]`, a subquery, or an
        // explicit list of values with optional aliases.
        self.expect_token(&Token::LParen)?;
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(
                self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
            )
        };
        self.expect_token(&Token::RParen)?;

        // Optional replacement value for NULL cells in the pivot output.
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
17797
17798 pub fn parse_unpivot_table_factor(
17800 &mut self,
17801 table: TableFactor,
17802 ) -> Result<TableFactor, ParserError> {
17803 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17804 self.expect_keyword_is(Keyword::NULLS)?;
17805 Some(NullInclusion::IncludeNulls)
17806 } else if self.parse_keyword(Keyword::EXCLUDE) {
17807 self.expect_keyword_is(Keyword::NULLS)?;
17808 Some(NullInclusion::ExcludeNulls)
17809 } else {
17810 None
17811 };
17812 self.expect_token(&Token::LParen)?;
17813 let value = self.parse_expr()?;
17814 self.expect_keyword_is(Keyword::FOR)?;
17815 let name = self.parse_identifier()?;
17816 self.expect_keyword_is(Keyword::IN)?;
17817 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17818 p.parse_expr_with_alias()
17819 })?;
17820 self.expect_token(&Token::RParen)?;
17821 let alias = self.maybe_parse_table_alias()?;
17822 Ok(TableFactor::Unpivot {
17823 table: Box::new(table),
17824 value,
17825 null_inclusion,
17826 name,
17827 columns,
17828 alias,
17829 })
17830 }
17831
17832 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17834 if natural {
17835 Ok(JoinConstraint::Natural)
17836 } else if self.parse_keyword(Keyword::ON) {
17837 let constraint = self.parse_expr()?;
17838 Ok(JoinConstraint::On(constraint))
17839 } else if self.parse_keyword(Keyword::USING) {
17840 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17841 Ok(JoinConstraint::Using(columns))
17842 } else {
17843 Ok(JoinConstraint::None)
17844 }
17846 }
17847
    /// Parses the remainder of a `GRANT` statement after the `GRANT`
    /// keyword: privileges and objects, `TO` grantees, and the optional
    /// `WITH GRANT OPTION`, current-grants, `AS <grantor>`, and
    /// `GRANTED BY <identifier>` suffixes.
    pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
        // Shared with DENY/REVOKE parsing.
        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;

        self.expect_keyword_is(Keyword::TO)?;
        let grantees = self.parse_grantees()?;

        let with_grant_option =
            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);

        // Snowflake: COPY/REVOKE CURRENT GRANTS behavior on re-grant.
        let current_grants =
            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
                Some(CurrentGrantsKind::CopyCurrentGrants)
            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
                Some(CurrentGrantsKind::RevokeCurrentGrants)
            } else {
                None
            };

        // Optional `AS <grantor>` clause.
        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        // Optional `GRANTED BY <identifier>` clause.
        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        Ok(Grant {
            privileges,
            objects,
            grantees,
            with_grant_option,
            as_grantor,
            granted_by,
            current_grants,
        })
    }
17889
    /// Parses a comma-separated list of grantees (the `TO ...` / `FROM ...`
    /// part of GRANT/REVOKE/DENY statements).
    ///
    /// A grantee may be prefixed with a type keyword (ROLE, USER, SHARE,
    /// GROUP, PUBLIC, DATABASE ROLE, APPLICATION [ROLE]); once seen, that
    /// type "sticks" and is also applied to subsequent list elements that
    /// carry no prefix of their own.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            // Try to read an explicit grantee-type prefix; fall back to the
            // type carried over from the previous list element.
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                grantee_type.clone()
            };

            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                // The matched keyword is reserved as a grantee *name* in this
                // dialect, so back up and let it be re-parsed as the name.
                // NOTE(review): this rewinds a single token; presumably fine
                // because reserved types are single-keyword — confirm for
                // multi-word types like DATABASE ROLE.
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC carries no name.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // Namespaced grantee (`namespace:name`): fold both parts
                    // back into a single identifier.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17957
17958 pub fn parse_grant_deny_revoke_privileges_objects(
17960 &mut self,
17961 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17962 let privileges = if self.parse_keyword(Keyword::ALL) {
17963 Privileges::All {
17964 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17965 }
17966 } else {
17967 let actions = self.parse_actions_list()?;
17968 Privileges::Actions(actions)
17969 };
17970
17971 let objects = if self.parse_keyword(Keyword::ON) {
17972 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17973 Some(GrantObjects::AllTablesInSchema {
17974 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17975 })
17976 } else if self.parse_keywords(&[
17977 Keyword::ALL,
17978 Keyword::EXTERNAL,
17979 Keyword::TABLES,
17980 Keyword::IN,
17981 Keyword::SCHEMA,
17982 ]) {
17983 Some(GrantObjects::AllExternalTablesInSchema {
17984 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17985 })
17986 } else if self.parse_keywords(&[
17987 Keyword::ALL,
17988 Keyword::VIEWS,
17989 Keyword::IN,
17990 Keyword::SCHEMA,
17991 ]) {
17992 Some(GrantObjects::AllViewsInSchema {
17993 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17994 })
17995 } else if self.parse_keywords(&[
17996 Keyword::ALL,
17997 Keyword::MATERIALIZED,
17998 Keyword::VIEWS,
17999 Keyword::IN,
18000 Keyword::SCHEMA,
18001 ]) {
18002 Some(GrantObjects::AllMaterializedViewsInSchema {
18003 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18004 })
18005 } else if self.parse_keywords(&[
18006 Keyword::ALL,
18007 Keyword::FUNCTIONS,
18008 Keyword::IN,
18009 Keyword::SCHEMA,
18010 ]) {
18011 Some(GrantObjects::AllFunctionsInSchema {
18012 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18013 })
18014 } else if self.parse_keywords(&[
18015 Keyword::FUTURE,
18016 Keyword::SCHEMAS,
18017 Keyword::IN,
18018 Keyword::DATABASE,
18019 ]) {
18020 Some(GrantObjects::FutureSchemasInDatabase {
18021 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18022 })
18023 } else if self.parse_keywords(&[
18024 Keyword::FUTURE,
18025 Keyword::TABLES,
18026 Keyword::IN,
18027 Keyword::SCHEMA,
18028 ]) {
18029 Some(GrantObjects::FutureTablesInSchema {
18030 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18031 })
18032 } else if self.parse_keywords(&[
18033 Keyword::FUTURE,
18034 Keyword::EXTERNAL,
18035 Keyword::TABLES,
18036 Keyword::IN,
18037 Keyword::SCHEMA,
18038 ]) {
18039 Some(GrantObjects::FutureExternalTablesInSchema {
18040 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18041 })
18042 } else if self.parse_keywords(&[
18043 Keyword::FUTURE,
18044 Keyword::VIEWS,
18045 Keyword::IN,
18046 Keyword::SCHEMA,
18047 ]) {
18048 Some(GrantObjects::FutureViewsInSchema {
18049 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18050 })
18051 } else if self.parse_keywords(&[
18052 Keyword::FUTURE,
18053 Keyword::MATERIALIZED,
18054 Keyword::VIEWS,
18055 Keyword::IN,
18056 Keyword::SCHEMA,
18057 ]) {
18058 Some(GrantObjects::FutureMaterializedViewsInSchema {
18059 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18060 })
18061 } else if self.parse_keywords(&[
18062 Keyword::ALL,
18063 Keyword::SEQUENCES,
18064 Keyword::IN,
18065 Keyword::SCHEMA,
18066 ]) {
18067 Some(GrantObjects::AllSequencesInSchema {
18068 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18069 })
18070 } else if self.parse_keywords(&[
18071 Keyword::FUTURE,
18072 Keyword::SEQUENCES,
18073 Keyword::IN,
18074 Keyword::SCHEMA,
18075 ]) {
18076 Some(GrantObjects::FutureSequencesInSchema {
18077 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18078 })
18079 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
18080 Some(GrantObjects::ResourceMonitors(
18081 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18082 ))
18083 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
18084 Some(GrantObjects::ComputePools(
18085 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18086 ))
18087 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
18088 Some(GrantObjects::FailoverGroup(
18089 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18090 ))
18091 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
18092 Some(GrantObjects::ReplicationGroup(
18093 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18094 ))
18095 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
18096 Some(GrantObjects::ExternalVolumes(
18097 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18098 ))
18099 } else {
18100 let object_type = self.parse_one_of_keywords(&[
18101 Keyword::SEQUENCE,
18102 Keyword::DATABASE,
18103 Keyword::SCHEMA,
18104 Keyword::TABLE,
18105 Keyword::VIEW,
18106 Keyword::WAREHOUSE,
18107 Keyword::INTEGRATION,
18108 Keyword::VIEW,
18109 Keyword::WAREHOUSE,
18110 Keyword::INTEGRATION,
18111 Keyword::USER,
18112 Keyword::CONNECTION,
18113 Keyword::PROCEDURE,
18114 Keyword::FUNCTION,
18115 Keyword::TYPE,
18116 Keyword::DOMAIN,
18117 ]);
18118 let objects =
18119 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
18120 match object_type {
18121 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
18122 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
18123 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
18124 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
18125 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
18126 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
18127 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
18128 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
18129 Some(Keyword::TYPE) => Some(GrantObjects::Types(objects?)),
18130 Some(Keyword::DOMAIN) => Some(GrantObjects::Domains(objects?)),
18131 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
18132 if let Some(name) = objects?.first() {
18133 self.parse_grant_procedure_or_function(name, &kw)?
18134 } else {
18135 self.expected_ref("procedure or function name", self.peek_token_ref())?
18136 }
18137 }
18138 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
18139 Some(unexpected_keyword) => return Err(ParserError::ParserError(
18140 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
18141 )),
18142 }
18143 }
18144 } else {
18145 None
18146 };
18147
18148 Ok((privileges, objects))
18149 }
18150
18151 fn parse_grant_procedure_or_function(
18152 &mut self,
18153 name: &ObjectName,
18154 kw: &Option<Keyword>,
18155 ) -> Result<Option<GrantObjects>, ParserError> {
18156 let arg_types = if self.consume_token(&Token::LParen) {
18157 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
18158 self.expect_token(&Token::RParen)?;
18159 list
18160 } else {
18161 vec![]
18162 };
18163 match kw {
18164 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
18165 name: name.clone(),
18166 arg_types,
18167 })),
18168 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
18169 name: name.clone(),
18170 arg_types,
18171 })),
18172 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
18173 }
18174 }
18175
    /// Parses a single privilege (action) in a GRANT/REVOKE/DENY privilege
    /// list, e.g. `SELECT (c1, c2)`, `CREATE TABLE`, `APPLY MASKING POLICY`.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Helper: parse an optional parenthesized column list; an absent or
        // empty list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges must be matched before the single-keyword
        // ones below (e.g. `READ SESSION` before a bare `READ`).
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-keyword privileges, some followed by a typed sub-target or
        // an optional column list.
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
18296
    /// Parses the optional object type that can follow `GRANT CREATE`.
    /// Returns `None` when no recognized type keyword follows.
    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
        // Multi-word object types must be matched before the single-keyword
        // ones (e.g. `APPLICATION PACKAGE` before a bare `APPLICATION`).
        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
            Some(ActionCreateObjectType::ApplicationPackage)
        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
            Some(ActionCreateObjectType::ComputePool)
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
            Some(ActionCreateObjectType::DataExchangeListing)
        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
            Some(ActionCreateObjectType::ExternalVolume)
        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
            Some(ActionCreateObjectType::FailoverGroup)
        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
            Some(ActionCreateObjectType::NetworkPolicy)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
            Some(ActionCreateObjectType::OrganiationListing)
        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
            Some(ActionCreateObjectType::ReplicationGroup)
        // Single-keyword object types.
        } else if self.parse_keyword(Keyword::ACCOUNT) {
            Some(ActionCreateObjectType::Account)
        } else if self.parse_keyword(Keyword::APPLICATION) {
            Some(ActionCreateObjectType::Application)
        } else if self.parse_keyword(Keyword::DATABASE) {
            Some(ActionCreateObjectType::Database)
        } else if self.parse_keyword(Keyword::INTEGRATION) {
            Some(ActionCreateObjectType::Integration)
        } else if self.parse_keyword(Keyword::ROLE) {
            Some(ActionCreateObjectType::Role)
        } else if self.parse_keyword(Keyword::SCHEMA) {
            Some(ActionCreateObjectType::Schema)
        } else if self.parse_keyword(Keyword::SHARE) {
            Some(ActionCreateObjectType::Share)
        } else if self.parse_keyword(Keyword::USER) {
            Some(ActionCreateObjectType::User)
        } else if self.parse_keyword(Keyword::WAREHOUSE) {
            Some(ActionCreateObjectType::Warehouse)
        } else {
            None
        }
    }
18339
    /// Parses the policy/tag type that must follow `GRANT APPLY`; errors
    /// when none of the known types is present.
    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AggregationPolicy)
        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AuthenticationPolicy)
        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
            Ok(ActionApplyType::JoinPolicy)
        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
            Ok(ActionApplyType::MaskingPolicy)
        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
            Ok(ActionApplyType::PackagesPolicy)
        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
            Ok(ActionApplyType::PasswordPolicy)
        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
            Ok(ActionApplyType::ProjectionPolicy)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
            Ok(ActionApplyType::RowAccessPolicy)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
            Ok(ActionApplyType::SessionPolicy)
        } else if self.parse_keyword(Keyword::TAG) {
            Ok(ActionApplyType::Tag)
        } else {
            self.expected_ref("GRANT APPLY type", self.peek_token_ref())
        }
    }
18365
    /// Parses the optional object type following `GRANT EXEC`/`EXECUTE`.
    /// Multi-word forms are matched before their single-keyword suffixes
    /// (`MANAGED ALERT` before `ALERT`). Returns `None` when absent.
    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
            Some(ActionExecuteObjectType::DataMetricFunction)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
            Some(ActionExecuteObjectType::ManagedAlert)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
            Some(ActionExecuteObjectType::ManagedTask)
        } else if self.parse_keyword(Keyword::ALERT) {
            Some(ActionExecuteObjectType::Alert)
        } else if self.parse_keyword(Keyword::TASK) {
            Some(ActionExecuteObjectType::Task)
        } else {
            None
        }
    }
18381
    /// Parses the target that must follow `GRANT MANAGE`; errors when none
    /// of the known MANAGE targets is present.
    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::AccountSupportCases)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
            Ok(ActionManageType::EventSharing)
        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
            Ok(ActionManageType::ListingAutoFulfillment)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::OrganizationSupportCases)
        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::UserSupportCases)
        } else if self.parse_keyword(Keyword::GRANTS) {
            Ok(ActionManageType::Grants)
        } else if self.parse_keyword(Keyword::WAREHOUSES) {
            Ok(ActionManageType::Warehouses)
        } else {
            self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
        }
    }
18401
    /// Parses the optional target following `GRANT MODIFY`; returns `None`
    /// when none of the known MODIFY targets is present.
    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
            Some(ActionModifyType::LogLevel)
        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
            Some(ActionModifyType::TraceLevel)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
            Some(ActionModifyType::SessionLogLevel)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
            Some(ActionModifyType::SessionTraceLevel)
        } else {
            None
        }
    }
18415
18416 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
18417 if self.parse_keyword(Keyword::EXECUTION) {
18418 Some(ActionMonitorType::Execution)
18419 } else if self.parse_keyword(Keyword::SECURITY) {
18420 Some(ActionMonitorType::Security)
18421 } else if self.parse_keyword(Keyword::USAGE) {
18422 Some(ActionMonitorType::Usage)
18423 } else {
18424 None
18425 }
18426 }
18427
18428 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
18430 let mut name = self.parse_object_name(false)?;
18431 if self.dialect.supports_user_host_grantee()
18432 && name.0.len() == 1
18433 && name.0[0].as_ident().is_some()
18434 && self.consume_token(&Token::AtSign)
18435 {
18436 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
18437 let host = self.parse_identifier()?;
18438 Ok(GranteeName::UserHost { user, host })
18439 } else {
18440 Ok(GranteeName::ObjectName(name))
18441 }
18442 }
18443
18444 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
18446 self.expect_keyword(Keyword::DENY)?;
18447
18448 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18449 let objects = match objects {
18450 Some(o) => o,
18451 None => {
18452 return parser_err!(
18453 "DENY statements must specify an object",
18454 self.peek_token_ref().span.start
18455 )
18456 }
18457 };
18458
18459 self.expect_keyword_is(Keyword::TO)?;
18460 let grantees = self.parse_grantees()?;
18461 let cascade = self.parse_cascade_option();
18462 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18463 Some(self.parse_identifier()?)
18464 } else {
18465 None
18466 };
18467
18468 Ok(Statement::Deny(DenyStatement {
18469 privileges,
18470 objects,
18471 grantees,
18472 cascade,
18473 granted_by,
18474 }))
18475 }
18476
18477 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18479 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18480
18481 self.expect_keyword_is(Keyword::FROM)?;
18482 let grantees = self.parse_grantees()?;
18483
18484 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18485 Some(self.parse_identifier()?)
18486 } else {
18487 None
18488 };
18489
18490 let cascade = self.parse_cascade_option();
18491
18492 Ok(Revoke {
18493 privileges,
18494 objects,
18495 grantees,
18496 granted_by,
18497 cascade,
18498 })
18499 }
18500
18501 pub fn parse_replace(
18503 &mut self,
18504 replace_token: TokenWithSpan,
18505 ) -> Result<Statement, ParserError> {
18506 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18507 return parser_err!(
18508 "Unsupported statement REPLACE",
18509 self.peek_token_ref().span.start
18510 );
18511 }
18512
18513 let mut insert = self.parse_insert(replace_token)?;
18514 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18515 *replace_into = true;
18516 }
18517
18518 Ok(insert)
18519 }
18520
18521 fn parse_insert_setexpr_boxed(
18525 &mut self,
18526 insert_token: TokenWithSpan,
18527 ) -> Result<Box<SetExpr>, ParserError> {
18528 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18529 }
18530
    /// Parses an INSERT statement body; `insert_token` is the already-consumed
    /// `INSERT` (or `REPLACE`) token.
    ///
    /// Also handles Hive's `INSERT ... DIRECTORY` form, which produces a
    /// `Statement::Directory` instead of `Statement::Insert`.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `OR REPLACE|ROLLBACK|ABORT|FAIL|IGNORE`.
        let or = self.parse_conflict_clause();
        // MySQL-only priority modifiers.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Always false here; `parse_replace` flips this flag on the returned
        // statement for MySQL REPLACE.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        if self.parse_keyword(Keyword::DIRECTORY) {
            // Hive: INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>'
            //       [STORED AS <format>] <query>
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional TABLE keyword, then the insertion target.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional table alias (dialect-dependent); guarded so that a
            // following subquery or DEFAULT/VALUES keyword is not mistaken
            // for an alias identifier.
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            // Column list, partition clause, post-partition columns (Hive),
            // OUTPUT clause, and the row source (query or SET assignments).
            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT ... DEFAULT VALUES` carries no columns or source.
                (vec![], None, vec![], None, None, vec![])
            } else {
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    // ClickHouse FORMAT/SETTINGS follow: no inline source here.
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    // MySQL-style `INSERT ... SET col = val, ...`.
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // ClickHouse: SETTINGS then `FORMAT <ident>` after the source.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias (col_aliases)` used by ON DUPLICATE KEY UPDATE.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // Conflict handling: Postgres-style `ON CONFLICT ... DO ...` or
            // MySQL-style `ON DUPLICATE KEY UPDATE ...`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Optional conflict target: a named constraint or a
                    // parenthesized column list.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            // Postgres-style RETURNING clause.
            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18757
18758 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18762 let ident = self.parse_identifier()?;
18763 let values = self
18764 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18765 .unwrap_or_default();
18766
18767 Ok(InputFormatClause { ident, values })
18768 }
18769
    /// Returns true when the next two tokens are `(` followed by the SELECT
    /// keyword, i.e. a parenthesized subquery begins here. Consumes nothing.
    fn peek_subquery_start(&mut self) -> bool {
        matches!(
            self.peek_tokens_ref(),
            [
                TokenWithSpan {
                    token: Token::LParen,
                    ..
                },
                TokenWithSpan {
                    token: Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    ..
                },
            ]
        )
    }
18790
    /// Returns true when the next two tokens are `(` followed by SELECT or
    /// WITH, i.e. a parenthesized subquery or CTE begins here. Consumes
    /// nothing.
    fn peek_subquery_or_cte_start(&mut self) -> bool {
        matches!(
            self.peek_tokens_ref(),
            [
                TokenWithSpan {
                    token: Token::LParen,
                    ..
                },
                TokenWithSpan {
                    token: Token::Word(Word {
                        keyword: Keyword::SELECT | Keyword::WITH,
                        ..
                    }),
                    ..
                },
            ]
        )
    }
18812
18813 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18814 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18815 Some(SqliteOnConflict::Replace)
18816 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18817 Some(SqliteOnConflict::Rollback)
18818 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18819 Some(SqliteOnConflict::Abort)
18820 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18821 Some(SqliteOnConflict::Fail)
18822 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18823 Some(SqliteOnConflict::Ignore)
18824 } else if self.parse_keyword(Keyword::REPLACE) {
18825 Some(SqliteOnConflict::Replace)
18826 } else {
18827 None
18828 }
18829 }
18830
18831 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18833 if self.parse_keyword(Keyword::PARTITION) {
18834 self.expect_token(&Token::LParen)?;
18835 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18836 self.expect_token(&Token::RParen)?;
18837 Ok(partition_cols)
18838 } else {
18839 Ok(None)
18840 }
18841 }
18842
18843 pub fn parse_load_data_table_format(
18845 &mut self,
18846 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18847 if self.parse_keyword(Keyword::INPUTFORMAT) {
18848 let input_format = self.parse_expr()?;
18849 self.expect_keyword_is(Keyword::SERDE)?;
18850 let serde = self.parse_expr()?;
18851 Ok(Some(HiveLoadDataFormat {
18852 input_format,
18853 serde,
18854 }))
18855 } else {
18856 Ok(None)
18857 }
18858 }
18859
18860 fn parse_update_setexpr_boxed(
18864 &mut self,
18865 update_token: TokenWithSpan,
18866 ) -> Result<Box<SetExpr>, ParserError> {
18867 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18868 }
18869
    /// Parses an UPDATE statement body (the UPDATE keyword's token is passed
    /// in as `update_token`). Clauses are consumed strictly in grammar order:
    /// hints, conflict clause, target table, optional FROM (before SET),
    /// SET assignments, OUTPUT, optional FROM (after SET), WHERE, RETURNING,
    /// ORDER BY, LIMIT.
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `UPDATE OR <conflict>`; consumes nothing when absent.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // Some dialects place FROM before SET; remember which position was used
        // so the AST can round-trip the original clause order.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        // T-SQL style OUTPUT clause, if present.
        let output = self.maybe_parse_output_clause()?;

        // Only look for an after-SET FROM when none was parsed before SET.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // ORDER BY on UPDATE is dialect-gated (e.g. MySQL-style UPDATE ... ORDER BY).
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18931
18932 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18934 let target = self.parse_assignment_target()?;
18935 self.expect_token(&Token::Eq)?;
18936 let value = self.parse_expr()?;
18937 Ok(Assignment { target, value })
18938 }
18939
18940 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18942 if self.consume_token(&Token::LParen) {
18943 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18944 self.expect_token(&Token::RParen)?;
18945 Ok(AssignmentTarget::Tuple(columns))
18946 } else {
18947 let column = self.parse_object_name(false)?;
18948 Ok(AssignmentTarget::ColumnName(column))
18949 }
18950 }
18951
    /// Parses a single function-call argument, trying the dialect's named-argument
    /// form first (with backtracking via `maybe_parse`), then falling back to an
    /// unnamed (positional) argument.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // First attempt a named argument. The name is either a full expression
        // or a plain identifier depending on the dialect; maybe_parse rewinds
        // the token stream if the speculative parse fails.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Fallback: an unnamed argument, which may be a wildcard.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            // A bare `*` may carry trailing modifiers (EXCLUDE/EXCEPT/REPLACE/
            // RENAME/ILIKE) in dialects that support them.
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                // Only keep the options wrapper when at least one modifier was
                // actually present; otherwise stay with the plain wildcard.
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
19001
    /// Parses the operator separating a named argument's name from its value
    /// (`VALUE`, `=>`, `=`, `:=`, or `:` depending on dialect support).
    /// On failure the consumed token is pushed back before erroring.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        // The VALUE keyword form is checked before consuming any raw token.
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Rewind so the caller (typically inside maybe_parse) can
                // backtrack cleanly; then report the unexpected token.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
19030
19031 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
19033 if self.consume_token(&Token::RParen) {
19034 Ok(vec![])
19035 } else {
19036 let args = self.parse_comma_separated(Parser::parse_function_args)?;
19037 self.expect_token(&Token::RParen)?;
19038 Ok(args)
19039 }
19040 }
19041
    /// Parses the arguments of a table function up to and including the closing
    /// `)`, allowing a trailing ClickHouse-style SETTINGS list to terminate the
    /// argument sequence. The opening `(` must already have been consumed.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Each iteration first checks whether a SETTINGS clause starts here
        // (which ends the plain-argument portion), otherwise parses one more
        // argument and stops when the comma-separated list is exhausted.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
19062
    /// Parses the full interior of a function call's parentheses: optional
    /// ALL/DISTINCT, the argument list, and any trailing clauses (null
    /// treatment, ORDER BY, LIMIT, HAVING bound, SEPARATOR, ON OVERFLOW,
    /// JSON null/RETURNING clauses). Consumes the closing `)`.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON-style clauses may legally appear before the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list (possibly with only the JSON clauses above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        // Optional ALL/DISTINCT, then the comma-separated arguments.
        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // `IGNORE NULLS` / `RESPECT NULLS` inside the call, where supported.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        // Aggregate-internal ORDER BY (e.g. STRING_AGG/ARRAY_AGG).
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery `HAVING MIN|MAX <expr>` bound.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // expect_one_of_keywords only returns the listed keywords; this
                // arm guards against an internal inconsistency.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL GROUP_CONCAT `SEPARATOR <value>`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        // LISTAGG `ON OVERFLOW ...` clause.
        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON clauses may also appear after the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
19156
19157 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
19158 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
19159 Some(JsonNullClause::AbsentOnNull)
19160 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
19161 Some(JsonNullClause::NullOnNull)
19162 } else {
19163 None
19164 }
19165 }
19166
19167 fn maybe_parse_json_returning_clause(
19168 &mut self,
19169 ) -> Result<Option<JsonReturningClause>, ParserError> {
19170 if self.parse_keyword(Keyword::RETURNING) {
19171 let data_type = self.parse_data_type()?;
19172 Ok(Some(JsonReturningClause { data_type }))
19173 } else {
19174 Ok(None)
19175 }
19176 }
19177
19178 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
19179 let loc = self.peek_token_ref().span.start;
19180 match (
19181 self.parse_keyword(Keyword::ALL),
19182 self.parse_keyword(Keyword::DISTINCT),
19183 ) {
19184 (true, false) => Ok(Some(DuplicateTreatment::All)),
19185 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
19186 (false, false) => Ok(None),
19187 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
19188 }
19189 }
19190
    /// Parses one projection item of a SELECT list: a wildcard (qualified or
    /// not, with optional modifiers), an expression with an optional alias or
    /// alias list, or dialect-specific forms such as `alias = expr`.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Some dialects reserve keywords that may prefix a select item; the
        // matched keyword is carried as an identifier prefix on the expression.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `alias.*` — qualified wildcard, possibly with trailing modifiers.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*`, possibly with trailing modifiers.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted identifier literally spelled `from` almost certainly
            // means the projection list ended prematurely; report it clearly.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // T-SQL style `alias = expr` projection (dialect-gated).
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The guard above established this is an identifier; the
                // let-else only exists to destructure it.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — expression-qualified wildcard (dialect-gated).
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (a, b, ...)` — multi-column alias (dialect-gated).
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
19265
19266 pub fn parse_wildcard_additional_options(
19270 &mut self,
19271 wildcard_token: TokenWithSpan,
19272 ) -> Result<WildcardAdditionalOptions, ParserError> {
19273 let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
19274 self.parse_optional_select_item_ilike()?
19275 } else {
19276 None
19277 };
19278 let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
19279 {
19280 self.parse_optional_select_item_exclude()?
19281 } else {
19282 None
19283 };
19284 let opt_except = if self.dialect.supports_select_wildcard_except() {
19285 self.parse_optional_select_item_except()?
19286 } else {
19287 None
19288 };
19289 let opt_replace = if self.dialect.supports_select_wildcard_replace() {
19290 self.parse_optional_select_item_replace()?
19291 } else {
19292 None
19293 };
19294 let opt_rename = if self.dialect.supports_select_wildcard_rename() {
19295 self.parse_optional_select_item_rename()?
19296 } else {
19297 None
19298 };
19299
19300 let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
19301 self.maybe_parse_select_item_alias()?
19302 } else {
19303 None
19304 };
19305
19306 Ok(WildcardAdditionalOptions {
19307 wildcard_token: wildcard_token.into(),
19308 opt_ilike,
19309 opt_exclude,
19310 opt_except,
19311 opt_rename,
19312 opt_replace,
19313 opt_alias,
19314 })
19315 }
19316
19317 pub fn parse_optional_select_item_ilike(
19321 &mut self,
19322 ) -> Result<Option<IlikeSelectItem>, ParserError> {
19323 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
19324 let next_token = self.next_token();
19325 let pattern = match next_token.token {
19326 Token::SingleQuotedString(s) => s,
19327 _ => return self.expected("ilike pattern", next_token),
19328 };
19329 Some(IlikeSelectItem { pattern })
19330 } else {
19331 None
19332 };
19333 Ok(opt_ilike)
19334 }
19335
19336 pub fn parse_optional_select_item_exclude(
19340 &mut self,
19341 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
19342 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
19343 if self.consume_token(&Token::LParen) {
19344 let columns =
19345 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
19346 self.expect_token(&Token::RParen)?;
19347 Some(ExcludeSelectItem::Multiple(columns))
19348 } else {
19349 let column = self.parse_object_name(false)?;
19350 Some(ExcludeSelectItem::Single(column))
19351 }
19352 } else {
19353 None
19354 };
19355
19356 Ok(opt_exclude)
19357 }
19358
19359 pub fn parse_optional_select_item_except(
19363 &mut self,
19364 ) -> Result<Option<ExceptSelectItem>, ParserError> {
19365 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
19366 if self.peek_token_ref().token == Token::LParen {
19367 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
19368 match &idents[..] {
19369 [] => {
19370 return self.expected_ref(
19371 "at least one column should be parsed by the expect clause",
19372 self.peek_token_ref(),
19373 )?;
19374 }
19375 [first, idents @ ..] => Some(ExceptSelectItem {
19376 first_element: first.clone(),
19377 additional_elements: idents.to_vec(),
19378 }),
19379 }
19380 } else {
19381 let ident = self.parse_identifier()?;
19383 Some(ExceptSelectItem {
19384 first_element: ident,
19385 additional_elements: vec![],
19386 })
19387 }
19388 } else {
19389 None
19390 };
19391
19392 Ok(opt_except)
19393 }
19394
19395 pub fn parse_optional_select_item_rename(
19397 &mut self,
19398 ) -> Result<Option<RenameSelectItem>, ParserError> {
19399 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
19400 if self.consume_token(&Token::LParen) {
19401 let idents =
19402 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
19403 self.expect_token(&Token::RParen)?;
19404 Some(RenameSelectItem::Multiple(idents))
19405 } else {
19406 let ident = self.parse_identifier_with_alias()?;
19407 Some(RenameSelectItem::Single(ident))
19408 }
19409 } else {
19410 None
19411 };
19412
19413 Ok(opt_rename)
19414 }
19415
19416 pub fn parse_optional_select_item_replace(
19418 &mut self,
19419 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
19420 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
19421 if self.consume_token(&Token::LParen) {
19422 let items = self.parse_comma_separated(|parser| {
19423 Ok(Box::new(parser.parse_replace_elements()?))
19424 })?;
19425 self.expect_token(&Token::RParen)?;
19426 Some(ReplaceSelectItem { items })
19427 } else {
19428 let tok = self.next_token();
19429 return self.expected("( after REPLACE but", tok);
19430 }
19431 } else {
19432 None
19433 };
19434
19435 Ok(opt_replace)
19436 }
19437 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
19439 let expr = self.parse_expr()?;
19440 let as_keyword = self.parse_keyword(Keyword::AS);
19441 let ident = self.parse_identifier()?;
19442 Ok(ReplaceSelectElement {
19443 expr,
19444 column_name: ident,
19445 as_keyword,
19446 })
19447 }
19448
19449 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19452 if self.parse_keyword(Keyword::ASC) {
19453 Some(true)
19454 } else if self.parse_keyword(Keyword::DESC) {
19455 Some(false)
19456 } else {
19457 None
19458 }
19459 }
19460
19461 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19463 self.parse_order_by_expr_inner(false)
19464 .map(|(order_by, _)| order_by)
19465 }
19466
19467 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19469 self.parse_order_by_expr_inner(true)
19470 .map(|(column, operator_class)| IndexColumn {
19471 column,
19472 operator_class,
19473 })
19474 }
19475
    /// Shared implementation for ORDER BY elements and CREATE INDEX columns.
    /// When `with_operator_class` is true, an optional operator-class name may
    /// follow the expression (PostgreSQL index syntax); it is only attempted
    /// when the next keyword cannot instead start the ordering options.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // ASC/DESC/NULLS/WITH after the expression belong to the ordering
            // options (or WITH FILL), not to an operator class — so skip the
            // speculative object-name parse in that case to avoid ambiguity.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        // ASC/DESC and NULLS FIRST/LAST.
        let options = self.parse_order_by_options()?;

        // ClickHouse `WITH FILL`, dialect-gated.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19516
19517 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19518 let asc = self.parse_asc_desc();
19519
19520 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19521 Some(true)
19522 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19523 Some(false)
19524 } else {
19525 None
19526 };
19527
19528 Ok(OrderByOptions { asc, nulls_first })
19529 }
19530
19531 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19535 let from = if self.parse_keyword(Keyword::FROM) {
19536 Some(self.parse_expr()?)
19537 } else {
19538 None
19539 };
19540
19541 let to = if self.parse_keyword(Keyword::TO) {
19542 Some(self.parse_expr()?)
19543 } else {
19544 None
19545 };
19546
19547 let step = if self.parse_keyword(Keyword::STEP) {
19548 Some(self.parse_expr()?)
19549 } else {
19550 None
19551 };
19552
19553 Ok(WithFill { from, to, step })
19554 }
19555
19556 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19559 if !self.parse_keyword(Keyword::INTERPOLATE) {
19560 return Ok(None);
19561 }
19562
19563 if self.consume_token(&Token::LParen) {
19564 let interpolations =
19565 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19566 self.expect_token(&Token::RParen)?;
19567 return Ok(Some(Interpolate {
19569 exprs: Some(interpolations),
19570 }));
19571 }
19572
19573 Ok(Some(Interpolate { exprs: None }))
19575 }
19576
19577 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19579 let column = self.parse_identifier()?;
19580 let expr = if self.parse_keyword(Keyword::AS) {
19581 Some(self.parse_expr()?)
19582 } else {
19583 None
19584 };
19585 Ok(InterpolateExpr { column, expr })
19586 }
19587
19588 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
19591 let quantity = if self.consume_token(&Token::LParen) {
19592 let quantity = self.parse_expr()?;
19593 self.expect_token(&Token::RParen)?;
19594 Some(TopQuantity::Expr(quantity))
19595 } else {
19596 let next_token = self.next_token();
19597 let quantity = match next_token.token {
19598 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
19599 _ => self.expected("literal int", next_token)?,
19600 };
19601 Some(TopQuantity::Constant(quantity))
19602 };
19603
19604 let percent = self.parse_keyword(Keyword::PERCENT);
19605
19606 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
19607
19608 Ok(Top {
19609 with_ties,
19610 percent,
19611 quantity,
19612 })
19613 }
19614
19615 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19617 if self.parse_keyword(Keyword::ALL) {
19618 Ok(None)
19619 } else {
19620 Ok(Some(self.parse_expr()?))
19621 }
19622 }
19623
19624 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19626 let value = self.parse_expr()?;
19627 let rows = if self.parse_keyword(Keyword::ROW) {
19628 OffsetRows::Row
19629 } else if self.parse_keyword(Keyword::ROWS) {
19630 OffsetRows::Rows
19631 } else {
19632 OffsetRows::None
19633 };
19634 Ok(Offset { value, rows })
19635 }
19636
19637 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
19639 let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
19640
19641 let (quantity, percent) = if self
19642 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
19643 .is_some()
19644 {
19645 (None, false)
19646 } else {
19647 let quantity = Expr::Value(self.parse_value()?);
19648 let percent = self.parse_keyword(Keyword::PERCENT);
19649 let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
19650 (Some(quantity), percent)
19651 };
19652
19653 let with_ties = if self.parse_keyword(Keyword::ONLY) {
19654 false
19655 } else {
19656 self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
19657 };
19658
19659 Ok(Fetch {
19660 with_ties,
19661 percent,
19662 quantity,
19663 })
19664 }
19665
19666 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
19668 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
19669 Keyword::UPDATE => LockType::Update,
19670 Keyword::SHARE => LockType::Share,
19671 unexpected_keyword => return Err(ParserError::ParserError(
19672 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
19673 )),
19674 };
19675 let of = if self.parse_keyword(Keyword::OF) {
19676 Some(self.parse_object_name(false)?)
19677 } else {
19678 None
19679 };
19680 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
19681 Some(NonBlock::Nowait)
19682 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
19683 Some(NonBlock::SkipLocked)
19684 } else {
19685 None
19686 };
19687 Ok(LockClause {
19688 lock_type,
19689 of,
19690 nonblock,
19691 })
19692 }
19693
19694 pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
19696 self.expect_keyword(Keyword::LOCK)?;
19697
19698 if self.peek_keyword(Keyword::TABLES) {
19699 return self.expected_ref("TABLE or a table name", self.peek_token_ref());
19700 }
19701
19702 let _ = self.parse_keyword(Keyword::TABLE);
19703 let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
19704 let lock_mode = if self.parse_keyword(Keyword::IN) {
19705 let lock_mode = self.parse_lock_table_mode()?;
19706 self.expect_keyword(Keyword::MODE)?;
19707 Some(lock_mode)
19708 } else {
19709 None
19710 };
19711 let nowait = self.parse_keyword(Keyword::NOWAIT);
19712
19713 Ok(Lock {
19714 tables,
19715 lock_mode,
19716 nowait,
19717 })
19718 }
19719
19720 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19721 let only = self.parse_keyword(Keyword::ONLY);
19722 let name = self.parse_object_name(false)?;
19723 let has_asterisk = self.consume_token(&Token::Mul);
19724
19725 Ok(LockTableTarget {
19726 name,
19727 only,
19728 has_asterisk,
19729 })
19730 }
19731
19732 fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
19733 if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
19734 Ok(LockTableMode::AccessShare)
19735 } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
19736 Ok(LockTableMode::AccessExclusive)
19737 } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
19738 Ok(LockTableMode::RowShare)
19739 } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
19740 Ok(LockTableMode::RowExclusive)
19741 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
19742 Ok(LockTableMode::ShareUpdateExclusive)
19743 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
19744 Ok(LockTableMode::ShareRowExclusive)
19745 } else if self.parse_keyword(Keyword::SHARE) {
19746 Ok(LockTableMode::Share)
19747 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19748 Ok(LockTableMode::Exclusive)
19749 } else {
19750 self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
19751 }
19752 }
19753
19754 pub fn parse_values(
19756 &mut self,
19757 allow_empty: bool,
19758 value_keyword: bool,
19759 ) -> Result<Values, ParserError> {
19760 let mut explicit_row = false;
19761
19762 let rows = self.parse_comma_separated(|parser| {
19763 if parser.parse_keyword(Keyword::ROW) {
19764 explicit_row = true;
19765 }
19766
19767 parser.expect_token(&Token::LParen)?;
19768 if allow_empty && parser.peek_token().token == Token::RParen {
19769 parser.next_token();
19770 Ok(vec![])
19771 } else {
19772 let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
19773 parser.expect_token(&Token::RParen)?;
19774 Ok(exprs)
19775 }
19776 })?;
19777 Ok(Values {
19778 explicit_row,
19779 rows,
19780 value_keyword,
19781 })
19782 }
19783
19784 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19786 self.expect_keyword_is(Keyword::TRANSACTION)?;
19787 Ok(Statement::StartTransaction {
19788 modes: self.parse_transaction_modes()?,
19789 begin: false,
19790 transaction: Some(BeginTransactionKind::Transaction),
19791 modifier: None,
19792 statements: vec![],
19793 exception: None,
19794 has_end_keyword: false,
19795 })
19796 }
19797
19798 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19800 if !self.dialect.supports_start_transaction_modifier() {
19801 None
19802 } else if self.parse_keyword(Keyword::DEFERRED) {
19803 Some(TransactionModifier::Deferred)
19804 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19805 Some(TransactionModifier::Immediate)
19806 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19807 Some(TransactionModifier::Exclusive)
19808 } else if self.parse_keyword(Keyword::TRY) {
19809 Some(TransactionModifier::Try)
19810 } else if self.parse_keyword(Keyword::CATCH) {
19811 Some(TransactionModifier::Catch)
19812 } else {
19813 None
19814 }
19815 }
19816
19817 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19819 let modifier = self.parse_transaction_modifier();
19820 let transaction =
19821 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19822 {
19823 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19824 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19825 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19826 _ => None,
19827 };
19828 Ok(Statement::StartTransaction {
19829 modes: self.parse_transaction_modes()?,
19830 begin: true,
19831 transaction,
19832 modifier,
19833 statements: vec![],
19834 exception: None,
19835 has_end_keyword: false,
19836 })
19837 }
19838
    /// Parses a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END` block into a
    /// `StartTransaction` statement carrying the block's statements and any
    /// exception handlers.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        // Body statements run until EXCEPTION or END is seen (not consumed).
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // One WHEN arm per iteration, until the closing END.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // One or more error identifiers, separated by OR, terminated
                // by THEN.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // OR is optional before THEN; maybe_parse backtracks when
                    // it is absent.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                // Handler statements run until the next WHEN arm or END.
                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19884
19885 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19887 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19888 None
19889 } else if self.parse_keyword(Keyword::TRY) {
19890 Some(TransactionModifier::Try)
19891 } else if self.parse_keyword(Keyword::CATCH) {
19892 Some(TransactionModifier::Catch)
19893 } else {
19894 None
19895 };
19896 Ok(Statement::Commit {
19897 chain: self.parse_commit_rollback_chain()?,
19898 end: true,
19899 modifier,
19900 })
19901 }
19902
    /// Parses zero or more comma-separated transaction modes (isolation level
    /// and/or access mode). A trailing comma makes the next mode mandatory;
    /// otherwise the loop ends at the first token that starts no mode.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                // Multi-word levels are tried before single-word ones.
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                // A comma promised another mode; not finding one is an error.
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            // A comma after a mode makes the next iteration mandatory.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19941
19942 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19944 Ok(Statement::Commit {
19945 chain: self.parse_commit_rollback_chain()?,
19946 end: false,
19947 modifier: None,
19948 })
19949 }
19950
19951 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19953 let chain = self.parse_commit_rollback_chain()?;
19954 let savepoint = self.parse_rollback_savepoint()?;
19955
19956 Ok(Statement::Rollback { chain, savepoint })
19957 }
19958
19959 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19961 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19962 if self.parse_keyword(Keyword::AND) {
19963 let chain = !self.parse_keyword(Keyword::NO);
19964 self.expect_keyword_is(Keyword::CHAIN)?;
19965 Ok(chain)
19966 } else {
19967 Ok(false)
19968 }
19969 }
19970
19971 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19973 if self.parse_keyword(Keyword::TO) {
19974 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19975 let savepoint = self.parse_identifier()?;
19976
19977 Ok(Some(savepoint))
19978 } else {
19979 Ok(None)
19980 }
19981 }
19982
    /// Parses the remainder of a `RAISERROR (<msg>, <severity>, <state>
    /// [, <arg>, ...]) [WITH <option>, ...]` statement.
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional substitution arguments after the three mandatory ones.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        // Optional `WITH LOG | NOWAIT | SETERROR` option list.
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
20010
    /// Parses a single RAISERROR option: `LOG`, `NOWAIT`, or `SETERROR`.
    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
            Keyword::LOG => Ok(RaisErrorOption::Log),
            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
            // Catch-all required for exhaustiveness; not expected to be hit
            // since expect_one_of_keywords limits the result to the list above.
            _ => self.expected_ref(
                "LOG, NOWAIT OR SETERROR raiserror option",
                self.peek_token_ref(),
            ),
        }
    }
20023
    /// Parses a `THROW [<error_number>, <message>, <state>]` statement.
    /// Either all three arguments are present or none are (a bare re-throw).
    pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
        self.expect_keyword_is(Keyword::THROW)?;

        // maybe_parse backtracks if no expression follows, yielding None.
        let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
        let (message, state) = if error_number.is_some() {
            self.expect_token(&Token::Comma)?;
            let message = Box::new(self.parse_expr()?);
            self.expect_token(&Token::Comma)?;
            let state = Box::new(self.parse_expr()?);
            (Some(message), Some(state))
        } else {
            (None, None)
        };

        Ok(ThrowStatement {
            error_number,
            message,
            state,
        })
    }
20047
20048 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
20050 let prepare = self.parse_keyword(Keyword::PREPARE);
20051 let name = self.parse_identifier()?;
20052 Ok(Statement::Deallocate { name, prepare })
20053 }
20054
    /// Parses the remainder of an `EXECUTE [IMMEDIATE] [<name>] [(<params>)]
    /// [INTO ...] [USING ...] [OUTPUT] [DEFAULT]` statement.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // EXECUTE IMMEDIATE and `EXECUTE (...)` have no statement name;
        // otherwise the next tokens name the prepared statement.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Pick the token that terminates the (possibly empty) parameter list:
        // the closing paren when parenthesized; otherwise EOF, one of the
        // follow-on clause keywords, or a semicolon.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
20114
20115 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
20117 let name = self.parse_identifier()?;
20118
20119 let mut data_types = vec![];
20120 if self.consume_token(&Token::LParen) {
20121 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
20122 self.expect_token(&Token::RParen)?;
20123 }
20124
20125 self.expect_keyword_is(Keyword::AS)?;
20126 let statement = Box::new(self.parse_statement()?);
20127 Ok(Statement::Prepare {
20128 name,
20129 data_types,
20130 statement,
20131 })
20132 }
20133
20134 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
20136 self.expect_keyword(Keyword::UNLOAD)?;
20137 self.expect_token(&Token::LParen)?;
20138 let (query, query_text) =
20139 if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
20140 (None, Some(self.parse_literal_string()?))
20141 } else {
20142 (Some(self.parse_query()?), None)
20143 };
20144 self.expect_token(&Token::RParen)?;
20145
20146 self.expect_keyword_is(Keyword::TO)?;
20147 let to = self.parse_identifier()?;
20148 let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
20149 Some(self.parse_iam_role_kind()?)
20150 } else {
20151 None
20152 };
20153 let with = self.parse_options(Keyword::WITH)?;
20154 let mut options = vec![];
20155 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
20156 options.push(opt);
20157 }
20158 Ok(Statement::Unload {
20159 query,
20160 query_text,
20161 to,
20162 auth,
20163 with,
20164 options,
20165 })
20166 }
20167
20168 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
20169 let temporary = self
20170 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
20171 .is_some();
20172 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
20173 let table = self.parse_keyword(Keyword::TABLE);
20174 let name = self.parse_object_name(false)?;
20175
20176 Ok(SelectInto {
20177 temporary,
20178 unlogged,
20179 table,
20180 name,
20181 })
20182 }
20183
20184 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
20185 let v = self.parse_value()?;
20186 match &v.value {
20187 Value::SingleQuotedString(_) => Ok(v),
20188 Value::DoubleQuotedString(_) => Ok(v),
20189 Value::Number(_, _) => Ok(v),
20190 Value::Placeholder(_) => Ok(v),
20191 _ => {
20192 self.prev_token();
20193 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
20194 }
20195 }
20196 }
20197
    /// Parses the remainder of a `PRAGMA <name> [= <value> | (<value>)]`
    /// statement; `is_eq` records which of the two value syntaxes was used.
    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        if self.consume_token(&Token::LParen) {
            // Call form: PRAGMA name(value)
            let value = self.parse_pragma_value()?;
            self.expect_token(&Token::RParen)?;
            Ok(Statement::Pragma {
                name,
                value: Some(value),
                is_eq: false,
            })
        } else if self.consume_token(&Token::Eq) {
            // Assignment form: PRAGMA name = value
            Ok(Statement::Pragma {
                name,
                value: Some(self.parse_pragma_value()?),
                is_eq: true,
            })
        } else {
            // Query form: PRAGMA name
            Ok(Statement::Pragma {
                name,
                value: None,
                is_eq: false,
            })
        }
    }
20223
20224 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
20226 let extension_name = self.parse_identifier()?;
20227
20228 Ok(Statement::Install { extension_name })
20229 }
20230
    /// Parses the remainder of a `LOAD` statement: either `LOAD <extension>`
    /// for dialects supporting loadable extensions, or
    /// `LOAD DATA [LOCAL] INPATH '<path>' [OVERWRITE] INTO TABLE <table>
    /// [PARTITION ...] [format clause]` for dialects supporting LOAD DATA.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected_ref(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token_ref(),
            )
        }
    }
20261
    /// Parses the remainder of an `OPTIMIZE [TABLE] <name>` statement with
    /// its optional clauses: `ON CLUSTER`, `PARTITION [ID]`, `FINAL`,
    /// `DEDUPLICATE [BY <expr>]`, `WHERE <predicate>`, and `ZORDER BY (...)`.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // `PARTITION ID '<id>'` selects by identifier; `PARTITION <expr>` by value.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        // Bare DEDUPLICATE maps to Deduplicate::All; BY carries an expression.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
20330
    /// Parses the remainder of a `CREATE [TEMPORARY] SEQUENCE [IF NOT EXISTS]
    /// <name> [AS <data_type>] [<options>...] [OWNED BY {<name> | NONE}]`
    /// statement. `temporary` was determined by the caller.
    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // Optional `AS <data_type>` giving the sequence's value type.
        let mut data_type: Option<DataType> = None;
        if self.parse_keywords(&[Keyword::AS]) {
            data_type = Some(self.parse_data_type()?)
        }
        let sequence_options = self.parse_create_sequence_options()?;
        // `OWNED BY NONE` is encoded as an ObjectName containing "NONE".
        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
            if self.parse_keywords(&[Keyword::NONE]) {
                Some(ObjectName::from(vec![Ident::new("NONE")]))
            } else {
                Some(self.parse_object_name(false)?)
            }
        } else {
            None
        };
        Ok(Statement::CreateSequence {
            temporary,
            if_not_exists,
            name,
            data_type,
            sequence_options,
            owned_by,
        })
    }
20366
    /// Parses the optional clauses of CREATE SEQUENCE in this fixed order:
    /// `INCREMENT [BY]`, `[NO] MINVALUE`, `[NO] MAXVALUE`, `START [WITH]`,
    /// `CACHE`, `[NO] CYCLE`. The boolean carried by `IncrementBy` and
    /// `StartWith` records whether the optional BY/WITH keyword was present.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // NOTE: Cycle(true) encodes `NO CYCLE` and Cycle(false) encodes
        // `CYCLE` — the flag records the presence of the NO keyword.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
20411
    /// Parses the remainder of a `CREATE SERVER [IF NOT EXISTS] <name>
    /// [TYPE <type>] [VERSION <version>] FOREIGN DATA WRAPPER <fdw>
    /// [OPTIONS (<key> <value>, ...)]` statement.
    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        let server_type = if self.parse_keyword(Keyword::TYPE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let version = if self.parse_keyword(Keyword::VERSION) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
        let foreign_data_wrapper = self.parse_object_name(false)?;

        // Each option is a bare `key value` pair of identifiers.
        let mut options = None;
        if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            options = Some(self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?);
            self.expect_token(&Token::RParen)?;
        }

        Ok(Statement::CreateServer(CreateServerStatement {
            name,
            if_not_exists: ine,
            server_type,
            version,
            foreign_data_wrapper,
            options,
        }))
    }
20454
    /// Parses the remainder of a `CREATE FOREIGN DATA WRAPPER <name>
    /// [[NO] HANDLER <fn>] [[NO] VALIDATOR <fn>] [OPTIONS (...)]` statement.
    pub fn parse_create_foreign_data_wrapper(
        &mut self,
    ) -> Result<CreateForeignDataWrapper, ParserError> {
        let name = self.parse_identifier()?;

        // `HANDLER <fn>` names a routine; `NO HANDLER` is recorded explicitly
        // (distinct from the clause being absent, which is None).
        let handler = if self.parse_keyword(Keyword::HANDLER) {
            Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
        } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
            Some(FdwRoutineClause::NoFunction)
        } else {
            None
        };

        let validator = if self.parse_keyword(Keyword::VALIDATOR) {
            Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
        } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
            Some(FdwRoutineClause::NoFunction)
        } else {
            None
        };

        // Options are bare `key value` pairs of identifiers.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignDataWrapper {
            name,
            handler,
            validator,
            options,
        })
    }
20499
    /// Parses the remainder of a `CREATE FOREIGN TABLE [IF NOT EXISTS] <name>
    /// (<columns>) SERVER <server> [OPTIONS (...)]` statement.
    pub fn parse_create_foreign_table(&mut self) -> Result<CreateForeignTable, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // Constraints, if any, are parsed but discarded for foreign tables.
        let (columns, _constraints) = self.parse_columns()?;
        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        // Options are bare `key value` pairs of identifiers.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignTable {
            name,
            if_not_exists,
            columns,
            server_name,
            options,
        })
    }
20531
    /// Parses the remainder of a `CREATE PUBLICATION <name>
    /// [FOR ALL TABLES | FOR TABLE <t>, ... | FOR TABLES IN SCHEMA <s>, ...]
    /// [WITH (...)]` statement.
    pub fn parse_create_publication(&mut self) -> Result<CreatePublication, ParserError> {
        let name = self.parse_identifier()?;

        let target = if self.parse_keyword(Keyword::FOR) {
            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES]) {
                Some(PublicationTarget::AllTables)
            } else if self.parse_keyword(Keyword::TABLE) {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                Some(PublicationTarget::Tables(tables))
            } else if self.parse_keywords(&[Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
                let schemas = self.parse_comma_separated(|p| p.parse_identifier())?;
                Some(PublicationTarget::TablesInSchema(schemas))
            } else {
                return self.expected_ref(
                    "ALL TABLES, TABLE, or TABLES IN SCHEMA after FOR",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        let with_options = self.parse_options(Keyword::WITH)?;

        Ok(CreatePublication {
            name,
            target,
            with_options,
        })
    }
20565
20566 pub fn parse_create_subscription(&mut self) -> Result<CreateSubscription, ParserError> {
20570 let name = self.parse_identifier()?;
20571 self.expect_keyword_is(Keyword::CONNECTION)?;
20572 let connection = self.parse_value()?.value;
20573 self.expect_keyword_is(Keyword::PUBLICATION)?;
20574 let publications = self.parse_comma_separated(|p| p.parse_identifier())?;
20575 let with_options = self.parse_options(Keyword::WITH)?;
20576
20577 Ok(CreateSubscription {
20578 name,
20579 connection,
20580 publications,
20581 with_options,
20582 })
20583 }
20584
    /// Parses the remainder of a `CREATE CAST (<source> AS <target>)
    /// {WITH FUNCTION <fn>[(<types>)] | WITHOUT FUNCTION | WITH INOUT}
    /// [AS ASSIGNMENT | AS IMPLICIT]` statement.
    pub fn parse_create_cast(&mut self) -> Result<CreateCast, ParserError> {
        self.expect_token(&Token::LParen)?;
        let source_type = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::AS)?;
        let target_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // Exactly one of the three function clauses is required.
        let function_kind = if self.parse_keywords(&[Keyword::WITHOUT, Keyword::FUNCTION]) {
            CastFunctionKind::WithoutFunction
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::INOUT]) {
            CastFunctionKind::WithInout
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::FUNCTION]) {
            let function_name = self.parse_object_name(false)?;
            // The argument-type list is optional and may be empty: `fn()`.
            let argument_types = if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                let types = if self.peek_token_ref().token == Token::RParen {
                    vec![]
                } else {
                    self.parse_comma_separated(|p| p.parse_data_type())?
                };
                self.expect_token(&Token::RParen)?;
                types
            } else {
                vec![]
            };
            CastFunctionKind::WithFunction {
                function_name,
                argument_types,
            }
        } else {
            return self.expected_ref(
                "WITH FUNCTION, WITHOUT FUNCTION, or WITH INOUT",
                self.peek_token_ref(),
            );
        };

        // No `AS ...` clause means the cast is explicit-only.
        let cast_context = if self.parse_keyword(Keyword::AS) {
            if self.parse_keyword(Keyword::ASSIGNMENT) {
                CastContext::Assignment
            } else if self.parse_keyword(Keyword::IMPLICIT) {
                CastContext::Implicit
            } else {
                return self.expected_ref("ASSIGNMENT or IMPLICIT after AS", self.peek_token_ref());
            }
        } else {
            CastContext::Explicit
        };

        Ok(CreateCast {
            source_type,
            target_type,
            function_kind,
            cast_context,
        })
    }
20643
20644 pub fn parse_create_conversion(
20648 &mut self,
20649 is_default: bool,
20650 ) -> Result<CreateConversion, ParserError> {
20651 let name = self.parse_object_name(false)?;
20652 self.expect_keyword_is(Keyword::FOR)?;
20653 let source_encoding = self.parse_literal_string()?;
20654 self.expect_keyword_is(Keyword::TO)?;
20655 let destination_encoding = self.parse_literal_string()?;
20656 self.expect_keyword_is(Keyword::FROM)?;
20657 let function_name = self.parse_object_name(false)?;
20658
20659 Ok(CreateConversion {
20660 name,
20661 is_default,
20662 source_encoding,
20663 destination_encoding,
20664 function_name,
20665 })
20666 }
20667
    /// Parses the remainder of a `CREATE [OR REPLACE] [TRUSTED] [PROCEDURAL]
    /// LANGUAGE <name> [HANDLER <fn>] [INLINE <fn>] [[NO] VALIDATOR <fn>]`
    /// statement. The three flags were consumed by the caller.
    pub fn parse_create_language(
        &mut self,
        or_replace: bool,
        trusted: bool,
        procedural: bool,
    ) -> Result<CreateLanguage, ParserError> {
        let name = self.parse_identifier()?;

        let handler = if self.parse_keyword(Keyword::HANDLER) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let inline_handler = if self.parse_keyword(Keyword::INLINE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `NO VALIDATOR` and an absent clause are both represented as None.
        let validator = if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
            None
        } else if self.parse_keyword(Keyword::VALIDATOR) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(CreateLanguage {
            name,
            or_replace,
            trusted,
            procedural,
            handler,
            inline_handler,
            validator,
        })
    }
20709
    /// Parses the remainder of a `CREATE RULE <name> AS ON
    /// {SELECT | INSERT | UPDATE | DELETE} TO <table> [WHERE <cond>]
    /// DO [ALSO | INSTEAD] {NOTHING | <statement> | (<statements>)}` statement.
    pub fn parse_create_rule(&mut self) -> Result<CreateRule, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;
        self.expect_keyword_is(Keyword::ON)?;

        let event = if self.parse_keyword(Keyword::SELECT) {
            RuleEvent::Select
        } else if self.parse_keyword(Keyword::INSERT) {
            RuleEvent::Insert
        } else if self.parse_keyword(Keyword::UPDATE) {
            RuleEvent::Update
        } else if self.parse_keyword(Keyword::DELETE) {
            RuleEvent::Delete
        } else {
            return self.expected_ref(
                "SELECT, INSERT, UPDATE, or DELETE after ON",
                self.peek_token_ref(),
            );
        };

        self.expect_keyword_is(Keyword::TO)?;
        let table = self.parse_object_name(false)?;

        let condition = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::DO)?;

        // ALSO is only consumed when INSTEAD is absent; only the INSTEAD
        // flag is recorded in the AST.
        let instead = self.parse_keyword(Keyword::INSTEAD);
        if !instead {
            let _ = self.parse_keyword(Keyword::ALSO);
        }

        let action = if self.parse_keyword(Keyword::NOTHING) {
            RuleAction::Nothing
        } else if self.peek_token_ref().token == Token::LParen {
            // Parenthesized list of semicolon-separated statements; a
            // trailing semicolon before the closing paren is allowed.
            self.expect_token(&Token::LParen)?;
            let mut stmts = Vec::new();
            loop {
                stmts.push(self.parse_statement()?);
                if !self.consume_token(&Token::SemiColon) {
                    break;
                }
                if self.peek_token_ref().token == Token::RParen {
                    break;
                }
            }
            self.expect_token(&Token::RParen)?;
            RuleAction::Statements(stmts)
        } else {
            let stmt = self.parse_statement()?;
            RuleAction::Statements(vec![stmt])
        };

        Ok(CreateRule {
            name,
            event,
            table,
            condition,
            instead,
            action,
        })
    }
20780
    /// Parses the remainder of a `CREATE STATISTICS [IF NOT EXISTS] <name>
    /// [(<kind>, ...)] ON <expr>, ... FROM <table>` statement. Recognized
    /// kinds are `ndistinct`, `dependencies`, and `mcv` (case-insensitive).
    pub fn parse_create_statistics(&mut self) -> Result<CreateStatistics, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        // The kind list is optional; absence is recorded as an empty Vec.
        let kinds = if self.consume_token(&Token::LParen) {
            let kinds = self.parse_comma_separated(|p| {
                let ident = p.parse_identifier()?;
                match ident.value.to_lowercase().as_str() {
                    "ndistinct" => Ok(StatisticsKind::NDistinct),
                    "dependencies" => Ok(StatisticsKind::Dependencies),
                    "mcv" => Ok(StatisticsKind::Mcv),
                    other => Err(ParserError::ParserError(format!(
                        "Unknown statistics kind: {other}"
                    ))),
                }
            })?;
            self.expect_token(&Token::RParen)?;
            kinds
        } else {
            vec![]
        };

        self.expect_keyword_is(Keyword::ON)?;
        let on = self.parse_comma_separated(Parser::parse_expr)?;
        self.expect_keyword_is(Keyword::FROM)?;
        let from = self.parse_object_name(false)?;

        Ok(CreateStatistics {
            if_not_exists,
            name,
            kinds,
            on,
            from,
        })
    }
20819
20820 pub fn parse_create_access_method(&mut self) -> Result<CreateAccessMethod, ParserError> {
20824 let name = self.parse_identifier()?;
20825 self.expect_keyword_is(Keyword::TYPE)?;
20826 let method_type = if self.parse_keyword(Keyword::INDEX) {
20827 AccessMethodType::Index
20828 } else if self.parse_keyword(Keyword::TABLE) {
20829 AccessMethodType::Table
20830 } else {
20831 return self.expected_ref("INDEX or TABLE after TYPE", self.peek_token_ref());
20832 };
20833 self.expect_keyword_is(Keyword::HANDLER)?;
20834 let handler = self.parse_object_name(false)?;
20835
20836 Ok(CreateAccessMethod {
20837 name,
20838 method_type,
20839 handler,
20840 })
20841 }
20842
    /// Parses the remainder of a `CREATE EVENT TRIGGER <name> ON <event>
    /// [WHEN TAG IN (...)] EXECUTE {FUNCTION | PROCEDURE} <fn>()` statement.
    /// Recognized events are `ddl_command_start`, `ddl_command_end`,
    /// `table_rewrite`, and `sql_drop` (case-insensitive).
    pub fn parse_create_event_trigger(&mut self) -> Result<CreateEventTrigger, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let event_ident = self.parse_identifier()?;
        let event = match event_ident.value.to_lowercase().as_str() {
            "ddl_command_start" => EventTriggerEvent::DdlCommandStart,
            "ddl_command_end" => EventTriggerEvent::DdlCommandEnd,
            "table_rewrite" => EventTriggerEvent::TableRewrite,
            "sql_drop" => EventTriggerEvent::SqlDrop,
            other => {
                return Err(ParserError::ParserError(format!(
                    "Unknown event trigger event: {other}"
                )))
            }
        };

        // Optional filter restricting the trigger to specific command tags.
        let when_tags = if self.parse_keyword(Keyword::WHEN) {
            self.expect_keyword_is(Keyword::TAG)?;
            self.expect_keyword_is(Keyword::IN)?;
            self.expect_token(&Token::LParen)?;
            let tags = self.parse_comma_separated(|p| p.parse_value().map(|v| v.value))?;
            self.expect_token(&Token::RParen)?;
            Some(tags)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::EXECUTE)?;
        let is_procedure = if self.parse_keyword(Keyword::FUNCTION) {
            false
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            true
        } else {
            return self.expected_ref("FUNCTION or PROCEDURE after EXECUTE", self.peek_token_ref());
        };
        let execute = self.parse_object_name(false)?;
        // An empty argument list `()` is required after the function name.
        self.expect_token(&Token::LParen)?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateEventTrigger {
            name,
            event,
            when_tags,
            execute,
            is_procedure,
        })
    }
20893
    /// Parses the remainder of a `CREATE [OR REPLACE] TRANSFORM FOR <type>
    /// LANGUAGE <lang> ({FROM | TO} SQL WITH FUNCTION <fn>(<types>), ...)`
    /// statement. `or_replace` was consumed by the caller.
    pub fn parse_create_transform(
        &mut self,
        or_replace: bool,
    ) -> Result<CreateTransform, ParserError> {
        self.expect_keyword_is(Keyword::FOR)?;
        let type_name = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::LANGUAGE)?;
        let language = self.parse_identifier()?;
        self.expect_token(&Token::LParen)?;
        let elements = self.parse_comma_separated(|p| {
            // Each element is either `FROM SQL ...` or `TO SQL ...`.
            let is_from = if p.parse_keyword(Keyword::FROM) {
                true
            } else {
                p.expect_keyword_is(Keyword::TO)?;
                false
            };
            p.expect_keyword_is(Keyword::SQL)?;
            p.expect_keyword_is(Keyword::WITH)?;
            p.expect_keyword_is(Keyword::FUNCTION)?;
            let function = p.parse_object_name(false)?;
            p.expect_token(&Token::LParen)?;
            // The argument-type list may be empty: `fn()`.
            let arg_types = if p.peek_token().token == Token::RParen {
                vec![]
            } else {
                p.parse_comma_separated(|p| p.parse_data_type())?
            };
            p.expect_token(&Token::RParen)?;
            Ok(TransformElement {
                is_from,
                function,
                arg_types,
            })
        })?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateTransform {
            or_replace,
            type_name,
            language,
            elements,
        })
    }
20939
    /// Parses the remainder of a `SECURITY LABEL [FOR <provider>] ON
    /// <object_kind> <object_name> IS {'<label>' | NULL}` statement.
    /// The leading `SECURITY` keyword was consumed by the caller.
    pub fn parse_security_label(&mut self) -> Result<SecurityLabel, ParserError> {
        self.expect_keyword_is(Keyword::LABEL)?;

        let provider = if self.parse_keyword(Keyword::FOR) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::ON)?;

        // The two-keyword MATERIALIZED VIEW kind is tried first; the
        // remaining kinds are single keywords.
        let object_kind = if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            SecurityLabelObjectKind::MaterializedView
        } else if self.parse_keyword(Keyword::TABLE) {
            SecurityLabelObjectKind::Table
        } else if self.parse_keyword(Keyword::COLUMN) {
            SecurityLabelObjectKind::Column
        } else if self.parse_keyword(Keyword::DATABASE) {
            SecurityLabelObjectKind::Database
        } else if self.parse_keyword(Keyword::DOMAIN) {
            SecurityLabelObjectKind::Domain
        } else if self.parse_keyword(Keyword::FUNCTION) {
            SecurityLabelObjectKind::Function
        } else if self.parse_keyword(Keyword::ROLE) {
            SecurityLabelObjectKind::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            SecurityLabelObjectKind::Schema
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            SecurityLabelObjectKind::Sequence
        } else if self.parse_keyword(Keyword::TYPE) {
            SecurityLabelObjectKind::Type
        } else if self.parse_keyword(Keyword::VIEW) {
            SecurityLabelObjectKind::View
        } else {
            return self.expected_ref(
                "TABLE, COLUMN, DATABASE, DOMAIN, FUNCTION, MATERIALIZED VIEW, ROLE, SCHEMA, SEQUENCE, TYPE, or VIEW after ON",
                self.peek_token_ref(),
            );
        };

        let object_name = self.parse_object_name(false)?;

        self.expect_keyword_is(Keyword::IS)?;

        // `IS NULL` removes the label; it is represented as None.
        let label = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_value()?.value)
        };

        Ok(SecurityLabel {
            provider,
            object_kind,
            object_name,
            label,
        })
    }
21000
    /// Parses the remainder of a `CREATE USER MAPPING [IF NOT EXISTS] FOR
    /// {<user> | USER | CURRENT_ROLE | CURRENT_USER | PUBLIC}
    /// SERVER <server> [OPTIONS (...)]` statement.
    pub fn parse_create_user_mapping(&mut self) -> Result<CreateUserMapping, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::FOR)?;

        // Special role keywords are tried first; anything else is treated
        // as a plain identifier naming the user.
        let user = if self.parse_keyword(Keyword::CURRENT_ROLE) {
            UserMappingUser::CurrentRole
        } else if self.parse_keyword(Keyword::CURRENT_USER) {
            UserMappingUser::CurrentUser
        } else if self.parse_keyword(Keyword::PUBLIC) {
            UserMappingUser::Public
        } else if self.parse_keyword(Keyword::USER) {
            UserMappingUser::User
        } else {
            UserMappingUser::Ident(self.parse_identifier()?)
        };

        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        // Options are bare `key value` pairs of identifiers.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateUserMapping {
            if_not_exists,
            user,
            server_name,
            options,
        })
    }
21044
21045 pub fn parse_create_tablespace(&mut self) -> Result<CreateTablespace, ParserError> {
21049 let name = self.parse_identifier()?;
21050
21051 let owner = if self.parse_keyword(Keyword::OWNER) {
21052 Some(self.parse_identifier()?)
21053 } else {
21054 None
21055 };
21056
21057 self.expect_keyword_is(Keyword::LOCATION)?;
21058 let location = self.parse_value()?.value;
21059
21060 let with_options = self.parse_options(Keyword::WITH)?;
21061
21062 Ok(CreateTablespace {
21063 name,
21064 owner,
21065 location,
21066 with_options,
21067 })
21068 }
21069
    /// Returns the parser's current zero-based position within its token stream.
    pub fn index(&self) -> usize {
        self.index
    }
21074
21075 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
21077 let ident = self.parse_identifier()?;
21078 self.expect_keyword_is(Keyword::AS)?;
21079
21080 let window_expr = if self.consume_token(&Token::LParen) {
21081 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
21082 } else if self.dialect.supports_window_clause_named_window_reference() {
21083 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
21084 } else {
21085 return self.expected_ref("(", self.peek_token_ref());
21086 };
21087
21088 Ok(NamedWindowDefinition(ident, window_expr))
21089 }
21090
21091 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
21093 let name = self.parse_object_name(false)?;
21094 let params = self.parse_optional_procedure_parameters()?;
21095
21096 let language = if self.parse_keyword(Keyword::LANGUAGE) {
21097 Some(self.parse_identifier()?)
21098 } else {
21099 None
21100 };
21101
21102 self.expect_keyword_is(Keyword::AS)?;
21103
21104 let body = self.parse_conditional_statements(&[Keyword::END])?;
21105
21106 Ok(Statement::CreateProcedure {
21107 name,
21108 or_alter,
21109 params,
21110 language,
21111 body,
21112 })
21113 }
21114
21115 pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
21117 let window_name = match &self.peek_token_ref().token {
21118 Token::Word(word) if word.keyword == Keyword::NoKeyword => {
21119 self.parse_optional_ident()?
21120 }
21121 _ => None,
21122 };
21123
21124 let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
21125 self.parse_comma_separated(Parser::parse_expr)?
21126 } else {
21127 vec![]
21128 };
21129 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
21130 self.parse_comma_separated(Parser::parse_order_by_expr)?
21131 } else {
21132 vec![]
21133 };
21134
21135 let window_frame = if !self.consume_token(&Token::RParen) {
21136 let window_frame = self.parse_window_frame()?;
21137 self.expect_token(&Token::RParen)?;
21138 Some(window_frame)
21139 } else {
21140 None
21141 };
21142 Ok(WindowSpec {
21143 window_name,
21144 partition_by,
21145 order_by,
21146 window_frame,
21147 })
21148 }
21149
21150 pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
21152 let name = self.parse_object_name(false)?;
21153
21154 let has_as = self.parse_keyword(Keyword::AS);
21156
21157 if !has_as {
21158 if self.consume_token(&Token::LParen) {
21160 let options = self.parse_create_type_sql_definition_options()?;
21162 self.expect_token(&Token::RParen)?;
21163 return Ok(Statement::CreateType {
21164 name,
21165 representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
21166 });
21167 }
21168
21169 return Ok(Statement::CreateType {
21171 name,
21172 representation: None,
21173 });
21174 }
21175
21176 if self.parse_keyword(Keyword::ENUM) {
21178 self.parse_create_type_enum(name)
21180 } else if self.parse_keyword(Keyword::RANGE) {
21181 self.parse_create_type_range(name)
21183 } else if self.consume_token(&Token::LParen) {
21184 self.parse_create_type_composite(name)
21186 } else {
21187 self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
21188 }
21189 }
21190
21191 fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21195 if self.consume_token(&Token::RParen) {
21196 return Ok(Statement::CreateType {
21198 name,
21199 representation: Some(UserDefinedTypeRepresentation::Composite {
21200 attributes: vec![],
21201 }),
21202 });
21203 }
21204
21205 let mut attributes = vec![];
21206 loop {
21207 let attr_name = self.parse_identifier()?;
21208 let attr_data_type = self.parse_data_type()?;
21209 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
21210 Some(self.parse_object_name(false)?)
21211 } else {
21212 None
21213 };
21214 attributes.push(UserDefinedTypeCompositeAttributeDef {
21215 name: attr_name,
21216 data_type: attr_data_type,
21217 collation: attr_collation,
21218 });
21219
21220 if !self.consume_token(&Token::Comma) {
21221 break;
21222 }
21223 }
21224 self.expect_token(&Token::RParen)?;
21225
21226 Ok(Statement::CreateType {
21227 name,
21228 representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
21229 })
21230 }
21231
21232 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21236 self.expect_token(&Token::LParen)?;
21237 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21238 self.expect_token(&Token::RParen)?;
21239
21240 Ok(Statement::CreateType {
21241 name,
21242 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
21243 })
21244 }
21245
21246 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21250 self.expect_token(&Token::LParen)?;
21251 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
21252 self.expect_token(&Token::RParen)?;
21253
21254 Ok(Statement::CreateType {
21255 name,
21256 representation: Some(UserDefinedTypeRepresentation::Range { options }),
21257 })
21258 }
21259
21260 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
21262 let keyword = self.parse_one_of_keywords(&[
21263 Keyword::SUBTYPE,
21264 Keyword::SUBTYPE_OPCLASS,
21265 Keyword::COLLATION,
21266 Keyword::CANONICAL,
21267 Keyword::SUBTYPE_DIFF,
21268 Keyword::MULTIRANGE_TYPE_NAME,
21269 ]);
21270
21271 match keyword {
21272 Some(Keyword::SUBTYPE) => {
21273 self.expect_token(&Token::Eq)?;
21274 let data_type = self.parse_data_type()?;
21275 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
21276 }
21277 Some(Keyword::SUBTYPE_OPCLASS) => {
21278 self.expect_token(&Token::Eq)?;
21279 let name = self.parse_object_name(false)?;
21280 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
21281 }
21282 Some(Keyword::COLLATION) => {
21283 self.expect_token(&Token::Eq)?;
21284 let name = self.parse_object_name(false)?;
21285 Ok(UserDefinedTypeRangeOption::Collation(name))
21286 }
21287 Some(Keyword::CANONICAL) => {
21288 self.expect_token(&Token::Eq)?;
21289 let name = self.parse_object_name(false)?;
21290 Ok(UserDefinedTypeRangeOption::Canonical(name))
21291 }
21292 Some(Keyword::SUBTYPE_DIFF) => {
21293 self.expect_token(&Token::Eq)?;
21294 let name = self.parse_object_name(false)?;
21295 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
21296 }
21297 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
21298 self.expect_token(&Token::Eq)?;
21299 let name = self.parse_object_name(false)?;
21300 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
21301 }
21302 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
21303 }
21304 }
21305
21306 fn parse_create_type_sql_definition_options(
21308 &mut self,
21309 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
21310 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
21311 }
21312
21313 fn parse_sql_definition_option(
21315 &mut self,
21316 ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
21317 let keyword = self.parse_one_of_keywords(&[
21318 Keyword::INPUT,
21319 Keyword::OUTPUT,
21320 Keyword::RECEIVE,
21321 Keyword::SEND,
21322 Keyword::TYPMOD_IN,
21323 Keyword::TYPMOD_OUT,
21324 Keyword::ANALYZE,
21325 Keyword::SUBSCRIPT,
21326 Keyword::INTERNALLENGTH,
21327 Keyword::PASSEDBYVALUE,
21328 Keyword::ALIGNMENT,
21329 Keyword::STORAGE,
21330 Keyword::LIKE,
21331 Keyword::CATEGORY,
21332 Keyword::PREFERRED,
21333 Keyword::DEFAULT,
21334 Keyword::ELEMENT,
21335 Keyword::DELIMITER,
21336 Keyword::COLLATABLE,
21337 ]);
21338
21339 match keyword {
21340 Some(Keyword::INPUT) => {
21341 self.expect_token(&Token::Eq)?;
21342 let name = self.parse_object_name(false)?;
21343 Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
21344 }
21345 Some(Keyword::OUTPUT) => {
21346 self.expect_token(&Token::Eq)?;
21347 let name = self.parse_object_name(false)?;
21348 Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
21349 }
21350 Some(Keyword::RECEIVE) => {
21351 self.expect_token(&Token::Eq)?;
21352 let name = self.parse_object_name(false)?;
21353 Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
21354 }
21355 Some(Keyword::SEND) => {
21356 self.expect_token(&Token::Eq)?;
21357 let name = self.parse_object_name(false)?;
21358 Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
21359 }
21360 Some(Keyword::TYPMOD_IN) => {
21361 self.expect_token(&Token::Eq)?;
21362 let name = self.parse_object_name(false)?;
21363 Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
21364 }
21365 Some(Keyword::TYPMOD_OUT) => {
21366 self.expect_token(&Token::Eq)?;
21367 let name = self.parse_object_name(false)?;
21368 Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
21369 }
21370 Some(Keyword::ANALYZE) => {
21371 self.expect_token(&Token::Eq)?;
21372 let name = self.parse_object_name(false)?;
21373 Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
21374 }
21375 Some(Keyword::SUBSCRIPT) => {
21376 self.expect_token(&Token::Eq)?;
21377 let name = self.parse_object_name(false)?;
21378 Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
21379 }
21380 Some(Keyword::INTERNALLENGTH) => {
21381 self.expect_token(&Token::Eq)?;
21382 if self.parse_keyword(Keyword::VARIABLE) {
21383 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
21384 UserDefinedTypeInternalLength::Variable,
21385 ))
21386 } else {
21387 let value = self.parse_literal_uint()?;
21388 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
21389 UserDefinedTypeInternalLength::Fixed(value),
21390 ))
21391 }
21392 }
21393 Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
21394 Some(Keyword::ALIGNMENT) => {
21395 self.expect_token(&Token::Eq)?;
21396 let align_keyword = self.parse_one_of_keywords(&[
21397 Keyword::CHAR,
21398 Keyword::INT2,
21399 Keyword::INT4,
21400 Keyword::DOUBLE,
21401 ]);
21402 match align_keyword {
21403 Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21404 Alignment::Char,
21405 )),
21406 Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21407 Alignment::Int2,
21408 )),
21409 Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21410 Alignment::Int4,
21411 )),
21412 Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21413 Alignment::Double,
21414 )),
21415 _ => self.expected_ref(
21416 "alignment value (char, int2, int4, or double)",
21417 self.peek_token_ref(),
21418 ),
21419 }
21420 }
21421 Some(Keyword::STORAGE) => {
21422 self.expect_token(&Token::Eq)?;
21423 let storage_keyword = self.parse_one_of_keywords(&[
21424 Keyword::PLAIN,
21425 Keyword::EXTERNAL,
21426 Keyword::EXTENDED,
21427 Keyword::MAIN,
21428 ]);
21429 match storage_keyword {
21430 Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21431 UserDefinedTypeStorage::Plain,
21432 )),
21433 Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21434 UserDefinedTypeStorage::External,
21435 )),
21436 Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21437 UserDefinedTypeStorage::Extended,
21438 )),
21439 Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21440 UserDefinedTypeStorage::Main,
21441 )),
21442 _ => self.expected_ref(
21443 "storage value (plain, external, extended, or main)",
21444 self.peek_token_ref(),
21445 ),
21446 }
21447 }
21448 Some(Keyword::LIKE) => {
21449 self.expect_token(&Token::Eq)?;
21450 let name = self.parse_object_name(false)?;
21451 Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
21452 }
21453 Some(Keyword::CATEGORY) => {
21454 self.expect_token(&Token::Eq)?;
21455 let category_str = self.parse_literal_string()?;
21456 let category_char = category_str.chars().next().ok_or_else(|| {
21457 ParserError::ParserError(
21458 "CATEGORY value must be a single character".to_string(),
21459 )
21460 })?;
21461 Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
21462 }
21463 Some(Keyword::PREFERRED) => {
21464 self.expect_token(&Token::Eq)?;
21465 let value =
21466 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
21467 Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
21468 }
21469 Some(Keyword::DEFAULT) => {
21470 self.expect_token(&Token::Eq)?;
21471 let expr = self.parse_expr()?;
21472 Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
21473 }
21474 Some(Keyword::ELEMENT) => {
21475 self.expect_token(&Token::Eq)?;
21476 let data_type = self.parse_data_type()?;
21477 Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
21478 }
21479 Some(Keyword::DELIMITER) => {
21480 self.expect_token(&Token::Eq)?;
21481 let delimiter = self.parse_literal_string()?;
21482 Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
21483 }
21484 Some(Keyword::COLLATABLE) => {
21485 self.expect_token(&Token::Eq)?;
21486 let value =
21487 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
21488 Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
21489 }
21490 _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
21491 }
21492 }
21493
21494 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
21495 self.expect_token(&Token::LParen)?;
21496 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21497 self.expect_token(&Token::RParen)?;
21498 Ok(idents)
21499 }
21500
21501 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
21502 if dialect_of!(self is MySqlDialect | GenericDialect) {
21503 if self.parse_keyword(Keyword::FIRST) {
21504 Ok(Some(MySQLColumnPosition::First))
21505 } else if self.parse_keyword(Keyword::AFTER) {
21506 let ident = self.parse_identifier()?;
21507 Ok(Some(MySQLColumnPosition::After(ident)))
21508 } else {
21509 Ok(None)
21510 }
21511 } else {
21512 Ok(None)
21513 }
21514 }
21515
21516 fn parse_print(&mut self) -> Result<Statement, ParserError> {
21518 Ok(Statement::Print(PrintStatement {
21519 message: Box::new(self.parse_expr()?),
21520 }))
21521 }
21522
21523 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
21527 let wait_type = if self.parse_keyword(Keyword::DELAY) {
21528 WaitForType::Delay
21529 } else if self.parse_keyword(Keyword::TIME) {
21530 WaitForType::Time
21531 } else {
21532 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
21533 };
21534 let expr = self.parse_expr()?;
21535 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
21536 }
21537
21538 fn parse_return(&mut self) -> Result<Statement, ParserError> {
21540 match self.maybe_parse(|p| p.parse_expr())? {
21541 Some(expr) => Ok(Statement::Return(ReturnStatement {
21542 value: Some(ReturnStatementValue::Expr(expr)),
21543 })),
21544 None => Ok(Statement::Return(ReturnStatement { value: None })),
21545 }
21546 }
21547
21548 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
21552 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
21553
21554 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
21555 Some(self.parse_object_name(false)?)
21556 } else {
21557 None
21558 };
21559 self.expect_keyword(Keyword::OPTIONS)?;
21560 self.expect_token(&Token::LParen)?;
21561 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
21562 self.expect_token(&Token::RParen)?;
21563 self.expect_keyword(Keyword::AS)?;
21564 let query = self.parse_query()?;
21565 Ok(Statement::ExportData(ExportData {
21566 options,
21567 query,
21568 connection,
21569 }))
21570 }
21571
21572 fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
21573 self.expect_keyword(Keyword::VACUUM)?;
21574 let full = self.parse_keyword(Keyword::FULL);
21575 let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
21576 let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
21577 let reindex = self.parse_keyword(Keyword::REINDEX);
21578 let recluster = self.parse_keyword(Keyword::RECLUSTER);
21579 let (table_name, threshold, boost) =
21580 match self.maybe_parse(|p| p.parse_object_name(false))? {
21581 Some(table_name) => {
21582 let threshold = if self.parse_keyword(Keyword::TO) {
21583 let value = self.parse_value()?;
21584 self.expect_keyword(Keyword::PERCENT)?;
21585 Some(value)
21586 } else {
21587 None
21588 };
21589 let boost = self.parse_keyword(Keyword::BOOST);
21590 (Some(table_name), threshold, boost)
21591 }
21592 _ => (None, None, false),
21593 };
21594 Ok(Statement::Vacuum(VacuumStatement {
21595 full,
21596 sort_only,
21597 delete_only,
21598 reindex,
21599 recluster,
21600 table_name,
21601 threshold,
21602 boost,
21603 }))
21604 }
21605
    /// Consumes the parser and returns the token stream it was holding.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
21610
21611 fn peek_sub_query(&mut self) -> bool {
21613 self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
21614 .is_some()
21615 }
21616
21617 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
21618 let show_in;
21619 let mut filter_position = None;
21620 if self.dialect.supports_show_like_before_in() {
21621 if let Some(filter) = self.parse_show_statement_filter()? {
21622 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
21623 }
21624 show_in = self.maybe_parse_show_stmt_in()?;
21625 } else {
21626 show_in = self.maybe_parse_show_stmt_in()?;
21627 if let Some(filter) = self.parse_show_statement_filter()? {
21628 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
21629 }
21630 }
21631 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
21632 let limit = self.maybe_parse_show_stmt_limit()?;
21633 let from = self.maybe_parse_show_stmt_from()?;
21634 Ok(ShowStatementOptions {
21635 filter_position,
21636 show_in,
21637 starts_with,
21638 limit,
21639 limit_from: from,
21640 })
21641 }
21642
21643 fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
21644 let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
21645 Some(Keyword::FROM) => ShowStatementInClause::FROM,
21646 Some(Keyword::IN) => ShowStatementInClause::IN,
21647 None => return Ok(None),
21648 _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
21649 };
21650
21651 let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
21652 Keyword::ACCOUNT,
21653 Keyword::DATABASE,
21654 Keyword::SCHEMA,
21655 Keyword::TABLE,
21656 Keyword::VIEW,
21657 ]) {
21658 Some(Keyword::DATABASE)
21660 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
21661 | self.peek_keyword(Keyword::LIMIT) =>
21662 {
21663 (Some(ShowStatementInParentType::Database), None)
21664 }
21665 Some(Keyword::SCHEMA)
21666 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
21667 | self.peek_keyword(Keyword::LIMIT) =>
21668 {
21669 (Some(ShowStatementInParentType::Schema), None)
21670 }
21671 Some(parent_kw) => {
21672 let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
21676 match parent_kw {
21677 Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
21678 Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
21679 Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
21680 Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
21681 Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
21682 _ => {
21683 return self.expected_ref(
21684 "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
21685 self.peek_token_ref(),
21686 )
21687 }
21688 }
21689 }
21690 None => {
21691 let mut parent_name = self.parse_object_name(false)?;
21694 if self
21695 .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
21696 .is_some()
21697 {
21698 parent_name
21699 .0
21700 .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
21701 }
21702 (None, Some(parent_name))
21703 }
21704 };
21705
21706 Ok(Some(ShowStatementIn {
21707 clause,
21708 parent_type,
21709 parent_name,
21710 }))
21711 }
21712
21713 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21714 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
21715 Ok(Some(self.parse_value()?))
21716 } else {
21717 Ok(None)
21718 }
21719 }
21720
21721 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
21722 if self.parse_keyword(Keyword::LIMIT) {
21723 Ok(self.parse_limit()?)
21724 } else {
21725 Ok(None)
21726 }
21727 }
21728
21729 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21730 if self.parse_keyword(Keyword::FROM) {
21731 Ok(Some(self.parse_value()?))
21732 } else {
21733 Ok(None)
21734 }
21735 }
21736
    /// Returns true while the parser is in the `ColumnDefinition` state.
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
21740
    /// Parses a sequence of `key = value` options, optionally wrapped in
    /// parentheses, and records whether the entries were separated by commas
    /// or plain whitespace.
    ///
    /// Parsing stops at EOF, a semicolon, the closing paren (when
    /// `parenthesized`), or any keyword listed in `end_words` (which is left
    /// unconsumed for the caller).
    pub(crate) fn parse_key_value_options(
        &mut self,
        parenthesized: bool,
        end_words: &[Keyword],
    ) -> Result<KeyValueOptions, ParserError> {
        let mut options: Vec<KeyValueOption> = Vec::new();
        // Assume space-delimited until a comma is actually seen.
        let mut delimiter = KeyValueOptionsDelimiter::Space;
        if parenthesized {
            self.expect_token(&Token::LParen)?;
        }
        loop {
            match self.next_token().token {
                Token::RParen => {
                    if parenthesized {
                        break;
                    } else {
                        // Stray ')' outside a parenthesized list.
                        // NOTE(review): the leading space in this message
                        // looks unintentional — confirm against the error
                        // formatting in `expected_ref` before changing it.
                        return self.expected_ref(" another option or EOF", self.peek_token_ref());
                    }
                }
                Token::EOF | Token::SemiColon => break,
                Token::Comma => {
                    // Seeing any comma switches the recorded delimiter.
                    delimiter = KeyValueOptionsDelimiter::Comma;
                    continue;
                }
                // A word that is not a terminator starts another option.
                Token::Word(w) if !end_words.contains(&w.keyword) => {
                    options.push(self.parse_key_value_option(&w)?)
                }
                // A terminator keyword: push it back and stop.
                Token::Word(w) if end_words.contains(&w.keyword) => {
                    self.prev_token();
                    break;
                }
                _ => {
                    return self.expected_ref(
                        "another option, EOF, SemiColon, Comma or ')'",
                        self.peek_token_ref(),
                    )
                }
            };
        }

        Ok(KeyValueOptions { delimiter, options })
    }
21787
    /// Parses the `= value` part of a single key/value option, where `key`
    /// has already been consumed.
    ///
    /// The value may be a quoted string, boolean, or number (all parsed via
    /// `parse_value`), a bare word (stored as a placeholder value), a
    /// parenthesized list of values, or a nested parenthesized option list.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word is kept verbatim as a placeholder value.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            Token::LParen => {
                // First try `( v1, v2, ... )` as a list of plain values; if
                // that fails, backtrack and parse a nested option list.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
21846
21847 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
21849 if self.parse_keyword(Keyword::ALL) {
21850 return Ok(ResetStatement { reset: Reset::ALL });
21851 }
21852
21853 let obj = self.parse_object_name(false)?;
21854 Ok(ResetStatement {
21855 reset: Reset::ConfigurationParameter(obj),
21856 })
21857 }
21858}
21859
21860fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
21861 if let Some(prefix) = prefix {
21862 Expr::Prefixed {
21863 prefix,
21864 value: Box::new(expr),
21865 }
21866 } else {
21867 expr
21868 }
21869}
21870
21871impl Word {
21872 pub fn to_ident(&self, span: Span) -> Ident {
21878 Ident {
21879 value: self.value.clone(),
21880 quote_style: self.quote_style,
21881 span,
21882 }
21883 }
21884
21885 pub fn into_ident(self, span: Span) -> Ident {
21890 Ident {
21891 value: self.value,
21892 quote_style: self.quote_style,
21893 span,
21894 }
21895 }
21896}
21897
21898#[cfg(test)]
21899mod tests {
21900 use crate::test_utils::{all_dialects, TestedDialects};
21901
21902 use super::*;
21903
    /// Verifies that `prev_token` steps the parser back one token, including
    /// around EOF, where `next_token` keeps yielding EOF.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            // After stepping back, the same token is returned again.
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // Once past the last token, next_token keeps returning EOF...
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            // ...and prev_token at EOF must not panic.
            parser.prev_token();
        });
    }
21924
    /// Verifies that `peek_tokens` looks ahead without consuming, and pads
    /// with EOF when fewer tokens remain than were requested.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            // Peeking one token does not advance the parser.
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            // Peeking three still starts at SELECT: the previous peek
            // consumed nothing.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Advance past "SELECT foo AS bar".
            for _ in 0..4 {
                parser.next_token();
            }

            // Requests longer than the remaining stream are padded with EOF.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
21969
21970 #[cfg(test)]
21971 mod test_parse_data_type {
21972 use crate::ast::{
21973 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
21974 };
21975 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
21976 use crate::test_utils::TestedDialects;
21977
        // Parses `$input` with every dialect in `$dialect`, asserting both
        // that the resulting `DataType` equals `$expected_type` and that it
        // round-trips back to the original SQL string via `Display`.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
21987
        /// ANSI character string types (`CHARACTER`, `CHAR`, their `VARYING`
        /// forms, and `VARCHAR`) with optional lengths and length units.
        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            // CHARACTER: bare, with length, and with CHARACTERS/OCTETS units.
            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            // CHAR: same variants as CHARACTER.
            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            // CHARACTER VARYING with length and optional units.
            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            // CHAR VARYING with length and optional units.
            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            // VARCHAR shorthand.
            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
22115
22116 #[test]
22117 fn test_ansii_character_large_object_types() {
22118 let dialect =
22120 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
22121
22122 test_parse_data_type!(
22123 dialect,
22124 "CHARACTER LARGE OBJECT",
22125 DataType::CharacterLargeObject(None)
22126 );
22127 test_parse_data_type!(
22128 dialect,
22129 "CHARACTER LARGE OBJECT(20)",
22130 DataType::CharacterLargeObject(Some(20))
22131 );
22132
22133 test_parse_data_type!(
22134 dialect,
22135 "CHAR LARGE OBJECT",
22136 DataType::CharLargeObject(None)
22137 );
22138 test_parse_data_type!(
22139 dialect,
22140 "CHAR LARGE OBJECT(20)",
22141 DataType::CharLargeObject(Some(20))
22142 );
22143
22144 test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
22145 test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
22146 }
22147
22148 #[test]
22149 fn test_parse_custom_types() {
22150 let dialect =
22151 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
22152
22153 test_parse_data_type!(
22154 dialect,
22155 "GEOMETRY",
22156 DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
22157 );
22158
22159 test_parse_data_type!(
22160 dialect,
22161 "GEOMETRY(POINT)",
22162 DataType::Custom(
22163 ObjectName::from(vec!["GEOMETRY".into()]),
22164 vec!["POINT".to_string()]
22165 )
22166 );
22167
22168 test_parse_data_type!(
22169 dialect,
22170 "GEOMETRY(POINT, 4326)",
22171 DataType::Custom(
22172 ObjectName::from(vec!["GEOMETRY".into()]),
22173 vec!["POINT".to_string(), "4326".to_string()]
22174 )
22175 );
22176 }
22177
22178 #[test]
22179 fn test_ansii_exact_numeric_types() {
22180 let dialect = TestedDialects::new(vec![
22182 Box::new(GenericDialect {}),
22183 Box::new(AnsiDialect {}),
22184 Box::new(PostgreSqlDialect {}),
22185 ]);
22186
22187 test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
22188
22189 test_parse_data_type!(
22190 dialect,
22191 "NUMERIC(2)",
22192 DataType::Numeric(ExactNumberInfo::Precision(2))
22193 );
22194
22195 test_parse_data_type!(
22196 dialect,
22197 "NUMERIC(2,10)",
22198 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
22199 );
22200
22201 test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
22202
22203 test_parse_data_type!(
22204 dialect,
22205 "DECIMAL(2)",
22206 DataType::Decimal(ExactNumberInfo::Precision(2))
22207 );
22208
22209 test_parse_data_type!(
22210 dialect,
22211 "DECIMAL(2,10)",
22212 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
22213 );
22214
22215 test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
22216
22217 test_parse_data_type!(
22218 dialect,
22219 "DEC(2)",
22220 DataType::Dec(ExactNumberInfo::Precision(2))
22221 );
22222
22223 test_parse_data_type!(
22224 dialect,
22225 "DEC(2,10)",
22226 DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
22227 );
22228
22229 test_parse_data_type!(
22231 dialect,
22232 "NUMERIC(10,-2)",
22233 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
22234 );
22235
22236 test_parse_data_type!(
22237 dialect,
22238 "DECIMAL(1000,-10)",
22239 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
22240 );
22241
22242 test_parse_data_type!(
22243 dialect,
22244 "DEC(5,-1000)",
22245 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
22246 );
22247
22248 test_parse_data_type!(
22249 dialect,
22250 "NUMERIC(10,-5)",
22251 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
22252 );
22253
22254 test_parse_data_type!(
22255 dialect,
22256 "DECIMAL(20,-10)",
22257 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
22258 );
22259
22260 test_parse_data_type!(
22261 dialect,
22262 "DEC(5,-2)",
22263 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
22264 );
22265
22266 dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
22267 let data_type = parser.parse_data_type().unwrap();
22268 assert_eq!(
22269 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
22270 data_type
22271 );
22272 assert_eq!("NUMERIC(10,5)", data_type.to_string());
22274 });
22275 }
22276
22277 #[test]
22278 fn test_ansii_date_type() {
22279 let dialect =
22281 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
22282
22283 test_parse_data_type!(dialect, "DATE", DataType::Date);
22284
22285 test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
22286
22287 test_parse_data_type!(
22288 dialect,
22289 "TIME(6)",
22290 DataType::Time(Some(6), TimezoneInfo::None)
22291 );
22292
22293 test_parse_data_type!(
22294 dialect,
22295 "TIME WITH TIME ZONE",
22296 DataType::Time(None, TimezoneInfo::WithTimeZone)
22297 );
22298
22299 test_parse_data_type!(
22300 dialect,
22301 "TIME(6) WITH TIME ZONE",
22302 DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
22303 );
22304
22305 test_parse_data_type!(
22306 dialect,
22307 "TIME WITHOUT TIME ZONE",
22308 DataType::Time(None, TimezoneInfo::WithoutTimeZone)
22309 );
22310
22311 test_parse_data_type!(
22312 dialect,
22313 "TIME(6) WITHOUT TIME ZONE",
22314 DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
22315 );
22316
22317 test_parse_data_type!(
22318 dialect,
22319 "TIMESTAMP",
22320 DataType::Timestamp(None, TimezoneInfo::None)
22321 );
22322
22323 test_parse_data_type!(
22324 dialect,
22325 "TIMESTAMP(22)",
22326 DataType::Timestamp(Some(22), TimezoneInfo::None)
22327 );
22328
22329 test_parse_data_type!(
22330 dialect,
22331 "TIMESTAMP(22) WITH TIME ZONE",
22332 DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
22333 );
22334
22335 test_parse_data_type!(
22336 dialect,
22337 "TIMESTAMP(33) WITHOUT TIME ZONE",
22338 DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
22339 );
22340 }
22341 }
22342
22343 #[test]
22344 fn test_parse_schema_name() {
22345 macro_rules! test_parse_schema_name {
22347 ($input:expr, $expected_name:expr $(,)?) => {{
22348 all_dialects().run_parser_method(&*$input, |parser| {
22349 let schema_name = parser.parse_schema_name().unwrap();
22350 assert_eq!(schema_name, $expected_name);
22352 assert_eq!(schema_name.to_string(), $input.to_string());
22354 });
22355 }};
22356 }
22357
22358 let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
22359 let dummy_authorization = Ident::new("dummy_authorization");
22360
22361 test_parse_schema_name!(
22362 format!("{dummy_name}"),
22363 SchemaName::Simple(dummy_name.clone())
22364 );
22365
22366 test_parse_schema_name!(
22367 format!("AUTHORIZATION {dummy_authorization}"),
22368 SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
22369 );
22370 test_parse_schema_name!(
22371 format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
22372 SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
22373 );
22374 }
22375
22376 #[test]
22377 fn mysql_parse_index_table_constraint() {
22378 macro_rules! test_parse_table_constraint {
22379 ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
22380 $dialect.run_parser_method(&*$input, |parser| {
22381 let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
22382 assert_eq!(constraint, $expected);
22384 assert_eq!(constraint.to_string(), $input.to_string());
22386 });
22387 }};
22388 }
22389
22390 fn mk_expected_col(name: &str) -> IndexColumn {
22391 IndexColumn {
22392 column: OrderByExpr {
22393 expr: Expr::Identifier(name.into()),
22394 options: OrderByOptions {
22395 asc: None,
22396 nulls_first: None,
22397 },
22398 with_fill: None,
22399 },
22400 operator_class: None,
22401 }
22402 }
22403
22404 let dialect =
22405 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
22406
22407 test_parse_table_constraint!(
22408 dialect,
22409 "INDEX (c1)",
22410 IndexConstraint {
22411 display_as_key: false,
22412 name: None,
22413 index_type: None,
22414 columns: vec![mk_expected_col("c1")],
22415 index_options: vec![],
22416 }
22417 .into()
22418 );
22419
22420 test_parse_table_constraint!(
22421 dialect,
22422 "KEY (c1)",
22423 IndexConstraint {
22424 display_as_key: true,
22425 name: None,
22426 index_type: None,
22427 columns: vec![mk_expected_col("c1")],
22428 index_options: vec![],
22429 }
22430 .into()
22431 );
22432
22433 test_parse_table_constraint!(
22434 dialect,
22435 "INDEX 'index' (c1, c2)",
22436 TableConstraint::Index(IndexConstraint {
22437 display_as_key: false,
22438 name: Some(Ident::with_quote('\'', "index")),
22439 index_type: None,
22440 columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
22441 index_options: vec![],
22442 })
22443 );
22444
22445 test_parse_table_constraint!(
22446 dialect,
22447 "INDEX USING BTREE (c1)",
22448 IndexConstraint {
22449 display_as_key: false,
22450 name: None,
22451 index_type: Some(IndexType::BTree),
22452 columns: vec![mk_expected_col("c1")],
22453 index_options: vec![],
22454 }
22455 .into()
22456 );
22457
22458 test_parse_table_constraint!(
22459 dialect,
22460 "INDEX USING HASH (c1)",
22461 IndexConstraint {
22462 display_as_key: false,
22463 name: None,
22464 index_type: Some(IndexType::Hash),
22465 columns: vec![mk_expected_col("c1")],
22466 index_options: vec![],
22467 }
22468 .into()
22469 );
22470
22471 test_parse_table_constraint!(
22472 dialect,
22473 "INDEX idx_name USING BTREE (c1)",
22474 IndexConstraint {
22475 display_as_key: false,
22476 name: Some(Ident::new("idx_name")),
22477 index_type: Some(IndexType::BTree),
22478 columns: vec![mk_expected_col("c1")],
22479 index_options: vec![],
22480 }
22481 .into()
22482 );
22483
22484 test_parse_table_constraint!(
22485 dialect,
22486 "INDEX idx_name USING HASH (c1)",
22487 IndexConstraint {
22488 display_as_key: false,
22489 name: Some(Ident::new("idx_name")),
22490 index_type: Some(IndexType::Hash),
22491 columns: vec![mk_expected_col("c1")],
22492 index_options: vec![],
22493 }
22494 .into()
22495 );
22496 }
22497
22498 #[test]
22499 fn test_tokenizer_error_loc() {
22500 let sql = "foo '";
22501 let ast = Parser::parse_sql(&GenericDialect, sql);
22502 assert_eq!(
22503 ast,
22504 Err(ParserError::TokenizerError(
22505 "Unterminated string literal at Line: 1, Column: 5".to_string()
22506 ))
22507 );
22508 }
22509
22510 #[test]
22511 fn test_parser_error_loc() {
22512 let sql = "SELECT this is a syntax error";
22513 let ast = Parser::parse_sql(&GenericDialect, sql);
22514 assert_eq!(
22515 ast,
22516 Err(ParserError::ParserError(
22517 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
22518 .to_string()
22519 ))
22520 );
22521 }
22522
22523 #[test]
22524 fn test_nested_explain_error() {
22525 let sql = "EXPLAIN EXPLAIN SELECT 1";
22526 let ast = Parser::parse_sql(&GenericDialect, sql);
22527 assert_eq!(
22528 ast,
22529 Err(ParserError::ParserError(
22530 "Explain must be root of the plan".to_string()
22531 ))
22532 );
22533 }
22534
22535 #[test]
22536 fn test_parse_multipart_identifier_positive() {
22537 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
22538
22539 let expected = vec![
22541 Ident {
22542 value: "CATALOG".to_string(),
22543 quote_style: None,
22544 span: Span::empty(),
22545 },
22546 Ident {
22547 value: "F(o)o. \"bar".to_string(),
22548 quote_style: Some('"'),
22549 span: Span::empty(),
22550 },
22551 Ident {
22552 value: "table".to_string(),
22553 quote_style: None,
22554 span: Span::empty(),
22555 },
22556 ];
22557 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
22558 let actual = parser.parse_multipart_identifier().unwrap();
22559 assert_eq!(expected, actual);
22560 });
22561
22562 let expected = vec![
22564 Ident {
22565 value: "CATALOG".to_string(),
22566 quote_style: None,
22567 span: Span::empty(),
22568 },
22569 Ident {
22570 value: "table".to_string(),
22571 quote_style: None,
22572 span: Span::empty(),
22573 },
22574 ];
22575 dialect.run_parser_method("CATALOG . table", |parser| {
22576 let actual = parser.parse_multipart_identifier().unwrap();
22577 assert_eq!(expected, actual);
22578 });
22579 }
22580
22581 #[test]
22582 fn test_parse_multipart_identifier_negative() {
22583 macro_rules! test_parse_multipart_identifier_error {
22584 ($input:expr, $expected_err:expr $(,)?) => {{
22585 all_dialects().run_parser_method(&*$input, |parser| {
22586 let actual_err = parser.parse_multipart_identifier().unwrap_err();
22587 assert_eq!(actual_err.to_string(), $expected_err);
22588 });
22589 }};
22590 }
22591
22592 test_parse_multipart_identifier_error!(
22593 "",
22594 "sql parser error: Empty input when parsing identifier",
22595 );
22596
22597 test_parse_multipart_identifier_error!(
22598 "*schema.table",
22599 "sql parser error: Unexpected token in identifier: *",
22600 );
22601
22602 test_parse_multipart_identifier_error!(
22603 "schema.table*",
22604 "sql parser error: Unexpected token in identifier: *",
22605 );
22606
22607 test_parse_multipart_identifier_error!(
22608 "schema.table.",
22609 "sql parser error: Trailing period in identifier",
22610 );
22611
22612 test_parse_multipart_identifier_error!(
22613 "schema.*",
22614 "sql parser error: Unexpected token following period in identifier: *",
22615 );
22616 }
22617
22618 #[test]
22619 fn test_mysql_partition_selection() {
22620 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
22621 let expected = vec!["p0", "p2"];
22622
22623 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
22624 assert_eq!(ast.len(), 1);
22625 if let Statement::Query(v) = &ast[0] {
22626 if let SetExpr::Select(select) = &*v.body {
22627 assert_eq!(select.from.len(), 1);
22628 let from: &TableWithJoins = &select.from[0];
22629 let table_factor = &from.relation;
22630 if let TableFactor::Table { partitions, .. } = table_factor {
22631 let actual: Vec<&str> = partitions
22632 .iter()
22633 .map(|ident| ident.value.as_str())
22634 .collect();
22635 assert_eq!(expected, actual);
22636 }
22637 }
22638 } else {
22639 panic!("fail to parse mysql partition selection");
22640 }
22641 }
22642
22643 #[test]
22644 fn test_replace_into_placeholders() {
22645 let sql = "REPLACE INTO t (a) VALUES (&a)";
22646
22647 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
22648 }
22649
22650 #[test]
22651 fn test_replace_into_set_placeholder() {
22652 let sql = "REPLACE INTO t SET ?";
22653
22654 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
22655 }
22656
22657 #[test]
22658 fn test_replace_incomplete() {
22659 let sql = r#"REPLACE"#;
22660
22661 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
22662 }
22663
22664 #[test]
22665 fn test_placeholder_invalid_whitespace() {
22666 for w in [" ", "/*invalid*/"] {
22667 let sql = format!("\nSELECT\n :{w}fooBar");
22668 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
22669 }
22670 }
22671}