1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while parsing SQL text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// An error bubbled up from the tokenizer, carried as its message text
    /// (see the `From<TokenizerError>` impl below).
    TokenizerError(String),
    /// A general parse failure with a human-readable message.
    ParserError(String),
    /// Nesting exceeded the configured recursion limit
    /// (see `Parser::with_recursion_limit`).
    RecursionLimitExceeded,
}
60
/// Builds an `Err(ParserError::ParserError(..))` whose message is `$MSG`
/// with the source location `$loc` appended.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
// Parser submodules (ALTER- and MERGE-statement parsing lives in these files).
mod alter;
mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks the remaining recursion budget, shared (via `Rc`) between the
    /// parser and every outstanding [`DepthGuard`].
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` nested descents.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Spends one unit of depth, returning an RAII guard that refunds it
        /// when dropped. Fails with [`ParserError::RecursionLimitExceeded`]
        /// once the budget is exhausted.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let depth = self.remaining_depth.get();
            if depth == 0 {
                return Err(ParserError::RecursionLimitExceeded);
            }
            self.remaining_depth.set(depth - 1);
            Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
        }
    }

    /// RAII token: dropping it gives one unit of depth back to the counter.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let depth = self.remaining_depth.get();
            self.remaining_depth.set(depth + 1);
        }
    }
}
137
138#[cfg(not(feature = "std"))]
139mod recursion {
140 pub(crate) struct RecursionCounter {}
146
147 impl RecursionCounter {
148 pub fn new(_remaining_depth: usize) -> Self {
149 Self {}
150 }
151 pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
152 Ok(DepthGuard {})
153 }
154 }
155
156 pub struct DepthGuard {}
157}
158
/// Marker telling a parse helper whether the element it parses is required.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    /// The element may be absent.
    Optional,
    /// The element must be present.
    Mandatory,
}
167
/// Marker for whether a derived table is introduced with `LATERAL`.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// Result of parsing an item that may be a regular expression, a qualified
/// wildcard (`alias.*`), or a bare wildcard (`*`).
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
185
186impl From<TokenizerError> for ParserError {
187 fn from(e: TokenizerError) -> Self {
188 ParserError::TokenizerError(e.to_string())
189 }
190}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// Marker impl so `ParserError` works with `?`-based error plumbing in both
// std and no_std builds (`core::error::Error`).
impl core::error::Error for ParserError {}
207
/// Default recursion budget installed by `Parser::new`; override with
/// `Parser::with_recursion_limit`.
const DEFAULT_REMAINING_DEPTH: usize = 50;

/// Sentinel EOF token with a zeroed, dummy span — presumably returned when
/// peeking past the end of the token stream (the peek helpers are outside
/// this chunk; confirm there).
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
/// Newtype over `bool` recording whether a trailing bracket was matched.
/// NOTE(review): all usage sites are outside this chunk — confirm semantics
/// where it is consumed.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
239
/// Behavior knobs for the parser, independent of (but defaulted from) the
/// dialect — see `Parser::new`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept trailing commas in comma-separated lists. `Parser::new`
    /// initializes this from `Dialect::supports_trailing_commas`.
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization; forwarded to the
    /// tokenizer by `try_with_sql`.
    pub unescape: bool,
    /// Require `;` between statements. When `false`, `parse_statements`
    /// never demands a delimiter.
    pub require_semicolon_stmt_delimiter: bool,
}
252
253impl Default for ParserOptions {
254 fn default() -> Self {
255 Self {
256 trailing_commas: false,
257 unescape: true,
258 require_semicolon_stmt_delimiter: true,
259 }
260 }
261}
262
263impl ParserOptions {
264 pub fn new() -> Self {
266 Default::default()
267 }
268
269 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
281 self.trailing_commas = trailing_commas;
282 self
283 }
284
285 pub fn with_unescape(mut self, unescape: bool) -> Self {
288 self.unescape = unescape;
289 self
290 }
291}
292
/// Internal parser mode that adjusts expression-parsing rules.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Inside a `CONNECT BY` clause. NOTE(review): its effects are outside
    /// this chunk — confirm at usage sites.
    ConnectBy,
    /// Inside a column definition; `parse_subexpr` skips its `COLLATE`
    /// handling in this state (via `in_column_definition_state`).
    ColumnDefinition,
}
308
/// SQL parser: a token stream plus a cursor, parameterized by dialect.
pub struct Parser<'a> {
    /// The token stream being parsed (includes whitespace/comment tokens —
    /// see `into_comments`).
    tokens: Vec<TokenWithSpan>,
    /// Index of the next unprocessed token in `tokens`.
    index: usize,
    /// Current parsing mode (see `ParserState`).
    state: ParserState,
    /// Dialect driving keyword and feature decisions.
    dialect: &'a dyn Dialect,
    /// Behavior knobs; defaulted from the dialect in `Parser::new`.
    options: ParserOptions,
    /// Bounds nesting depth to avoid stack overflow on deeply nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
365 pub fn new(dialect: &'a dyn Dialect) -> Self {
381 Self {
382 tokens: vec![],
383 index: 0,
384 state: ParserState::Normal,
385 dialect,
386 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
387 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
388 }
389 }
390
    /// Replaces the recursion limit (default `DEFAULT_REMAINING_DEPTH`) used
    /// to bound nested expressions and statements.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
417
    /// Replaces all parser options (overriding the dialect-derived defaults
    /// set up by `Parser::new`).
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
444
    /// Installs a pre-tokenized stream (with source spans) and resets the
    /// cursor to the beginning.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
    /// Tokenizes `sql` with this parser's dialect and unescape option, then
    /// installs the resulting tokens. Tokenization failures surface as
    /// `ParserError::TokenizerError` via the `From<TokenizerError>` impl.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
478
    /// Parses zero or more semicolon-separated statements until EOF, or until
    /// a dangling `END` keyword (left for an enclosing construct to consume).
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any run of semicolons between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some configurations allow statements without `;` separators.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // `END` right after a statement terminates a surrounding
                // block rather than starting a new statement.
                Token::Word(word)
                    if expecting_statement_delimiter && word.keyword == Keyword::END =>
                {
                    break;
                }
                _ => {}
            }

            // Two statements without a separator is an error.
            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
    /// One-shot convenience: builds a parser for `dialect`, tokenizes `sql`,
    /// and parses all statements.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
548
    /// Like `parse_sql`, but also returns the comment tokens encountered
    /// during tokenization (see `into_comments`).
    pub fn parse_sql_with_comments(
        dialect: &'a dyn Dialect,
        sql: &str,
    ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
        let mut p = Parser::new(dialect).try_with_sql(sql)?;
        p.parse_statements().map(|stmts| (stmts, p.into_comments()))
    }
560
    /// Consumes the parser and collects every single- and multi-line comment
    /// token (with its span) into a `Comments` accumulator.
    fn into_comments(self) -> comments::Comments {
        let mut comments = comments::Comments::default();
        for t in self.tokens.into_iter() {
            match t.token {
                Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::SingleLine {
                            content: comment,
                            prefix,
                        },
                        span: t.span,
                    });
                }
                Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::MultiLine(comment),
                        span: t.span,
                    });
                }
                // Non-comment tokens are ignored.
                _ => {}
            }
        }
        comments
    }
586
    /// Parses a single statement by dispatching on its leading token.
    ///
    /// The dialect gets first refusal via `Dialect::parse_statement`;
    /// otherwise the first keyword (or `(` for a parenthesized query)
    /// selects the specific `parse_*` routine. Handlers that need to
    /// re-inspect the leading keyword push it back with `prev_token` first.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Bound statement nesting depth; the guard refunds the budget on drop.
        let _guard = self.recursion_counter.try_decrease()?;

        // Dialect-specific statement syntax takes precedence.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                // ATTACH has DuckDB-specific syntax; other dialects use the
                // generic (SQLite-style) form.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // LISTEN/UNLISTEN/NOTIFY are gated on dialect support.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                Keyword::SECURITY => self.parse_security_label().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A bare `(` can only start a parenthesized query here.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
732
    /// Parses `CASE [<operand>] WHEN ... [ELSE ...] END [CASE]` as a
    /// *statement* (each arm holds statements, not expressions).
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // `CASE <expr> WHEN ...` has an operand; `CASE WHEN ...` does not.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the attached token is whichever
        // keyword came last.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
769
    /// Parses `IF <cond> THEN ... [ELSEIF ...]* [ELSE ...] END IF` as a
    /// statement.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF arms.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The terminator is the two-keyword sequence `END IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
809
810 fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
814 self.expect_keyword_is(Keyword::WHILE)?;
815 let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
816
817 Ok(WhileStatement { while_block })
818 }
819
820 fn parse_conditional_statement_block(
828 &mut self,
829 terminal_keywords: &[Keyword],
830 ) -> Result<ConditionalStatementBlock, ParserError> {
831 let start_token = self.get_current_token().clone(); let mut then_token = None;
833
834 let condition = match &start_token.token {
835 Token::Word(w) if w.keyword == Keyword::ELSE => None,
836 Token::Word(w) if w.keyword == Keyword::WHILE => {
837 let expr = self.parse_expr()?;
838 Some(expr)
839 }
840 _ => {
841 let expr = self.parse_expr()?;
842 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
843 Some(expr)
844 }
845 };
846
847 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
848
849 Ok(ConditionalStatementBlock {
850 start_token: AttachedToken(start_token),
851 condition,
852 then_token,
853 conditional_statements,
854 })
855 }
856
    /// Parses the statements of a conditional arm: either an explicit
    /// `BEGIN ... END` block or a bare statement sequence terminated by one
    /// of `terminal_keywords`.
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }
880
    /// Parses `RAISE [USING MESSAGE = <expr> | <expr>]`; the value is
    /// optional (a bare `RAISE` yields `value: None`).
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // Best-effort: a plain expression, or nothing at all.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }
    /// Parses `COMMENT [IF EXISTS] ON <object-type> <name> ... IS {'text' | NULL}`
    /// (the leading `COMMENT` keyword has already been consumed).
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        let keyword = match &token.token {
            Token::Word(w) => Some(w.keyword),
            _ => None,
        };
        let object_type = match keyword {
            // MATERIALIZED VIEW is the only two-keyword object type here.
            Some(Keyword::MATERIALIZED) => {
                self.expect_keyword_is(Keyword::VIEW)?;
                CommentObject::MaterializedView
            }
            Some(other) => match CommentObject::from_keyword(other) {
                Some(obj) => obj,
                None => return self.expected("comment object_type", token),
            },
            None => return self.expected("comment object_type", token),
        };
        let object_name = self.parse_object_name(false)?;

        // Functions/procedures/aggregates may carry a parenthesized argument
        // type list; only the data types are kept.
        let arguments = match object_type {
            CommentObject::Function | CommentObject::Procedure | CommentObject::Aggregate => {
                if self.consume_token(&Token::LParen) {
                    let args =
                        self.parse_comma_separated0(Self::parse_function_arg, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;
                    Some(args.into_iter().map(|a| a.data_type).collect())
                } else {
                    None
                }
            }
            _ => None,
        };

        // Aggregates require the argument list; the others may omit it.
        if object_type == CommentObject::Aggregate && arguments.is_none() {
            return Err(ParserError::ParserError(
                "COMMENT ON AGGREGATE requires an argument list, e.g. AGGREGATE foo(int)".into(),
            ));
        }

        // Triggers and policies are scoped to a table: `... ON <table>`.
        let table_name = match object_type {
            CommentObject::Trigger | CommentObject::Policy => {
                self.expect_keyword_is(Keyword::ON)?;
                Some(self.parse_object_name(false)?)
            }
            _ => None,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` clears the comment.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            arguments,
            table_name,
            comment,
            if_exists,
        })
    }
965
966 pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
968 let mut channel = None;
969 let mut tables: Vec<ObjectName> = vec![];
970 let mut read_lock = false;
971 let mut export = false;
972
973 if !dialect_of!(self is MySqlDialect | GenericDialect) {
974 return parser_err!(
975 "Unsupported statement FLUSH",
976 self.peek_token_ref().span.start
977 );
978 }
979
980 let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
981 Some(FlushLocation::NoWriteToBinlog)
982 } else if self.parse_keyword(Keyword::LOCAL) {
983 Some(FlushLocation::Local)
984 } else {
985 None
986 };
987
988 let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
989 FlushType::BinaryLogs
990 } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
991 FlushType::EngineLogs
992 } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
993 FlushType::ErrorLogs
994 } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
995 FlushType::GeneralLogs
996 } else if self.parse_keywords(&[Keyword::HOSTS]) {
997 FlushType::Hosts
998 } else if self.parse_keyword(Keyword::PRIVILEGES) {
999 FlushType::Privileges
1000 } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
1001 FlushType::OptimizerCosts
1002 } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
1003 if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
1004 channel = Some(self.parse_object_name(false).unwrap().to_string());
1005 }
1006 FlushType::RelayLogs
1007 } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
1008 FlushType::SlowLogs
1009 } else if self.parse_keyword(Keyword::STATUS) {
1010 FlushType::Status
1011 } else if self.parse_keyword(Keyword::USER_RESOURCES) {
1012 FlushType::UserResources
1013 } else if self.parse_keywords(&[Keyword::LOGS]) {
1014 FlushType::Logs
1015 } else if self.parse_keywords(&[Keyword::TABLES]) {
1016 loop {
1017 let next_token = self.next_token();
1018 match &next_token.token {
1019 Token::Word(w) => match w.keyword {
1020 Keyword::WITH => {
1021 read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
1022 }
1023 Keyword::FOR => {
1024 export = self.parse_keyword(Keyword::EXPORT);
1025 }
1026 Keyword::NoKeyword => {
1027 self.prev_token();
1028 tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
1029 }
1030 _ => {}
1031 },
1032 _ => {
1033 break;
1034 }
1035 }
1036 }
1037
1038 FlushType::Tables
1039 } else {
1040 return self.expected_ref(
1041 "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
1042 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
1043 self.peek_token_ref(),
1044 );
1045 };
1046
1047 Ok(Statement::Flush {
1048 object_type,
1049 location,
1050 channel,
1051 read_lock,
1052 export,
1053 tables,
1054 })
1055 }
1056
    /// Parses the tail of `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`.
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition action is optional; `maybe_parse` rewinds on failure.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1084
    /// Parses the tail of `TRUNCATE [TABLE] [IF EXISTS] <name>[, ...]` with
    /// optional `PARTITION (...)`, identity/cascade options (Postgres and
    /// generic dialects only), and `ON CLUSTER`.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        // Each target may carry `ONLY` and a trailing `*`.
        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // `RESTART|CONTINUE IDENTITY` and `CASCADE|RESTRICT` are only
        // recognized for Postgres-flavored dialects.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1135
1136 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1137 if self.parse_keyword(Keyword::CASCADE) {
1138 Some(CascadeOption::Cascade)
1139 } else if self.parse_keyword(Keyword::RESTRICT) {
1140 Some(CascadeOption::Restrict)
1141 } else {
1142 None
1143 }
1144 }
1145
    /// Parses the optional `( READ_ONLY [TRUE|FALSE], TYPE <ident>, ... )`
    /// option list of a DuckDB `ATTACH`; returns an empty list when no `(`
    /// follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean value is optional (bare READ_ONLY is allowed).
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // Options are comma-separated and close with `)`.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1182
    /// Parses the tail of a DuckDB
    /// `ATTACH [DATABASE] [IF NOT EXISTS] '<path>' [AS <alias>] [(options)]`.
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1203
1204 pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1206 let database = self.parse_keyword(Keyword::DATABASE);
1207 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1208 let database_alias = self.parse_identifier()?;
1209 Ok(Statement::DetachDuckDBDatabase {
1210 if_exists,
1211 database,
1212 database_alias,
1213 })
1214 }
1215
1216 pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1218 let database = self.parse_keyword(Keyword::DATABASE);
1219 let database_file_name = self.parse_expr()?;
1220 self.expect_keyword_is(Keyword::AS)?;
1221 let schema_name = self.parse_identifier()?;
1222 Ok(Statement::AttachDatabase {
1223 database,
1224 schema_name,
1225 database_file_name,
1226 })
1227 }
1228
    /// Parses the tail of an `ANALYZE [TABLE] [<name>] ...` statement with
    /// its optional column list, `PARTITION`, `FOR COLUMNS`,
    /// `CACHE METADATA`, `NOSCAN`, and `COMPUTE STATISTICS` clauses.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // Parenthesized column list directly after the table name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // The remaining clauses may appear in any order; loop until none match.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list after FOR COLUMNS is itself optional.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1293
1294 pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
1296 let index = self.index;
1297
1298 let next_token = self.next_token();
1299 match next_token.token {
1300 t @ (Token::Word(_) | Token::SingleQuotedString(_))
1301 if self.peek_token_ref().token == Token::Period =>
1302 {
1303 let mut id_parts: Vec<Ident> = vec![match t {
1304 Token::Word(w) => w.into_ident(next_token.span),
1305 Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1306 _ => {
1307 return Err(ParserError::ParserError(
1308 "Internal parser error: unexpected token type".to_string(),
1309 ))
1310 }
1311 }];
1312
1313 while self.consume_token(&Token::Period) {
1314 let next_token = self.next_token();
1315 match next_token.token {
1316 Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1317 Token::SingleQuotedString(s) => {
1318 id_parts.push(Ident::with_quote('\'', s))
1320 }
1321 Token::Placeholder(s) => {
1322 id_parts.push(Ident::new(s))
1325 }
1326 Token::Mul => {
1327 return Ok(Expr::QualifiedWildcard(
1328 ObjectName::from(id_parts),
1329 AttachedToken(next_token),
1330 ));
1331 }
1332 _ => {
1333 return self.expected("an identifier or a '*' after '.'", next_token);
1334 }
1335 }
1336 }
1337 }
1338 Token::Mul => {
1339 return Ok(Expr::Wildcard(AttachedToken(next_token)));
1340 }
1341 Token::LParen => {
1343 let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
1344 if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
1345 let mul_token = self.next_token(); self.next_token(); return Ok(Expr::Wildcard(AttachedToken(mul_token)));
1348 }
1349 }
1350 _ => (),
1351 };
1352
1353 self.index = index;
1354 self.parse_expr()
1355 }
1356
    /// Parses a complete expression starting from the lowest precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1361
1362 pub fn parse_expr_with_alias_and_order_by(
1364 &mut self,
1365 ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1366 let expr = self.parse_expr()?;
1367
1368 fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1369 explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1370 }
1371 let alias = self.parse_optional_alias_inner(None, validator)?;
1372 let order_by = OrderByOptions {
1373 asc: self.parse_asc_desc(),
1374 nulls_first: None,
1375 };
1376 Ok(ExprWithAliasAndOrderBy {
1377 expr: ExprWithAlias { expr, alias },
1378 order_by,
1379 })
1380 }
1381
    /// Precedence-climbing expression parser: parses a prefix expression,
    /// then folds in infix operators whose precedence exceeds `precedence`.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Bound nesting depth; the guard refunds the budget when dropped.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        // COLLATE binds to the parsed expression — except while parsing a
        // column definition (see `ParserState::ColumnDefinition`), where it
        // is left for the column parser.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            // Stop once the next operator binds no tighter than our caller.
            if precedence >= next_precedence {
                break;
            }

            // A following `.` starts a compound access, not an infix operator.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1420
1421 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1423 let condition = self.parse_expr()?;
1424 let message = if self.parse_keyword(Keyword::AS) {
1425 Some(self.parse_expr()?)
1426 } else {
1427 None
1428 };
1429
1430 Ok(Statement::Assert { condition, message })
1431 }
1432
1433 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1435 let name = self.parse_identifier()?;
1436 Ok(Statement::Savepoint { name })
1437 }
1438
1439 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1441 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1442 let name = self.parse_identifier()?;
1443
1444 Ok(Statement::ReleaseSavepoint { name })
1445 }
1446
1447 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1449 let channel = self.parse_identifier()?;
1450 Ok(Statement::LISTEN { channel })
1451 }
1452
1453 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1455 let channel = if self.consume_token(&Token::Mul) {
1456 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1457 } else {
1458 match self.parse_identifier() {
1459 Ok(expr) => expr,
1460 _ => {
1461 self.prev_token();
1462 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1463 }
1464 }
1465 };
1466 Ok(Statement::UNLISTEN { channel })
1467 }
1468
1469 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1471 let channel = self.parse_identifier()?;
1472 let payload = if self.consume_token(&Token::Comma) {
1473 Some(self.parse_literal_string()?)
1474 } else {
1475 None
1476 };
1477 Ok(Statement::NOTIFY { channel, payload })
1478 }
1479
1480 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1482 if self.peek_keyword(Keyword::TABLE) {
1483 self.expect_keyword(Keyword::TABLE)?;
1484 let rename_tables = self.parse_comma_separated(|parser| {
1485 let old_name = parser.parse_object_name(false)?;
1486 parser.expect_keyword(Keyword::TO)?;
1487 let new_name = parser.parse_object_name(false)?;
1488
1489 Ok(RenameTable { old_name, new_name })
1490 })?;
1491 Ok(rename_tables.into())
1492 } else {
1493 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1494 }
1495 }
1496
    /// Tries to parse an expression prefix for `w`, a word that is (or is
    /// treated as) a reserved keyword.
    ///
    /// Returns `Ok(Some(expr))` when the keyword begins a keyword-specific
    /// expression, and `Ok(None)` when it does not, so the caller can fall
    /// back to treating the word as an identifier.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean and NULL literals: rewind and reuse the value parser.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Niladic "functions" spelled without parentheses
            // (Postgres/generic dialects only).
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Datetime functions; `parse_time_functions` accepts them with or
            // without a parenthesized argument list.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS is a subquery test; Databricks additionally requires the
            // next word to be SELECT/WITH before treating it as one.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]`: an array literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<subquery>)`: an array built from a subquery, for
            // dialects where `ARRAY(` is not a plain function call.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR <expr>` is only an operator while inside a CONNECT BY.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // `MAP {...}` map-literal syntax for dialects that support it.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric typed-string literals (e.g. `point '(1,2)'`).
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            // Not a keyword-specific expression prefix.
            _ => Ok(None),
        }
    }
1623
1624 fn parse_expr_prefix_by_unreserved_word(
1626 &mut self,
1627 w: &Word,
1628 w_span: Span,
1629 ) -> Result<Expr, ParserError> {
1630 let is_outer_join = self.peek_outer_join_operator();
1631 match &self.peek_token_ref().token {
1632 Token::LParen if !is_outer_join => {
1633 let id_parts = vec![w.to_ident(w_span)];
1634 self.parse_function(ObjectName::from(id_parts))
1635 }
1636 Token::SingleQuotedString(_)
1638 | Token::DoubleQuotedString(_)
1639 | Token::HexStringLiteral(_)
1640 if w.value.starts_with('_') =>
1641 {
1642 Ok(Expr::Prefixed {
1643 prefix: w.to_ident(w_span),
1644 value: self.parse_introduced_string_expr()?.into(),
1645 })
1646 }
1647 Token::SingleQuotedString(_)
1649 | Token::DoubleQuotedString(_)
1650 | Token::HexStringLiteral(_)
1651 if w.value.starts_with('_') =>
1652 {
1653 Ok(Expr::Prefixed {
1654 prefix: w.to_ident(w_span),
1655 value: self.parse_introduced_string_expr()?.into(),
1656 })
1657 }
1658 Token::Arrow if self.dialect.supports_lambda_functions() => {
1662 self.expect_token(&Token::Arrow)?;
1663 Ok(Expr::Lambda(LambdaFunction {
1664 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1665 name: w.to_ident(w_span),
1666 data_type: None,
1667 }),
1668 body: Box::new(self.parse_expr()?),
1669 syntax: LambdaSyntax::Arrow,
1670 }))
1671 }
1672 Token::Word(_)
1676 if self.dialect.supports_lambda_functions()
1677 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1678 {
1679 let data_type = self.parse_data_type()?;
1680 self.expect_token(&Token::Arrow)?;
1681 Ok(Expr::Lambda(LambdaFunction {
1682 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1683 name: w.to_ident(w_span),
1684 data_type: Some(data_type),
1685 }),
1686 body: Box::new(self.parse_expr()?),
1687 syntax: LambdaSyntax::Arrow,
1688 }))
1689 }
1690 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1691 }
1692 }
1693
1694 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1697 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1698 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1699 } else {
1700 false
1701 }
1702 }
1703
1704 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1706 if let Some(prefix) = self.dialect.parse_prefix(self) {
1708 return prefix;
1709 }
1710
1711 let loc = self.peek_token_ref().span.start;
1728 let opt_expr = self.maybe_parse(|parser| {
1729 match parser.parse_data_type()? {
1730 DataType::Interval { .. } => parser.parse_interval(),
1731 DataType::Custom(ref name, ref modifiers)
1742 if modifiers.is_empty()
1743 && Self::is_simple_unquoted_object_name(name, "xml")
1744 && parser.dialect.supports_xml_expressions() =>
1745 {
1746 Ok(Expr::TypedString(TypedString {
1747 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1748 value: parser.parse_value()?,
1749 uses_odbc_syntax: false,
1750 }))
1751 }
1752 DataType::Custom(..) => parser_err!("dummy", loc),
1753 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1755 Ok(Expr::Cast {
1756 kind: CastKind::Cast,
1757 expr: Box::new(parser.parse_expr()?),
1758 data_type: DataType::Binary(None),
1759 array: false,
1760 format: None,
1761 })
1762 }
1763 data_type => Ok(Expr::TypedString(TypedString {
1764 data_type,
1765 value: parser.parse_value()?,
1766 uses_odbc_syntax: false,
1767 })),
1768 }
1769 })?;
1770
1771 if let Some(expr) = opt_expr {
1772 return Ok(expr);
1773 }
1774
1775 let dialect = self.dialect;
1779
1780 self.advance_token();
1781 let next_token_index = self.get_current_index();
1782 let next_token = self.get_current_token();
1783 let span = next_token.span;
1784 let expr = match &next_token.token {
1785 Token::Word(w) => {
1786 let w = w.clone();
1795 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1796 Ok(Some(expr)) => Ok(expr),
1798
1799 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1801
1802 Err(e) => {
1809 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1810 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1811 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1812 }) {
1813 return Ok(expr);
1814 }
1815 }
1816 return Err(e);
1817 }
1818 }
1819 } Token::LBracket => self.parse_array_expr(false),
1822 tok @ Token::Minus | tok @ Token::Plus => {
1823 let op = if *tok == Token::Plus {
1824 UnaryOperator::Plus
1825 } else {
1826 UnaryOperator::Minus
1827 };
1828 Ok(Expr::UnaryOp {
1829 op,
1830 expr: Box::new(
1831 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1832 ),
1833 })
1834 }
1835 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1836 op: UnaryOperator::BangNot,
1837 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1838 }),
1839 tok @ Token::DoubleExclamationMark
1840 | tok @ Token::PGSquareRoot
1841 | tok @ Token::PGCubeRoot
1842 | tok @ Token::AtSign
1843 if dialect_is!(dialect is PostgreSqlDialect) =>
1844 {
1845 let op = match tok {
1846 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1847 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1848 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1849 Token::AtSign => UnaryOperator::PGAbs,
1850 _ => {
1851 return Err(ParserError::ParserError(
1852 "Internal parser error: unexpected unary operator token".to_string(),
1853 ))
1854 }
1855 };
1856 Ok(Expr::UnaryOp {
1857 op,
1858 expr: Box::new(
1859 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1860 ),
1861 })
1862 }
1863 Token::Tilde => Ok(Expr::UnaryOp {
1864 op: UnaryOperator::BitwiseNot,
1865 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1866 }),
1867 tok @ Token::Sharp
1868 | tok @ Token::AtDashAt
1869 | tok @ Token::AtAt
1870 | tok @ Token::QuestionMarkDash
1871 | tok @ Token::QuestionPipe
1872 if self.dialect.supports_geometric_types() =>
1873 {
1874 let op = match tok {
1875 Token::Sharp => UnaryOperator::Hash,
1876 Token::AtDashAt => UnaryOperator::AtDashAt,
1877 Token::AtAt => UnaryOperator::DoubleAt,
1878 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1879 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1880 _ => {
1881 return Err(ParserError::ParserError(format!(
1882 "Unexpected token in unary operator parsing: {tok:?}"
1883 )))
1884 }
1885 };
1886 Ok(Expr::UnaryOp {
1887 op,
1888 expr: Box::new(
1889 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1890 ),
1891 })
1892 }
1893 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1894 {
1895 self.prev_token();
1896 Ok(Expr::Value(self.parse_value()?))
1897 }
1898 Token::UnicodeStringLiteral(_) => {
1899 self.prev_token();
1900 Ok(Expr::Value(self.parse_value()?))
1901 }
1902 Token::Number(_, _)
1903 | Token::SingleQuotedString(_)
1904 | Token::DoubleQuotedString(_)
1905 | Token::TripleSingleQuotedString(_)
1906 | Token::TripleDoubleQuotedString(_)
1907 | Token::DollarQuotedString(_)
1908 | Token::SingleQuotedByteStringLiteral(_)
1909 | Token::DoubleQuotedByteStringLiteral(_)
1910 | Token::TripleSingleQuotedByteStringLiteral(_)
1911 | Token::TripleDoubleQuotedByteStringLiteral(_)
1912 | Token::SingleQuotedRawStringLiteral(_)
1913 | Token::DoubleQuotedRawStringLiteral(_)
1914 | Token::TripleSingleQuotedRawStringLiteral(_)
1915 | Token::TripleDoubleQuotedRawStringLiteral(_)
1916 | Token::NationalStringLiteral(_)
1917 | Token::QuoteDelimitedStringLiteral(_)
1918 | Token::NationalQuoteDelimitedStringLiteral(_)
1919 | Token::HexStringLiteral(_) => {
1920 self.prev_token();
1921 Ok(Expr::Value(self.parse_value()?))
1922 }
1923 Token::LParen => {
1924 let expr =
1925 if let Some(expr) = self.try_parse_expr_sub_query()? {
1926 expr
1927 } else if let Some(lambda) = self.try_parse_lambda()? {
1928 return Ok(lambda);
1929 } else {
1930 let exprs = self.with_state(ParserState::Normal, |p| {
1941 p.parse_comma_separated(Parser::parse_expr)
1942 })?;
1943 match exprs.len() {
1944 0 => return Err(ParserError::ParserError(
1945 "Internal parser error: parse_comma_separated returned empty list"
1946 .to_string(),
1947 )),
1948 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1949 _ => Expr::Tuple(exprs),
1950 }
1951 };
1952 self.expect_token(&Token::RParen)?;
1953 Ok(expr)
1954 }
1955 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1956 self.prev_token();
1957 Ok(Expr::Value(self.parse_value()?))
1958 }
1959 Token::LBrace => {
1960 self.prev_token();
1961 self.parse_lbrace_expr()
1962 }
1963 _ => self.expected_at("an expression", next_token_index),
1964 }?;
1965
1966 Ok(expr)
1967 }
1968
1969 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1970 Ok(Expr::TypedString(TypedString {
1971 data_type: DataType::GeometricType(kind),
1972 value: self.parse_value()?,
1973 uses_odbc_syntax: false,
1974 }))
1975 }
1976
    /// Parse compound access (dot access and subscripts) following `root`,
    /// extending `chain`, and assemble the final expression: a qualified
    /// wildcard, an Oracle `(+)` outer-join reference, or a compound
    /// identifier / field-access expression.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        // Set when a trailing `.*` is consumed (Postgres-style qualified wildcard).
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `expr.*`: only Postgres consumes the `*` here; other
                        // dialects rewind so the caller sees the `.` again.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    // `expr.'quoted'`: treat the quoted string as an identifier.
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    // `expr.?`: a placeholder used as a field name.
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => {
                        // General case: parse whatever follows the dot at
                        // `Period` precedence and flatten it into the chain.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested compound results into one chain.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            // Speculative parse failed; fall back to a plain
                            // identifier (surfacing its error on failure).
                            None => {
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `expr[...]` subscript access (not for PartiQL dialects).
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` requires every chain element to be a plain identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // Oracle `(+)` outer-join marker after a column reference.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2106
    /// Combine `root` and its access chain into a single expression,
    /// preferring the most specific AST shape: a plain compound identifier, a
    /// qualified function call, an outer-join column reference, or a generic
    /// `CompoundFieldAccess` as the fallback.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        // No access chain: the root stands alone.
        if access_chain.is_empty() {
            return Ok(root);
        }

        // All identifiers (`a.b.c`): a compound identifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // Identifiers ending in a function call (`db.schema.fn(...)`):
        // fold the leading identifiers into the function's qualified name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // skip the function itself; the rest must be identifiers
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // A single trailing `(+)` outer-join element (`t.col(+)`): merge the
        // identifiers into one outer-join column reference.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Fallback: a generic field-access expression.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2199
2200 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2201 match k {
2202 Keyword::LOCAL => Some(ContextModifier::Local),
2203 Keyword::GLOBAL => Some(ContextModifier::Global),
2204 Keyword::SESSION => Some(ContextModifier::Session),
2205 _ => None,
2206 }
2207 }
2208
2209 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2211 if !matches!(root, Expr::Identifier(_)) {
2212 return false;
2213 }
2214 fields
2215 .iter()
2216 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2217 }
2218
2219 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2221 let mut idents = vec![];
2222 if let Expr::Identifier(root) = root {
2223 idents.push(root);
2224 for x in fields {
2225 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2226 idents.push(ident);
2227 } else {
2228 return parser_err!(
2229 format!("Expected identifier, found: {}", x),
2230 x.span().start
2231 );
2232 }
2233 }
2234 Ok(idents)
2235 } else {
2236 parser_err!(
2237 format!("Expected identifier, found: {}", root),
2238 root.span().start
2239 )
2240 }
2241 }
2242
2243 fn peek_outer_join_operator(&mut self) -> bool {
2245 if !self.dialect.supports_outer_join_operator() {
2246 return false;
2247 }
2248
2249 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2250 Token::LParen == maybe_lparen.token
2251 && Token::Plus == maybe_plus.token
2252 && Token::RParen == maybe_rparen.token
2253 }
2254
2255 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2258 self.dialect.supports_outer_join_operator()
2259 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2260 }
2261
2262 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2264 self.expect_token(&Token::LParen)?;
2265 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2266 self.expect_token(&Token::RParen)?;
2267
2268 Ok(options)
2269 }
2270
2271 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2272 let name = self.parse_identifier()?;
2273
2274 let next_token = self.peek_token_ref();
2275 if next_token == &Token::Comma || next_token == &Token::RParen {
2276 return Ok(UtilityOption { name, arg: None });
2277 }
2278 let arg = self.parse_expr()?;
2279
2280 Ok(UtilityOption {
2281 name,
2282 arg: Some(arg),
2283 })
2284 }
2285
2286 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2287 if !self.peek_sub_query() {
2288 return Ok(None);
2289 }
2290
2291 Ok(Some(Expr::Subquery(self.parse_query()?)))
2292 }
2293
2294 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2295 if !self.dialect.supports_lambda_functions() {
2296 return Ok(None);
2297 }
2298 self.maybe_parse(|p| {
2299 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2300 p.expect_token(&Token::RParen)?;
2301 p.expect_token(&Token::Arrow)?;
2302 let expr = p.parse_expr()?;
2303 Ok(Expr::Lambda(LambdaFunction {
2304 params: OneOrManyWithParens::Many(params),
2305 body: Box::new(expr),
2306 syntax: LambdaSyntax::Arrow,
2307 }))
2308 })
2309 }
2310
2311 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2321 let params = self.parse_lambda_function_parameters()?;
2323 self.expect_token(&Token::Colon)?;
2325 let body = self.parse_expr()?;
2327 Ok(Expr::Lambda(LambdaFunction {
2328 params,
2329 body: Box::new(body),
2330 syntax: LambdaSyntax::LambdaKeyword,
2331 }))
2332 }
2333
2334 fn parse_lambda_function_parameters(
2336 &mut self,
2337 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2338 let params = if self.consume_token(&Token::LParen) {
2340 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2342 self.expect_token(&Token::RParen)?;
2343 OneOrManyWithParens::Many(params)
2344 } else {
2345 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2347 if params.len() == 1 {
2348 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2349 } else {
2350 OneOrManyWithParens::Many(params)
2351 }
2352 };
2353 Ok(params)
2354 }
2355
2356 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2358 let name = self.parse_identifier()?;
2359 let data_type = match &self.peek_token_ref().token {
2360 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2361 _ => None,
2362 };
2363 Ok(LambdaFunctionParameter { name, data_type })
2364 }
2365
2366 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2373 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2375 return Ok(Some(expr));
2376 }
2377 self.maybe_parse_odbc_body_datetime()
2379 }
2380
2381 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2392 self.maybe_parse(|p| {
2393 let token = p.next_token().clone();
2394 let word_string = token.token.to_string();
2395 let data_type = match word_string.as_str() {
2396 "t" => DataType::Time(None, TimezoneInfo::None),
2397 "d" => DataType::Date,
2398 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2399 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2400 };
2401 let value = p.parse_value()?;
2402 Ok(Expr::TypedString(TypedString {
2403 data_type,
2404 value,
2405 uses_odbc_syntax: true,
2406 }))
2407 })
2408 }
2409
2410 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2419 self.maybe_parse(|p| {
2420 p.expect_keyword(Keyword::FN)?;
2421 let fn_name = p.parse_object_name(false)?;
2422 let mut fn_call = p.parse_function_call(fn_name)?;
2423 fn_call.uses_odbc_syntax = true;
2424 Ok(Expr::Function(fn_call))
2425 })
2426 }
2427
2428 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2430 self.parse_function_call(name).map(Expr::Function)
2431 }
2432
    /// Parse a function call after its name: the argument list plus the
    /// optional trailing clauses (`WITHIN GROUP`, `FILTER`, null treatment,
    /// and `OVER`), in that order.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // `fn(<subquery>)` for dialects that allow a bare subquery argument.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style parameterized functions: `fn(params)(args)`.
        // The first list becomes the parameters, the second the arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE <expr>)` — aggregate filter, dialect-gated.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for trailing RESPECT/IGNORE NULLS if no null-treatment
        // clause was already captured inside the argument list.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (...)` inline window spec, or `OVER <name>` named window.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2520
2521 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2523 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2524 Some(keyword) => {
2525 self.expect_keyword_is(Keyword::NULLS)?;
2526
2527 Ok(match keyword {
2528 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2529 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2530 _ => None,
2531 })
2532 }
2533 None => Ok(None),
2534 }
2535 }
2536
2537 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2539 let args = if self.consume_token(&Token::LParen) {
2540 FunctionArguments::List(self.parse_function_argument_list()?)
2541 } else {
2542 FunctionArguments::None
2543 };
2544 Ok(Expr::Function(Function {
2545 name,
2546 uses_odbc_syntax: false,
2547 parameters: FunctionArguments::None,
2548 args,
2549 filter: None,
2550 over: None,
2551 null_treatment: None,
2552 within_group: vec![],
2553 }))
2554 }
2555
2556 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2558 let next_token = self.next_token();
2559 match &next_token.token {
2560 Token::Word(w) => match w.keyword {
2561 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2562 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2563 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2564 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2565 },
2566 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2567 }
2568 }
2569
2570 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2572 let units = self.parse_window_frame_units()?;
2573 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2574 let start_bound = self.parse_window_frame_bound()?;
2575 self.expect_keyword_is(Keyword::AND)?;
2576 let end_bound = Some(self.parse_window_frame_bound()?);
2577 (start_bound, end_bound)
2578 } else {
2579 (self.parse_window_frame_bound()?, None)
2580 };
2581 Ok(WindowFrame {
2582 units,
2583 start_bound,
2584 end_bound,
2585 })
2586 }
2587
    /// Parse a single window frame bound: `CURRENT ROW`,
    /// `UNBOUNDED PRECEDING/FOLLOWING`, or `<expr> PRECEDING/FOLLOWING`.
    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
            Ok(WindowFrameBound::CurrentRow)
        } else {
            // `rows == None` encodes UNBOUNDED; otherwise it is the offset.
            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
                None
            } else {
                Some(Box::new(match &self.peek_token_ref().token {
                    // A quoted string here is parsed as an interval literal,
                    // e.g. `RANGE '1 day' PRECEDING`.
                    Token::SingleQuotedString(_) => self.parse_interval()?,
                    _ => self.parse_expr()?,
                }))
            };
            if self.parse_keyword(Keyword::PRECEDING) {
                Ok(WindowFrameBound::Preceding(rows))
            } else if self.parse_keyword(Keyword::FOLLOWING) {
                Ok(WindowFrameBound::Following(rows))
            } else {
                self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
            }
        }
    }
2610
2611 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2613 if self.dialect.supports_group_by_expr() {
2614 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2615 self.expect_token(&Token::LParen)?;
2616 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2617 self.expect_token(&Token::RParen)?;
2618 Ok(Expr::GroupingSets(result))
2619 } else if self.parse_keyword(Keyword::CUBE) {
2620 self.expect_token(&Token::LParen)?;
2621 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2622 self.expect_token(&Token::RParen)?;
2623 Ok(Expr::Cube(result))
2624 } else if self.parse_keyword(Keyword::ROLLUP) {
2625 self.expect_token(&Token::LParen)?;
2626 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2627 self.expect_token(&Token::RParen)?;
2628 Ok(Expr::Rollup(result))
2629 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2630 Ok(Expr::Tuple(vec![]))
2634 } else {
2635 self.parse_expr()
2636 }
2637 } else {
2638 self.parse_expr()
2640 }
2641 }
2642
2643 fn parse_tuple(
2647 &mut self,
2648 lift_singleton: bool,
2649 allow_empty: bool,
2650 ) -> Result<Vec<Expr>, ParserError> {
2651 if lift_singleton {
2652 if self.consume_token(&Token::LParen) {
2653 let result = if allow_empty && self.consume_token(&Token::RParen) {
2654 vec![]
2655 } else {
2656 let result = self.parse_comma_separated(Parser::parse_expr)?;
2657 self.expect_token(&Token::RParen)?;
2658 result
2659 };
2660 Ok(result)
2661 } else {
2662 Ok(vec![self.parse_expr()?])
2663 }
2664 } else {
2665 self.expect_token(&Token::LParen)?;
2666 let result = if allow_empty && self.consume_token(&Token::RParen) {
2667 vec![]
2668 } else {
2669 let result = self.parse_comma_separated(Parser::parse_expr)?;
2670 self.expect_token(&Token::RParen)?;
2671 result
2672 };
2673 Ok(result)
2674 }
2675 }
2676
2677 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2679 let case_token = AttachedToken(self.get_current_token().clone());
2680 let mut operand = None;
2681 if !self.parse_keyword(Keyword::WHEN) {
2682 operand = Some(Box::new(self.parse_expr()?));
2683 self.expect_keyword_is(Keyword::WHEN)?;
2684 }
2685 let mut conditions = vec![];
2686 loop {
2687 let condition = self.parse_expr()?;
2688 self.expect_keyword_is(Keyword::THEN)?;
2689 let result = self.parse_expr()?;
2690 conditions.push(CaseWhen { condition, result });
2691 if !self.parse_keyword(Keyword::WHEN) {
2692 break;
2693 }
2694 }
2695 let else_result = if self.parse_keyword(Keyword::ELSE) {
2696 Some(Box::new(self.parse_expr()?))
2697 } else {
2698 None
2699 };
2700 let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2701 Ok(Expr::Case {
2702 case_token,
2703 end_token,
2704 operand,
2705 conditions,
2706 else_result,
2707 })
2708 }
2709
2710 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2712 if self.parse_keyword(Keyword::FORMAT) {
2713 let value = self.parse_value()?;
2714 match self.parse_optional_time_zone()? {
2715 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2716 None => Ok(Some(CastFormat::Value(value))),
2717 }
2718 } else {
2719 Ok(None)
2720 }
2721 }
2722
2723 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2725 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2726 self.parse_value().map(Some)
2727 } else {
2728 Ok(None)
2729 }
2730 }
2731
2732 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2734 self.expect_token(&Token::LParen)?;
2735 let data_type = self.parse_data_type()?;
2736 self.expect_token(&Token::Comma)?;
2737 let expr = self.parse_expr()?;
2738 let styles = if self.consume_token(&Token::Comma) {
2739 self.parse_comma_separated(Parser::parse_expr)?
2740 } else {
2741 Default::default()
2742 };
2743 self.expect_token(&Token::RParen)?;
2744 Ok(Expr::Convert {
2745 is_try,
2746 expr: Box::new(expr),
2747 data_type: Some(data_type),
2748 charset: None,
2749 target_before_value: true,
2750 styles,
2751 })
2752 }
2753
2754 pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2759 if self.dialect.convert_type_before_value() {
2760 return self.parse_mssql_convert(is_try);
2761 }
2762 self.expect_token(&Token::LParen)?;
2763 let expr = self.parse_expr()?;
2764 if self.parse_keyword(Keyword::USING) {
2765 let charset = self.parse_object_name(false)?;
2766 self.expect_token(&Token::RParen)?;
2767 return Ok(Expr::Convert {
2768 is_try,
2769 expr: Box::new(expr),
2770 data_type: None,
2771 charset: Some(charset),
2772 target_before_value: false,
2773 styles: vec![],
2774 });
2775 }
2776 self.expect_token(&Token::Comma)?;
2777 let data_type = self.parse_data_type()?;
2778 let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2779 Some(self.parse_object_name(false)?)
2780 } else {
2781 None
2782 };
2783 self.expect_token(&Token::RParen)?;
2784 Ok(Expr::Convert {
2785 is_try,
2786 expr: Box::new(expr),
2787 data_type: Some(data_type),
2788 charset,
2789 target_before_value: false,
2790 styles: vec![],
2791 })
2792 }
2793
2794 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2796 self.expect_token(&Token::LParen)?;
2797 let expr = self.parse_expr()?;
2798 self.expect_keyword_is(Keyword::AS)?;
2799 let data_type = self.parse_data_type()?;
2800 let array = self.parse_keyword(Keyword::ARRAY);
2801 let format = self.parse_optional_cast_format()?;
2802 self.expect_token(&Token::RParen)?;
2803 Ok(Expr::Cast {
2804 kind,
2805 expr: Box::new(expr),
2806 data_type,
2807 array,
2808 format,
2809 })
2810 }
2811
2812 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2814 self.expect_token(&Token::LParen)?;
2815 let exists_node = Expr::Exists {
2816 negated,
2817 subquery: self.parse_query()?,
2818 };
2819 self.expect_token(&Token::RParen)?;
2820 Ok(exists_node)
2821 }
2822
2823 pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2825 self.expect_token(&Token::LParen)?;
2826 let field = self.parse_date_time_field()?;
2827
2828 let syntax = if self.parse_keyword(Keyword::FROM) {
2829 ExtractSyntax::From
2830 } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
2831 {
2832 ExtractSyntax::Comma
2833 } else {
2834 return Err(ParserError::ParserError(
2835 "Expected 'FROM' or ','".to_string(),
2836 ));
2837 };
2838
2839 let expr = self.parse_expr()?;
2840 self.expect_token(&Token::RParen)?;
2841 Ok(Expr::Extract {
2842 field,
2843 expr: Box::new(expr),
2844 syntax,
2845 })
2846 }
2847
2848 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2850 self.expect_token(&Token::LParen)?;
2851 let expr = self.parse_expr()?;
2852 let field = if self.parse_keyword(Keyword::TO) {
2854 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2856 } else if self.consume_token(&Token::Comma) {
2857 let v = self.parse_value()?;
2859 if matches!(v.value, Value::Number(_, _)) {
2860 CeilFloorKind::Scale(v)
2861 } else {
2862 return Err(ParserError::ParserError(
2863 "Scale field can only be of number type".to_string(),
2864 ));
2865 }
2866 } else {
2867 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2868 };
2869 self.expect_token(&Token::RParen)?;
2870 if is_ceil {
2871 Ok(Expr::Ceil {
2872 expr: Box::new(expr),
2873 field,
2874 })
2875 } else {
2876 Ok(Expr::Floor {
2877 expr: Box::new(expr),
2878 field,
2879 })
2880 }
2881 }
2882
    /// Parse `POSITION(<expr> IN <expr>)`. If that form does not match,
    /// `ident` is re-parsed as an ordinary function call instead
    /// (assumes `maybe_parse` backtracks on failure — consistent with its
    /// use elsewhere in this parser).
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        // Parse the needle at sub-BETWEEN precedence so that the `IN`
        // keyword terminates the first expression rather than being
        // consumed as an IN-list operator.
        let between_prec = self.dialect.prec_value(Precedence::Between);
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword_is(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            None => self.parse_function(ObjectName::from(vec![ident])),
        }
    }
2907
2908 pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2910 let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2911 Keyword::SUBSTR => true,
2912 Keyword::SUBSTRING => false,
2913 _ => {
2914 self.prev_token();
2915 return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
2916 }
2917 };
2918 self.expect_token(&Token::LParen)?;
2919 let expr = self.parse_expr()?;
2920 let mut from_expr = None;
2921 let special = self.consume_token(&Token::Comma);
2922 if special || self.parse_keyword(Keyword::FROM) {
2923 from_expr = Some(self.parse_expr()?);
2924 }
2925
2926 let mut to_expr = None;
2927 if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2928 to_expr = Some(self.parse_expr()?);
2929 }
2930 self.expect_token(&Token::RParen)?;
2931
2932 Ok(Expr::Substring {
2933 expr: Box::new(expr),
2934 substring_from: from_expr.map(Box::new),
2935 substring_for: to_expr.map(Box::new),
2936 special,
2937 shorthand,
2938 })
2939 }
2940
2941 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2945 self.expect_token(&Token::LParen)?;
2947 let expr = self.parse_expr()?;
2948 self.expect_keyword_is(Keyword::PLACING)?;
2949 let what_expr = self.parse_expr()?;
2950 self.expect_keyword_is(Keyword::FROM)?;
2951 let from_expr = self.parse_expr()?;
2952 let mut for_expr = None;
2953 if self.parse_keyword(Keyword::FOR) {
2954 for_expr = Some(self.parse_expr()?);
2955 }
2956 self.expect_token(&Token::RParen)?;
2957
2958 Ok(Expr::Overlay {
2959 expr: Box::new(expr),
2960 overlay_what: Box::new(what_expr),
2961 overlay_from: Box::new(from_expr),
2962 overlay_for: for_expr.map(Box::new),
2963 })
2964 }
2965
    /// Parse the parenthesized body of `TRIM`:
    /// `([BOTH|LEADING|TRAILING] [what FROM] expr)` or, on dialects that
    /// support it, the comma form `(expr, chars, ...)`.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        // Optional position keyword: BOTH / LEADING / TRAILING.
        let mut trim_where = None;
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // `TRIM(what FROM expr)`: the expression parsed above turns out
            // to be the characters to trim; the real subject follows FROM.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            // Comma form: `TRIM(expr, chars, ...)`.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            // Plain `TRIM([where] expr)`.
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3010
3011 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3015 let next_token = self.next_token();
3016 match &next_token.token {
3017 Token::Word(w) => match w.keyword {
3018 Keyword::BOTH => Ok(TrimWhereField::Both),
3019 Keyword::LEADING => Ok(TrimWhereField::Leading),
3020 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3021 _ => self.expected("trim_where field", next_token)?,
3022 },
3023 _ => self.expected("trim_where field", next_token),
3024 }
3025 }
3026
3027 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3030 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3031 self.expect_token(&Token::RBracket)?;
3032 Ok(Expr::Array(Array { elem: exprs, named }))
3033 }
3034
    /// Parse the optional `ON OVERFLOW ...` clause of LISTAGG:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE ['filler'] WITH|WITHOUT COUNT`.
    /// Returns `None` when no `ON OVERFLOW` is present.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler is optional: WITH/WITHOUT may follow directly.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    // Any string-literal token kind is accepted as the filler.
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3073
    /// Parse a date/time unit keyword (YEAR, MONTH, ..., TIMEZONE_REGION).
    /// Dialects may additionally allow arbitrary identifiers
    /// (`allow_extract_custom`) or single-quoted strings
    /// (`allow_extract_single_quotes`) as custom fields.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery/Generic allow `WEEK(<weekday>)`.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                _ if self.dialect.allow_extract_custom() => {
                    // Rewind so the word is re-read as a plain identifier.
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                // Rewind so the string is re-read as an identifier.
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3152
3153 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3157 match &self.peek_token_ref().token {
3158 Token::Word(w) => match w.keyword {
3159 Keyword::EXISTS => {
3160 let negated = true;
3161 let _ = self.parse_keyword(Keyword::EXISTS);
3162 self.parse_exists_expr(negated)
3163 }
3164 _ => Ok(Expr::UnaryOp {
3165 op: UnaryOperator::Not,
3166 expr: Box::new(
3167 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3168 ),
3169 }),
3170 },
3171 _ => Ok(Expr::UnaryOp {
3172 op: UnaryOperator::Not,
3173 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3174 }),
3175 }
3176 }
3177
3178 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3188 let token = self.expect_token(&Token::LBrace)?;
3189
3190 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3191 self.expect_token(&Token::RBrace)?;
3192 return Ok(fn_expr);
3193 }
3194
3195 if self.dialect.supports_dictionary_syntax() {
3196 self.prev_token(); return self.parse_dictionary();
3198 }
3199
3200 self.expected("an expression", token)
3201 }
3202
3203 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
3209 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
3210
3211 self.expect_keyword_is(Keyword::AGAINST)?;
3212
3213 self.expect_token(&Token::LParen)?;
3214
3215 let match_value = self.parse_value()?;
3217
3218 let in_natural_language_mode_keywords = &[
3219 Keyword::IN,
3220 Keyword::NATURAL,
3221 Keyword::LANGUAGE,
3222 Keyword::MODE,
3223 ];
3224
3225 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
3226
3227 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
3228
3229 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
3230 if self.parse_keywords(with_query_expansion_keywords) {
3231 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
3232 } else {
3233 Some(SearchModifier::InNaturalLanguageMode)
3234 }
3235 } else if self.parse_keywords(in_boolean_mode_keywords) {
3236 Some(SearchModifier::InBooleanMode)
3237 } else if self.parse_keywords(with_query_expansion_keywords) {
3238 Some(SearchModifier::WithQueryExpansion)
3239 } else {
3240 None
3241 };
3242
3243 self.expect_token(&Token::RParen)?;
3244
3245 Ok(Expr::MatchAgainst {
3246 columns,
3247 match_value,
3248 opt_search_modifier,
3249 })
3250 }
3251
    /// Parse an `INTERVAL` expression (the `INTERVAL` keyword has already
    /// been consumed): value, optional leading unit, optional precision(s),
    /// and an optional `TO <unit>` range, e.g. `INTERVAL '1-2' YEAR TO MONTH`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // When the dialect requires a trailing unit, the value may be a full
        // expression; otherwise only a prefix expression is taken so that a
        // following unit word is not absorbed into the value.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        // SECOND is special-cased: `SECOND (p, s)` carries both a leading
        // precision and a fractional-seconds precision in one parenthesis.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    // Only `... TO SECOND` takes a fractional precision.
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3334
    /// Peek (without consuming) whether the next token is one of the
    /// date/time unit keywords accepted as an INTERVAL qualifier.
    /// Note: this list is a subset of what `parse_date_time_field`
    /// accepts (e.g. DATE, TIME, TIMEZONE_REGION are absent here).
    pub fn next_token_is_temporal_unit(&mut self) -> bool {
        if let Token::Word(word) = &self.peek_token_ref().token {
            matches!(
                word.keyword,
                Keyword::YEAR
                    | Keyword::YEARS
                    | Keyword::MONTH
                    | Keyword::MONTHS
                    | Keyword::WEEK
                    | Keyword::WEEKS
                    | Keyword::DAY
                    | Keyword::DAYS
                    | Keyword::HOUR
                    | Keyword::HOURS
                    | Keyword::MINUTE
                    | Keyword::MINUTES
                    | Keyword::SECOND
                    | Keyword::SECONDS
                    | Keyword::CENTURY
                    | Keyword::DECADE
                    | Keyword::DOW
                    | Keyword::DOY
                    | Keyword::EPOCH
                    | Keyword::ISODOW
                    | Keyword::ISOYEAR
                    | Keyword::JULIAN
                    | Keyword::MICROSECOND
                    | Keyword::MICROSECONDS
                    | Keyword::MILLENIUM
                    | Keyword::MILLENNIUM
                    | Keyword::MILLISECOND
                    | Keyword::MILLISECONDS
                    | Keyword::NANOSECOND
                    | Keyword::NANOSECONDS
                    | Keyword::QUARTER
                    | Keyword::TIMEZONE
                    | Keyword::TIMEZONE_HOUR
                    | Keyword::TIMEZONE_MINUTE
            )
        } else {
            false
        }
    }
3380
    /// Parse a struct literal: `STRUCT<...>(v, ...)` or `STRUCT(v, ...)`.
    /// Called with the `STRUCT` keyword already consumed.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // Step back so parse_struct_type_def can re-consume STRUCT.
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        // A leftover half of a `>>` token here means the angle brackets
        // were unbalanced.
        if trailing_bracket.0 {
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token_ref().span.start
            );
        }

        self.expect_token(&Token::LParen)?;
        // `AS`-aliasing of values is only allowed in the untyped form,
        // hence the `!fields.is_empty()` flag (see parse_struct_field_expr).
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3408
    /// Parse one value inside a struct literal, optionally aliased with
    /// `AS name`. `typed_syntax` is true when the literal carried an
    /// explicit `STRUCT<...>` field list, in which case `AS` is rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // Rewind so the error location points at the AS keyword.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token_ref().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3440
    /// Parse a `STRUCT<field, ...>` type list, using `elem_parser` for each
    /// field. Returns the fields together with a flag indicating whether
    /// the final `>` was consumed as part of a `>>` token (so an enclosing
    /// struct can skip its own closing bracket).
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // No `<` means a bare STRUCT with no explicit field list.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // A field that already consumed this list's `>` (via a `>>` in
            // a nested struct) ends the list, as does a missing comma.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3483
3484 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3486 self.expect_keyword_is(Keyword::STRUCT)?;
3487 self.expect_token(&Token::LParen)?;
3488 let struct_body = self.parse_comma_separated(|parser| {
3489 let field_name = parser.parse_identifier()?;
3490 let field_type = parser.parse_data_type()?;
3491
3492 Ok(StructField {
3493 field_name: Some(field_name),
3494 field_type,
3495 options: None,
3496 })
3497 });
3498 self.expect_token(&Token::RParen)?;
3499 struct_body
3500 }
3501
    /// Parse a single struct field definition: `[name [:]] type [OPTIONS ...]`.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // A field is named when it starts with `word word` (name type) or
        // `word :` (name : type); otherwise it is an anonymous type.
        let is_named_field = matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
        );

        let field_name = if is_named_field {
            let name = self.parse_identifier()?;
            // The colon separator is optional.
            let _ = self.consume_token(&Token::Colon);
            Some(name)
        } else {
            None
        };

        // The helper reports whether the type consumed this list's closing
        // `>` as part of a `>>` token (nested struct types).
        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3543
3544 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3554 self.expect_keyword_is(Keyword::UNION)?;
3555
3556 self.expect_token(&Token::LParen)?;
3557
3558 let fields = self.parse_comma_separated(|p| {
3559 Ok(UnionField {
3560 field_name: p.parse_identifier()?,
3561 field_type: p.parse_data_type()?,
3562 })
3563 })?;
3564
3565 self.expect_token(&Token::RParen)?;
3566
3567 Ok(fields)
3568 }
3569
3570 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3581 self.expect_token(&Token::LBrace)?;
3582
3583 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3584
3585 self.expect_token(&Token::RBrace)?;
3586
3587 Ok(Expr::Dictionary(fields))
3588 }
3589
3590 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3601 let key = self.parse_identifier()?;
3602
3603 self.expect_token(&Token::Colon)?;
3604
3605 let expr = self.parse_expr()?;
3606
3607 Ok(DictionaryField {
3608 key,
3609 value: Box::new(expr),
3610 })
3611 }
3612
3613 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3623 self.expect_token(&Token::LBrace)?;
3624 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3625 self.expect_token(&Token::RBrace)?;
3626 Ok(Expr::Map(Map { entries: fields }))
3627 }
3628
3629 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3639 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3641
3642 self.expect_token(&Token::Colon)?;
3643
3644 let value = self.parse_expr()?;
3645
3646 Ok(MapEntry {
3647 key: Box::new(key),
3648 value: Box::new(value),
3649 })
3650 }
3651
3652 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3662 self.expect_keyword_is(Keyword::MAP)?;
3663 self.expect_token(&Token::LParen)?;
3664 let key_data_type = self.parse_data_type()?;
3665 self.expect_token(&Token::Comma)?;
3666 let value_data_type = self.parse_data_type()?;
3667 self.expect_token(&Token::RParen)?;
3668
3669 Ok((key_data_type, value_data_type))
3670 }
3671
3672 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3682 self.expect_keyword_is(Keyword::TUPLE)?;
3683 self.expect_token(&Token::LParen)?;
3684 let mut field_defs = vec![];
3685 loop {
3686 let (def, _) = self.parse_struct_field_def()?;
3687 field_defs.push(def);
3688 if !self.consume_token(&Token::Comma) {
3689 break;
3690 }
3691 }
3692 self.expect_token(&Token::RParen)?;
3693
3694 Ok(field_defs)
3695 }
3696
    /// Consume the `>` that closes a `STRUCT<...>` type list, unless a
    /// nested element already consumed it (`trailing_bracket` set). A `>>`
    /// token closes two nested lists at once: this level consumes it and
    /// returns `true` so the enclosing level skips its own `>`.
    fn expect_closing_angle_bracket(
        &mut self,
        trailing_bracket: MatchedTrailingBracket,
    ) -> Result<MatchedTrailingBracket, ParserError> {
        let trailing_bracket = if !trailing_bracket.0 {
            match &self.peek_token_ref().token {
                Token::Gt => {
                    self.next_token();
                    false.into()
                }
                Token::ShiftRight => {
                    // `>>` — consume it and tell the caller's caller that
                    // its bracket is already matched.
                    self.next_token();
                    true.into()
                }
                _ => return self.expected_ref(">", self.peek_token_ref()),
            }
        } else {
            false.into()
        };

        Ok(trailing_bracket)
    }
3723
3724 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3726 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3728 return infix;
3729 }
3730
3731 let dialect = self.dialect;
3732
3733 self.advance_token();
3734 let tok = self.get_current_token();
3735 debug!("infix: {tok:?}");
3736 let tok_index = self.get_current_index();
3737 let span = tok.span;
3738 let regular_binary_operator = match &tok.token {
3739 Token::Spaceship => Some(BinaryOperator::Spaceship),
3740 Token::DoubleEq => Some(BinaryOperator::Eq),
3741 Token::Assignment => Some(BinaryOperator::Assignment),
3742 Token::Eq => Some(BinaryOperator::Eq),
3743 Token::Neq => Some(BinaryOperator::NotEq),
3744 Token::Gt => Some(BinaryOperator::Gt),
3745 Token::GtEq => Some(BinaryOperator::GtEq),
3746 Token::Lt => Some(BinaryOperator::Lt),
3747 Token::LtEq => Some(BinaryOperator::LtEq),
3748 Token::Plus => Some(BinaryOperator::Plus),
3749 Token::Minus => Some(BinaryOperator::Minus),
3750 Token::Mul => Some(BinaryOperator::Multiply),
3751 Token::Mod => Some(BinaryOperator::Modulo),
3752 Token::StringConcat => Some(BinaryOperator::StringConcat),
3753 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3754 Token::Caret => {
3755 if dialect_is!(dialect is PostgreSqlDialect) {
3758 Some(BinaryOperator::PGExp)
3759 } else {
3760 Some(BinaryOperator::BitwiseXor)
3761 }
3762 }
3763 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3764 Token::Div => Some(BinaryOperator::Divide),
3765 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3766 Some(BinaryOperator::DuckIntegerDivide)
3767 }
3768 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3769 Some(BinaryOperator::PGBitwiseShiftLeft)
3770 }
3771 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3772 Some(BinaryOperator::PGBitwiseShiftRight)
3773 }
3774 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3775 Some(BinaryOperator::PGBitwiseXor)
3776 }
3777 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3778 Some(BinaryOperator::PGOverlap)
3779 }
3780 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3781 Some(BinaryOperator::PGOverlap)
3782 }
3783 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3784 Some(BinaryOperator::And)
3785 }
3786 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3787 Some(BinaryOperator::PGStartsWith)
3788 }
3789 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3790 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3791 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3792 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3793 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3794 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3795 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3796 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3797 Token::Arrow => Some(BinaryOperator::Arrow),
3798 Token::LongArrow => Some(BinaryOperator::LongArrow),
3799 Token::HashArrow => Some(BinaryOperator::HashArrow),
3800 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3801 Token::AtArrow => Some(BinaryOperator::AtArrow),
3802 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3803 Token::HashMinus => Some(BinaryOperator::HashMinus),
3804 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3805 Token::AtAt => Some(BinaryOperator::AtAt),
3806 Token::Question => Some(BinaryOperator::Question),
3807 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3808 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3809 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3810 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3811 Some(BinaryOperator::DoubleHash)
3812 }
3813
3814 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3815 Some(BinaryOperator::AndLt)
3816 }
3817 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3818 Some(BinaryOperator::AndGt)
3819 }
3820 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3821 Some(BinaryOperator::QuestionDash)
3822 }
3823 Token::AmpersandLeftAngleBracketVerticalBar
3824 if self.dialect.supports_geometric_types() =>
3825 {
3826 Some(BinaryOperator::AndLtPipe)
3827 }
3828 Token::VerticalBarAmpersandRightAngleBracket
3829 if self.dialect.supports_geometric_types() =>
3830 {
3831 Some(BinaryOperator::PipeAndGt)
3832 }
3833 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3834 Some(BinaryOperator::LtDashGt)
3835 }
3836 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3837 Some(BinaryOperator::LtCaret)
3838 }
3839 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3840 Some(BinaryOperator::GtCaret)
3841 }
3842 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3843 Some(BinaryOperator::QuestionHash)
3844 }
3845 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3846 Some(BinaryOperator::QuestionDoublePipe)
3847 }
3848 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3849 Some(BinaryOperator::QuestionDashPipe)
3850 }
3851 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3852 Some(BinaryOperator::TildeEq)
3853 }
3854 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3855 Some(BinaryOperator::LtLtPipe)
3856 }
3857 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3858 Some(BinaryOperator::PipeGtGt)
3859 }
3860 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3861
3862 Token::Word(w) => match w.keyword {
3863 Keyword::AND => Some(BinaryOperator::And),
3864 Keyword::OR => Some(BinaryOperator::Or),
3865 Keyword::XOR => Some(BinaryOperator::Xor),
3866 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3867 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3868 self.expect_token(&Token::LParen)?;
3869 let mut idents = vec![];
3874 loop {
3875 self.advance_token();
3876 idents.push(self.get_current_token().to_string());
3877 if !self.consume_token(&Token::Period) {
3878 break;
3879 }
3880 }
3881 self.expect_token(&Token::RParen)?;
3882 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3883 }
3884 _ => None,
3885 },
3886 _ => None,
3887 };
3888
3889 let tok = self.token_at(tok_index);
3890 if let Some(op) = regular_binary_operator {
3891 if let Some(keyword) =
3892 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3893 {
3894 self.expect_token(&Token::LParen)?;
3895 let right = if self.peek_sub_query() {
3896 self.prev_token(); self.parse_subexpr(precedence)?
3900 } else {
3901 let right = self.parse_subexpr(precedence)?;
3903 self.expect_token(&Token::RParen)?;
3904 right
3905 };
3906
3907 if !matches!(
3908 op,
3909 BinaryOperator::Gt
3910 | BinaryOperator::Lt
3911 | BinaryOperator::GtEq
3912 | BinaryOperator::LtEq
3913 | BinaryOperator::Eq
3914 | BinaryOperator::NotEq
3915 | BinaryOperator::PGRegexMatch
3916 | BinaryOperator::PGRegexIMatch
3917 | BinaryOperator::PGRegexNotMatch
3918 | BinaryOperator::PGRegexNotIMatch
3919 | BinaryOperator::PGLikeMatch
3920 | BinaryOperator::PGILikeMatch
3921 | BinaryOperator::PGNotLikeMatch
3922 | BinaryOperator::PGNotILikeMatch
3923 ) {
3924 return parser_err!(
3925 format!(
3926 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3927 ),
3928 span.start
3929 );
3930 };
3931
3932 Ok(match keyword {
3933 Keyword::ALL => Expr::AllOp {
3934 left: Box::new(expr),
3935 compare_op: op,
3936 right: Box::new(right),
3937 },
3938 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3939 left: Box::new(expr),
3940 compare_op: op,
3941 right: Box::new(right),
3942 is_some: keyword == Keyword::SOME,
3943 },
3944 unexpected_keyword => return Err(ParserError::ParserError(
3945 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3946 )),
3947 })
3948 } else {
3949 Ok(Expr::BinaryOp {
3950 left: Box::new(expr),
3951 op,
3952 right: Box::new(self.parse_subexpr(precedence)?),
3953 })
3954 }
3955 } else if let Token::Word(w) = &tok.token {
3956 match w.keyword {
3957 Keyword::IS => {
3958 if self.parse_keyword(Keyword::NULL) {
3959 Ok(Expr::IsNull(Box::new(expr)))
3960 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3961 Ok(Expr::IsNotNull(Box::new(expr)))
3962 } else if self.parse_keywords(&[Keyword::TRUE]) {
3963 Ok(Expr::IsTrue(Box::new(expr)))
3964 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3965 Ok(Expr::IsNotTrue(Box::new(expr)))
3966 } else if self.parse_keywords(&[Keyword::FALSE]) {
3967 Ok(Expr::IsFalse(Box::new(expr)))
3968 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3969 Ok(Expr::IsNotFalse(Box::new(expr)))
3970 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3971 Ok(Expr::IsUnknown(Box::new(expr)))
3972 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3973 Ok(Expr::IsNotUnknown(Box::new(expr)))
3974 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3975 let expr2 = self.parse_expr()?;
3976 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3977 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3978 {
3979 let expr2 = self.parse_expr()?;
3980 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3981 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3982 Ok(is_normalized)
3983 } else {
3984 self.expected_ref(
3985 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3986 self.peek_token_ref(),
3987 )
3988 }
3989 }
3990 Keyword::AT => {
3991 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3992 Ok(Expr::AtTimeZone {
3993 timestamp: Box::new(expr),
3994 time_zone: Box::new(self.parse_subexpr(precedence)?),
3995 })
3996 }
3997 Keyword::NOT
3998 | Keyword::IN
3999 | Keyword::BETWEEN
4000 | Keyword::LIKE
4001 | Keyword::ILIKE
4002 | Keyword::SIMILAR
4003 | Keyword::REGEXP
4004 | Keyword::RLIKE => {
4005 self.prev_token();
4006 let negated = self.parse_keyword(Keyword::NOT);
4007 let regexp = self.parse_keyword(Keyword::REGEXP);
4008 let rlike = self.parse_keyword(Keyword::RLIKE);
4009 let null = if !self.in_column_definition_state() {
4010 self.parse_keyword(Keyword::NULL)
4011 } else {
4012 false
4013 };
4014 if regexp || rlike {
4015 Ok(Expr::RLike {
4016 negated,
4017 expr: Box::new(expr),
4018 pattern: Box::new(
4019 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4020 ),
4021 regexp,
4022 })
4023 } else if negated && null {
4024 Ok(Expr::IsNotNull(Box::new(expr)))
4025 } else if self.parse_keyword(Keyword::IN) {
4026 self.parse_in(expr, negated)
4027 } else if self.parse_keyword(Keyword::BETWEEN) {
4028 self.parse_between(expr, negated)
4029 } else if self.parse_keyword(Keyword::LIKE) {
4030 Ok(Expr::Like {
4031 negated,
4032 any: self.parse_keyword(Keyword::ANY),
4033 expr: Box::new(expr),
4034 pattern: Box::new(
4035 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4036 ),
4037 escape_char: self.parse_escape_char()?,
4038 })
4039 } else if self.parse_keyword(Keyword::ILIKE) {
4040 Ok(Expr::ILike {
4041 negated,
4042 any: self.parse_keyword(Keyword::ANY),
4043 expr: Box::new(expr),
4044 pattern: Box::new(
4045 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4046 ),
4047 escape_char: self.parse_escape_char()?,
4048 })
4049 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4050 Ok(Expr::SimilarTo {
4051 negated,
4052 expr: Box::new(expr),
4053 pattern: Box::new(
4054 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4055 ),
4056 escape_char: self.parse_escape_char()?,
4057 })
4058 } else {
4059 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4060 }
4061 }
4062 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4063 Ok(Expr::IsNotNull(Box::new(expr)))
4064 }
4065 Keyword::MEMBER => {
4066 if self.parse_keyword(Keyword::OF) {
4067 self.expect_token(&Token::LParen)?;
4068 let array = self.parse_expr()?;
4069 self.expect_token(&Token::RParen)?;
4070 Ok(Expr::MemberOf(MemberOf {
4071 value: Box::new(expr),
4072 array: Box::new(array),
4073 }))
4074 } else {
4075 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4076 }
4077 }
4078 _ => parser_err!(
4080 format!("No infix parser for token {:?}", tok.token),
4081 tok.span.start
4082 ),
4083 }
4084 } else if Token::DoubleColon == *tok {
4085 Ok(Expr::Cast {
4086 kind: CastKind::DoubleColon,
4087 expr: Box::new(expr),
4088 data_type: self.parse_data_type()?,
4089 array: false,
4090 format: None,
4091 })
4092 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4093 Ok(Expr::UnaryOp {
4094 op: UnaryOperator::PGPostfixFactorial,
4095 expr: Box::new(expr),
4096 })
4097 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4098 || (Token::Colon == *tok)
4099 {
4100 self.prev_token();
4101 self.parse_json_access(expr)
4102 } else {
4103 parser_err!(
4105 format!("No infix parser for token {:?}", tok.token),
4106 tok.span.start
4107 )
4108 }
4109 }
4110
4111 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4113 if self.parse_keyword(Keyword::ESCAPE) {
4114 Ok(Some(self.parse_value()?))
4115 } else {
4116 Ok(None)
4117 }
4118 }
4119
    /// Parses the interior of a `[ ... ]` subscript after the opening bracket
    /// has been consumed: either a plain index (`[i]`) or a slice with
    /// optional lower/upper bounds and stride (`[a:b:c]`).
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound is omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[expr]` is a plain index; a bare `[:]`-style close is a slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If a lower bound was parsed, the separating `:` is still pending.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[a:]` — upper bound omitted.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[a:b]` — no stride component.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // `[a:b:]` or `[a:b:c]` — second `:` then an optional stride.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride expression was parsed, the closing `]` is still pending.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4194
4195 pub fn parse_multi_dim_subscript(
4197 &mut self,
4198 chain: &mut Vec<AccessExpr>,
4199 ) -> Result<(), ParserError> {
4200 while self.consume_token(&Token::LBracket) {
4201 self.parse_subscript(chain)?;
4202 }
4203 Ok(())
4204 }
4205
4206 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4210 let subscript = self.parse_subscript_inner()?;
4211 chain.push(AccessExpr::Subscript(subscript));
4212 Ok(())
4213 }
4214
    /// Parses a single object key inside a JSON path (after `:` or `.`):
    /// an unquoted, double-quoted, or backtick-quoted word, or a
    /// double-quoted string literal.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            Token::Word(Word {
                value,
                // Only these quote styles are accepted for a path key; any
                // other quote character falls through to the error below.
                quote_style: quote_style @ (Some('"') | Some('`') | None),
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
4239
4240 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4241 let path = self.parse_json_path()?;
4242 Ok(Expr::JsonAccess {
4243 value: Box::new(expr),
4244 path,
4245 })
4246 }
4247
    /// Parses a JSON access path: a leading `:` element (optionally `:[expr]`)
    /// followed by any number of `.key` and `[expr]` elements, stopping at the
    /// first token that is not part of the path.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:[expr]` is only valid as the first path element.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // Subsequent elements use `.`, never `:`.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                _ => {
                    // Not part of the path: back up so the caller sees this token.
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers invoke this only after seeing a path-start token, so the
        // loop must have pushed at least one element.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4280
    /// Parses the operand of `[NOT] IN` after the keyword has been consumed:
    /// `IN UNNEST(expr)`, `IN (subquery)`, or `IN (expr, ...)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array_expression)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure fall back to an expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects accept `IN ()` with no elements.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4315
4316 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4318 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4321 self.expect_keyword_is(Keyword::AND)?;
4322 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4323 Ok(Expr::Between {
4324 expr: Box::new(expr),
4325 negated,
4326 low: Box::new(low),
4327 high: Box::new(high),
4328 })
4329 }
4330
4331 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4333 Ok(Expr::Cast {
4334 kind: CastKind::DoubleColon,
4335 expr: Box::new(expr),
4336 data_type: self.parse_data_type()?,
4337 array: false,
4338 format: None,
4339 })
4340 }
4341
    /// Returns the precedence of the next (unconsumed) token, delegating to
    /// the dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4346
    /// Returns the token stored at `index`, or the shared EOF token when
    /// `index` is past the end of the token stream.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4352
    /// Returns a clone of the next non-whitespace token without advancing.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4360
    /// Returns a reference to the next non-whitespace token without advancing.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4366
    /// Returns the next `N` non-whitespace tokens without advancing the
    /// parser; spans are dropped (see [`Self::peek_tokens_with_location`]).
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
4393
4394 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4399 let mut index = self.index;
4400 core::array::from_fn(|_| loop {
4401 let token = self.tokens.get(index);
4402 index += 1;
4403 if let Some(TokenWithSpan {
4404 token: Token::Whitespace(_),
4405 span: _,
4406 }) = token
4407 {
4408 continue;
4409 }
4410 break token.cloned().unwrap_or(TokenWithSpan {
4411 token: Token::EOF,
4412 span: Span::empty(),
4413 });
4414 })
4415 }
4416
    /// Returns references to the next `N` non-whitespace tokens without
    /// advancing the parser; positions past the end yield the EOF token.
    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Skip over whitespace tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.unwrap_or(&EOF_TOKEN);
        })
    }
4436
    /// Returns a clone of the nth upcoming non-whitespace token without
    /// advancing the parser.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4441
    /// Returns a reference to the nth upcoming non-whitespace token without
    /// advancing; positions past the end of input yield the EOF token.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts towards `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4461
    /// Returns a clone of the immediately-next token, including whitespace,
    /// without advancing the parser.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4467
4468 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4470 self.tokens
4471 .get(self.index + n)
4472 .cloned()
4473 .unwrap_or(TokenWithSpan {
4474 token: Token::EOF,
4475 span: Span::empty(),
4476 })
4477 }
4478
    /// Returns a reference to the token `n` positions ahead, counting
    /// whitespace, or the EOF token past the end of input.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4483
4484 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4488 let index = self.index;
4489 let matched = self.parse_keywords(expected);
4490 self.index = index;
4491 matched
4492 }
4493
    /// Consumes the next non-whitespace token and returns a clone of it
    /// (EOF at end of input).
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4502
    /// Returns the index of the most recently consumed token, saturating at
    /// zero before any token has been consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4510
4511 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4513 self.index += 1;
4514 self.tokens.get(self.index - 1)
4515 }
4516
4517 pub fn advance_token(&mut self) {
4521 loop {
4522 self.index += 1;
4523 match self.tokens.get(self.index - 1) {
4524 Some(TokenWithSpan {
4525 token: Token::Whitespace(_),
4526 span: _,
4527 }) => continue,
4528 _ => break,
4529 }
4530 }
4531 }
4532
    /// Returns the most recently consumed token (EOF before any token has
    /// been consumed).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4539
    /// Returns the token consumed before the current one (EOF when no such
    /// token exists).
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4546
    /// Returns the next unconsumed raw token (whitespace included; EOF at
    /// end of input), without advancing.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4553
    /// Moves the parser back to the previous non-whitespace token.
    ///
    /// Panics (via `assert!`) when there is no token left to back over, i.e.
    /// more `prev_token` calls than preceding advances.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            // Keep stepping back over whitespace tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
4574
4575 pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4577 parser_err!(
4578 format!("Expected: {expected}, found: {found}"),
4579 found.span.start
4580 )
4581 }
4582
    /// Builds an "Expected: …, found: …" parse error from a borrowed token,
    /// anchored at that token's span.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4590
4591 pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4593 let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4594 parser_err!(
4595 format!("Expected: {expected}, found: {found}"),
4596 found.span.start
4597 )
4598 }
4599
4600 #[must_use]
4603 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4604 if self.peek_keyword(expected) {
4605 self.advance_token();
4606 true
4607 } else {
4608 false
4609 }
4610 }
4611
    /// Returns true when the next non-whitespace token is the given keyword,
    /// without consuming it.
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4619
    /// Consumes the `expected` keyword together with the given sequence of
    /// tokens that must immediately follow it; returns false (consuming
    /// nothing) when the full sequence does not match.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4630
    /// Like [`Self::parse_keyword_with_tokens`], but never consumes anything.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4638
4639 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4640 match &self.peek_token_ref().token {
4641 Token::Word(w) if expected == w.keyword => {
4642 for (idx, token) in tokens.iter().enumerate() {
4643 if self.peek_nth_token_ref(idx + 1).token != *token {
4644 return false;
4645 }
4646 }
4647
4648 if consume {
4649 for _ in 0..(tokens.len() + 1) {
4650 self.advance_token();
4651 }
4652 }
4653
4654 true
4655 }
4656 _ => false,
4657 }
4658 }
4659
    /// Consumes all of `keywords` in sequence, or nothing at all: on any
    /// mismatch the parser is rewound and false is returned.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4667
4668 #[must_use]
4671 fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
4672 let start_index = self.index;
4673 let mut first_keyword_index = None;
4674 for &keyword in keywords {
4675 if !self.parse_keyword(keyword) {
4676 self.index = start_index;
4677 return None;
4678 }
4679 if first_keyword_index.is_none() {
4680 first_keyword_index = Some(self.index.saturating_sub(1));
4681 }
4682 }
4683 first_keyword_index
4684 }
4685
4686 #[must_use]
4689 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4690 for keyword in keywords {
4691 if self.peek_keyword(*keyword) {
4692 return Some(*keyword);
4693 }
4694 }
4695 None
4696 }
4697
4698 #[must_use]
4702 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4703 match &self.peek_token_ref().token {
4704 Token::Word(w) => {
4705 keywords
4706 .iter()
4707 .find(|keyword| **keyword == w.keyword)
4708 .map(|keyword| {
4709 self.advance_token();
4710 *keyword
4711 })
4712 }
4713 _ => None,
4714 }
4715 }
4716
    /// Consumes and returns whichever of `keywords` comes next, or errors
    /// listing all the accepted alternatives.
    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
            Ok(keyword)
        } else {
            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
            self.expected_ref(
                &format!("one of {}", keywords.join(" or ")),
                self.peek_token_ref(),
            )
        }
    }
4730
    /// Consumes the `expected` keyword and returns the matched token, or
    /// errors (consuming nothing) when the next token is anything else.
    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
        if self.parse_keyword(expected) {
            Ok(self.get_current_token().clone())
        } else {
            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
        }
    }
4742
4743 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4749 if self.parse_keyword(expected) {
4750 Ok(())
4751 } else {
4752 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4753 }
4754 }
4755
4756 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4759 for &kw in expected {
4760 self.expect_keyword_is(kw)?;
4761 }
4762 Ok(())
4763 }
4764
4765 #[must_use]
4769 pub fn consume_token(&mut self, expected: &Token) -> bool {
4770 if self.peek_token_ref() == expected {
4771 self.advance_token();
4772 true
4773 } else {
4774 false
4775 }
4776 }
4777
4778 #[must_use]
4782 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4783 let index = self.index;
4784 for token in tokens {
4785 if !self.consume_token(token) {
4786 self.index = index;
4787 return false;
4788 }
4789 }
4790 true
4791 }
4792
    /// Consumes and returns the next token when it equals `expected`; errors
    /// (consuming nothing) otherwise.
    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
        if self.peek_token_ref() == expected {
            Ok(self.next_token())
        } else {
            self.expected_ref(&expected.to_string(), self.peek_token_ref())
        }
    }
4801
4802 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4803 where
4804 <T as FromStr>::Err: Display,
4805 {
4806 s.parse::<T>().map_err(|e| {
4807 ParserError::ParserError(format!(
4808 "Could not parse '{s}' as {}: {e}{loc}",
4809 core::any::type_name::<T>()
4810 ))
4811 })
4812 }
4813
4814 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4816 let trailing_commas =
4822 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4823
4824 self.parse_comma_separated_with_trailing_commas(
4825 |p| p.parse_select_item(),
4826 trailing_commas,
4827 Self::is_reserved_for_column_alias,
4828 )
4829 }
4830
    /// Parses the comma-separated list of permissions in a GRANT/REVOKE
    /// statement, tolerating a trailing comma (when the parser options allow
    /// it) before `ON` or a closing delimiter.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // A comma directly before `ON` or a closing delimiter is a
                // trailing comma, not a separator.
                match &self.peek_token_ref().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4854
4855 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4857 let trailing_commas = self.dialect.supports_from_trailing_commas();
4858
4859 self.parse_comma_separated_with_trailing_commas(
4860 Parser::parse_table_and_joins,
4861 trailing_commas,
4862 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4863 )
4864 }
4865
    /// Decides whether a comma-separated list ends here: either there is no
    /// comma, or (when trailing commas are allowed) the comma is followed by
    /// a reserved keyword or a closing delimiter. The comma is consumed in
    /// both the "continue" and the "trailing comma" cases.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Peek at what follows the comma: consume one token, inspect it,
            // then step back so the caller still sees it.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4898
    /// End-of-list test using the parser's own trailing-comma option and the
    /// default column-alias reserved-word rules.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4907
    /// Parses one or more items produced by `f`, separated by commas, with
    /// trailing-comma handling taken from the parser options.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4919
    /// Core comma-separated-list loop: repeatedly parses an item with `f`
    /// until the end test (which also resolves trailing commas against
    /// `is_reserved_keyword`) reports that the list is over.
    fn parse_comma_separated_with_trailing_commas<T, F, R>(
        &mut self,
        mut f: F,
        trailing_commas: bool,
        is_reserved_keyword: R,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        let mut values = vec![];
        loop {
            values.push(f(self)?);
            if self.is_parse_comma_separated_end_with_trailing_commas(
                trailing_commas,
                &is_reserved_keyword,
            ) {
                break;
            }
        }
        Ok(values)
    }
4946
4947 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4949 where
4950 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4951 {
4952 let mut values = vec![];
4953 loop {
4954 values.push(f(self)?);
4955 if !self.consume_token(&Token::Period) {
4956 break;
4957 }
4958 }
4959 Ok(values)
4960 }
4961
4962 pub fn parse_keyword_separated<T, F>(
4964 &mut self,
4965 keyword: Keyword,
4966 mut f: F,
4967 ) -> Result<Vec<T>, ParserError>
4968 where
4969 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4970 {
4971 let mut values = vec![];
4972 loop {
4973 values.push(f(self)?);
4974 if !self.parse_keyword(keyword) {
4975 break;
4976 }
4977 }
4978 Ok(values)
4979 }
4980
4981 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4983 where
4984 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4985 {
4986 self.expect_token(&Token::LParen)?;
4987 let res = f(self)?;
4988 self.expect_token(&Token::RParen)?;
4989 Ok(res)
4990 }
4991
    /// Like [`Self::parse_comma_separated`], but also accepts an empty list
    /// (and, when trailing commas are enabled, a lone comma) directly before
    /// `end_token`. The `end_token` itself is not consumed.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        // `( , )` — a single trailing comma in an otherwise empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5013
    /// Parses a sequence of semicolon-terminated statements, stopping
    /// (without consuming) at EOF or at any unquoted keyword listed in
    /// `terminal_keywords`.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                // Quoted identifiers never terminate the list, even when they
                // spell a terminal keyword.
                Token::Word(w)
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) =>
                {
                    break;
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5038
    /// Default reserved-word test for comma-separated lists: a keyword ends
    /// the list when the dialect does not allow it as a column alias.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5045
5046 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
5050 where
5051 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5052 {
5053 match self.try_parse(f) {
5054 Ok(t) => Ok(Some(t)),
5055 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
5056 _ => Ok(None),
5057 }
5058 }
5059
    /// Runs `f`, rewinding the parser to its starting position when `f`
    /// fails, and propagating the error either way.
    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        let index = self.index;
        match f(self) {
            Ok(t) => Ok(t),
            Err(e) => {
                // Roll back so the caller can try an alternative production.
                self.index = index;
                Err(e)
            }
        }
    }
5075
    /// Parses an optional `ALL` / `DISTINCT` / `DISTINCT ON (...)` set
    /// quantifier, rejecting the contradictory `ALL DISTINCT` and
    /// `DISTINCT ALL` combinations.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember the location for error reporting before consuming anything.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only `DISTINCT` may carry an `ON (...)` clause.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        // `DISTINCT ON (...)`: an empty column list `()` is allowed.
        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5114
/// Parse a `CREATE` statement, dispatching on the object type that follows.
///
/// The common prefix modifiers (`OR REPLACE`, `OR ALTER`, `LOCAL`/`GLOBAL`,
/// `TRANSIENT`, `TEMP`/`TEMPORARY`, DuckDB `PERSISTENT`, and view parameters)
/// are consumed first; the next keyword then selects the concrete
/// `CREATE <object>` sub-parser.
///
/// NOTE(review): branch order matters. Multi-keyword forms whose first
/// keyword differs from a single-keyword branch (`CONSTRAINT TRIGGER`,
/// `UNIQUE INDEX`, `DEFAULT CONVERSION`, …) rely on the single-keyword
/// branch failing to consume anything when its keyword is not next.
pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
    // Leading modifiers shared by several CREATE variants.
    let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
    let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
    let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
    let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
    let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
    // Fold LOCAL/GLOBAL into a tri-state: Some(true)=GLOBAL, Some(false)=LOCAL.
    let global: Option<bool> = if global {
        Some(true)
    } else if local {
        Some(false)
    } else {
        None
    };
    let temporary = self
        .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
        .is_some();
    // PERSISTENT is only recognized for DuckDB (secrets).
    let persistent = dialect_of!(self is DuckDbDialect)
        && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
    // View-specific parameters that may precede the object keyword.
    let create_view_params = self.parse_create_view_params()?;
    if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
        self.parse_create_snapshot_table().map(Into::into)
    } else if self.parse_keyword(Keyword::TABLE) {
        self.parse_create_table(or_replace, temporary, global, transient)
            .map(Into::into)
    } else if self.peek_keyword(Keyword::MATERIALIZED)
        || self.peek_keyword(Keyword::VIEW)
        || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
        || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
    {
        self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
            .map(Into::into)
    } else if self.parse_keyword(Keyword::POLICY) {
        self.parse_create_policy().map(Into::into)
    } else if self.parse_keyword(Keyword::EXTERNAL) {
        self.parse_create_external_table(or_replace).map(Into::into)
    } else if self.parse_keyword(Keyword::FUNCTION) {
        self.parse_create_function(or_alter, or_replace, temporary)
    } else if self.parse_keyword(Keyword::DOMAIN) {
        self.parse_create_domain().map(Into::into)
    } else if self.parse_keyword(Keyword::TRIGGER) {
        self.parse_create_trigger(temporary, or_alter, or_replace, false)
            .map(Into::into)
    } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
        // `is_constraint = true` for CONSTRAINT TRIGGER.
        self.parse_create_trigger(temporary, or_alter, or_replace, true)
            .map(Into::into)
    } else if self.parse_keyword(Keyword::MACRO) {
        self.parse_create_macro(or_replace, temporary)
    } else if self.parse_keyword(Keyword::SECRET) {
        self.parse_create_secret(or_replace, temporary, persistent)
    } else if self.parse_keyword(Keyword::USER) {
        if self.parse_keyword(Keyword::MAPPING) {
            self.parse_create_user_mapping().map(Into::into)
        } else {
            self.parse_create_user(or_replace).map(Into::into)
        }
    } else if self.parse_keyword(Keyword::AGGREGATE) {
        self.parse_create_aggregate(or_replace).map(Into::into)
    } else if self.peek_keyword(Keyword::TRUSTED)
        || self.peek_keyword(Keyword::PROCEDURAL)
        || self.peek_keyword(Keyword::LANGUAGE)
    {
        // `CREATE [TRUSTED] [PROCEDURAL] LANGUAGE ...`
        let trusted = self.parse_keyword(Keyword::TRUSTED);
        let procedural = self.parse_keyword(Keyword::PROCEDURAL);
        if self.parse_keyword(Keyword::LANGUAGE) {
            self.parse_create_language(or_replace, trusted, procedural)
                .map(Into::into)
        } else {
            self.expected_ref(
                "LANGUAGE after TRUSTED or PROCEDURAL",
                self.peek_token_ref(),
            )
        }
    } else if self.parse_keyword(Keyword::TRANSFORM) {
        self.parse_create_transform(or_replace).map(Into::into)
    } else if or_replace {
        // OR REPLACE was consumed but no object type that supports it followed.
        self.expected_ref(
            "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
            self.peek_token_ref(),
        )
    } else if self.parse_keyword(Keyword::CAST) {
        self.parse_create_cast().map(Into::into)
    } else if self.parse_keyword(Keyword::CONVERSION) {
        self.parse_create_conversion(false).map(Into::into)
    } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CONVERSION]) {
        self.parse_create_conversion(true).map(Into::into)
    } else if self.parse_keyword(Keyword::RULE) {
        self.parse_create_rule().map(Into::into)
    } else if self.parse_keyword(Keyword::EXTENSION) {
        self.parse_create_extension().map(Into::into)
    } else if self.parse_keyword(Keyword::INDEX) {
        self.parse_create_index(false).map(Into::into)
    } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
        self.parse_create_index(true).map(Into::into)
    } else if self.parse_keyword(Keyword::VIRTUAL) {
        self.parse_create_virtual_table()
    } else if self.parse_keyword(Keyword::SCHEMA) {
        self.parse_create_schema()
    } else if self.parse_keyword(Keyword::DATABASE) {
        self.parse_create_database()
    } else if self.parse_keyword(Keyword::ROLE) {
        self.parse_create_role().map(Into::into)
    } else if self.parse_keyword(Keyword::SEQUENCE) {
        self.parse_create_sequence(temporary)
    } else if self.parse_keyword(Keyword::COLLATION) {
        self.parse_create_collation().map(Into::into)
    } else if self.parse_keyword(Keyword::TYPE) {
        self.parse_create_type()
    } else if self.parse_keyword(Keyword::PROCEDURE) {
        self.parse_create_procedure(or_alter)
    } else if self.parse_keyword(Keyword::CONNECTOR) {
        self.parse_create_connector().map(Into::into)
    } else if self.parse_keyword(Keyword::OPERATOR) {
        // OPERATOR FAMILY / OPERATOR CLASS / bare OPERATOR.
        if self.parse_keyword(Keyword::FAMILY) {
            self.parse_create_operator_family().map(Into::into)
        } else if self.parse_keyword(Keyword::CLASS) {
            self.parse_create_operator_class().map(Into::into)
        } else {
            self.parse_create_operator().map(Into::into)
        }
    } else if self.parse_keyword(Keyword::SERVER) {
        self.parse_pg_create_server()
    } else if self.parse_keyword(Keyword::FOREIGN) {
        if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
            self.parse_create_foreign_data_wrapper().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_foreign_table().map(Into::into)
        } else {
            self.expected_ref(
                "DATA WRAPPER or TABLE after CREATE FOREIGN",
                self.peek_token_ref(),
            )
        }
    } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
        self.parse_create_text_search()
    } else if self.parse_keyword(Keyword::PUBLICATION) {
        self.parse_create_publication().map(Into::into)
    } else if self.parse_keyword(Keyword::SUBSCRIPTION) {
        self.parse_create_subscription().map(Into::into)
    } else if self.parse_keyword(Keyword::STATISTICS) {
        self.parse_create_statistics().map(Into::into)
    } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::METHOD]) {
        self.parse_create_access_method().map(Into::into)
    } else if self.parse_keywords(&[Keyword::EVENT, Keyword::TRIGGER]) {
        self.parse_create_event_trigger().map(Into::into)
    } else if self.parse_keyword(Keyword::TABLESPACE) {
        self.parse_create_tablespace().map(Into::into)
    } else {
        self.expected_ref("an object type after CREATE", self.peek_token_ref())
    }
}
5267
5268 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5269 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5270 let name = self.parse_identifier()?;
5271 let options = self
5272 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5273 .options;
5274 let with_tags = self.parse_keyword(Keyword::WITH);
5275 let tags = if self.parse_keyword(Keyword::TAG) {
5276 self.parse_key_value_options(true, &[])?.options
5277 } else {
5278 vec![]
5279 };
5280 Ok(CreateUser {
5281 or_replace,
5282 if_not_exists,
5283 name,
5284 options: KeyValueOptions {
5285 options,
5286 delimiter: KeyValueOptionsDelimiter::Space,
5287 },
5288 with_tags,
5289 tags: KeyValueOptions {
5290 options: tags,
5291 delimiter: KeyValueOptionsDelimiter::Comma,
5292 },
5293 })
5294 }
5295
5296 pub fn parse_create_secret(
5298 &mut self,
5299 or_replace: bool,
5300 temporary: bool,
5301 persistent: bool,
5302 ) -> Result<Statement, ParserError> {
5303 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5304
5305 let mut storage_specifier = None;
5306 let mut name = None;
5307 if self.peek_token_ref().token != Token::LParen {
5308 if self.parse_keyword(Keyword::IN) {
5309 storage_specifier = self.parse_identifier().ok()
5310 } else {
5311 name = self.parse_identifier().ok();
5312 }
5313
5314 if storage_specifier.is_none()
5316 && self.peek_token_ref().token != Token::LParen
5317 && self.parse_keyword(Keyword::IN)
5318 {
5319 storage_specifier = self.parse_identifier().ok();
5320 }
5321 }
5322
5323 self.expect_token(&Token::LParen)?;
5324 self.expect_keyword_is(Keyword::TYPE)?;
5325 let secret_type = self.parse_identifier()?;
5326
5327 let mut options = Vec::new();
5328 if self.consume_token(&Token::Comma) {
5329 options.append(&mut self.parse_comma_separated(|p| {
5330 let key = p.parse_identifier()?;
5331 let value = p.parse_identifier()?;
5332 Ok(SecretOption { key, value })
5333 })?);
5334 }
5335 self.expect_token(&Token::RParen)?;
5336
5337 let temp = match (temporary, persistent) {
5338 (true, false) => Some(true),
5339 (false, true) => Some(false),
5340 (false, false) => None,
5341 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
5342 };
5343
5344 Ok(Statement::CreateSecret {
5345 or_replace,
5346 temporary: temp,
5347 if_not_exists,
5348 name,
5349 storage_specifier,
5350 secret_type,
5351 options,
5352 })
5353 }
5354
5355 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5357 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5358 if self.parse_keyword(Keyword::TABLE) {
5359 let table_name = self.parse_object_name(false)?;
5360 if self.peek_token_ref().token != Token::EOF {
5361 if let Token::Word(word) = &self.peek_token_ref().token {
5362 if word.keyword == Keyword::OPTIONS {
5363 options = self.parse_options(Keyword::OPTIONS)?
5364 }
5365 };
5366
5367 if self.peek_token_ref().token != Token::EOF {
5368 let (a, q) = self.parse_as_query()?;
5369 has_as = a;
5370 query = Some(q);
5371 }
5372
5373 Ok(Statement::Cache {
5374 table_flag,
5375 table_name,
5376 has_as,
5377 options,
5378 query,
5379 })
5380 } else {
5381 Ok(Statement::Cache {
5382 table_flag,
5383 table_name,
5384 has_as,
5385 options,
5386 query,
5387 })
5388 }
5389 } else {
5390 table_flag = Some(self.parse_object_name(false)?);
5391 if self.parse_keyword(Keyword::TABLE) {
5392 let table_name = self.parse_object_name(false)?;
5393 if self.peek_token_ref().token != Token::EOF {
5394 if let Token::Word(word) = &self.peek_token_ref().token {
5395 if word.keyword == Keyword::OPTIONS {
5396 options = self.parse_options(Keyword::OPTIONS)?
5397 }
5398 };
5399
5400 if self.peek_token_ref().token != Token::EOF {
5401 let (a, q) = self.parse_as_query()?;
5402 has_as = a;
5403 query = Some(q);
5404 }
5405
5406 Ok(Statement::Cache {
5407 table_flag,
5408 table_name,
5409 has_as,
5410 options,
5411 query,
5412 })
5413 } else {
5414 Ok(Statement::Cache {
5415 table_flag,
5416 table_name,
5417 has_as,
5418 options,
5419 query,
5420 })
5421 }
5422 } else {
5423 if self.peek_token_ref().token == Token::EOF {
5424 self.prev_token();
5425 }
5426 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5427 }
5428 }
5429 }
5430
5431 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5433 match &self.peek_token_ref().token {
5434 Token::Word(word) => match word.keyword {
5435 Keyword::AS => {
5436 self.next_token();
5437 Ok((true, self.parse_query()?))
5438 }
5439 _ => Ok((false, self.parse_query()?)),
5440 },
5441 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5442 }
5443 }
5444
5445 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5447 self.expect_keyword_is(Keyword::TABLE)?;
5448 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5449 let table_name = self.parse_object_name(false)?;
5450 Ok(Statement::UNCache {
5451 table_name,
5452 if_exists,
5453 })
5454 }
5455
5456 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5458 self.expect_keyword_is(Keyword::TABLE)?;
5459 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5460 let table_name = self.parse_object_name(false)?;
5461 self.expect_keyword_is(Keyword::USING)?;
5462 let module_name = self.parse_identifier()?;
5463 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5468 Ok(Statement::CreateVirtualTable {
5469 name: table_name,
5470 if_not_exists,
5471 module_name,
5472 module_args,
5473 })
5474 }
5475
5476 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5478 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5479
5480 let schema_name = self.parse_schema_name()?;
5481
5482 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5483 Some(self.parse_expr()?)
5484 } else {
5485 None
5486 };
5487
5488 let with = if self.peek_keyword(Keyword::WITH) {
5489 Some(self.parse_options(Keyword::WITH)?)
5490 } else {
5491 None
5492 };
5493
5494 let options = if self.peek_keyword(Keyword::OPTIONS) {
5495 Some(self.parse_options(Keyword::OPTIONS)?)
5496 } else {
5497 None
5498 };
5499
5500 let clone = if self.parse_keyword(Keyword::CLONE) {
5501 Some(self.parse_object_name(false)?)
5502 } else {
5503 None
5504 };
5505
5506 Ok(Statement::CreateSchema {
5507 schema_name,
5508 if_not_exists,
5509 with,
5510 options,
5511 default_collate_spec,
5512 clone,
5513 })
5514 }
5515
5516 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5517 if self.parse_keyword(Keyword::AUTHORIZATION) {
5518 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5519 } else {
5520 let name = self.parse_object_name(false)?;
5521
5522 if self.parse_keyword(Keyword::AUTHORIZATION) {
5523 Ok(SchemaName::NamedAuthorization(
5524 name,
5525 self.parse_identifier()?,
5526 ))
5527 } else {
5528 Ok(SchemaName::Simple(name))
5529 }
5530 }
5531 }
5532
5533 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5535 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5536 let db_name = self.parse_object_name(false)?;
5537 let mut location = None;
5538 let mut managed_location = None;
5539 loop {
5540 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5541 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5542 Some(Keyword::MANAGEDLOCATION) => {
5543 managed_location = Some(self.parse_literal_string()?)
5544 }
5545 _ => break,
5546 }
5547 }
5548 let clone = if self.parse_keyword(Keyword::CLONE) {
5549 Some(self.parse_object_name(false)?)
5550 } else {
5551 None
5552 };
5553
5554 let mut default_charset = None;
5562 let mut default_collation = None;
5563 loop {
5564 let has_default = self.parse_keyword(Keyword::DEFAULT);
5565 if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
5566 || self.parse_keyword(Keyword::CHARSET)
5567 {
5568 let _ = self.consume_token(&Token::Eq);
5569 default_charset = Some(self.parse_identifier()?.value);
5570 } else if self.parse_keyword(Keyword::COLLATE) {
5571 let _ = self.consume_token(&Token::Eq);
5572 default_collation = Some(self.parse_identifier()?.value);
5573 } else if has_default {
5574 self.prev_token();
5576 break;
5577 } else {
5578 break;
5579 }
5580 }
5581
5582 Ok(Statement::CreateDatabase {
5583 db_name,
5584 if_not_exists: ine,
5585 location,
5586 managed_location,
5587 or_replace: false,
5588 transient: false,
5589 clone,
5590 data_retention_time_in_days: None,
5591 max_data_extension_time_in_days: None,
5592 external_volume: None,
5593 catalog: None,
5594 replace_invalid_characters: None,
5595 default_ddl_collation: None,
5596 storage_serialization_policy: None,
5597 comment: None,
5598 default_charset,
5599 default_collation,
5600 catalog_sync: None,
5601 catalog_sync_namespace_mode: None,
5602 catalog_sync_namespace_flatten_delimiter: None,
5603 with_tags: None,
5604 with_contacts: None,
5605 })
5606 }
5607
5608 pub fn parse_optional_create_function_using(
5610 &mut self,
5611 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5612 if !self.parse_keyword(Keyword::USING) {
5613 return Ok(None);
5614 };
5615 let keyword =
5616 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5617
5618 let uri = self.parse_literal_string()?;
5619
5620 match keyword {
5621 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5622 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5623 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5624 _ => self.expected(
5625 "JAR, FILE or ARCHIVE, got {:?}",
5626 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5627 ),
5628 }
5629 }
5630
/// Parse a `CREATE FUNCTION` statement (the `FUNCTION` keyword has already
/// been consumed), dispatching to a dialect-specific implementation.
///
/// Dialects without a dedicated implementation fall through to a generic
/// "object type after CREATE" error.
pub fn parse_create_function(
    &mut self,
    or_alter: bool,
    or_replace: bool,
    temporary: bool,
) -> Result<Statement, ParserError> {
    if dialect_of!(self is HiveDialect) {
        self.parse_hive_create_function(or_replace, temporary)
            .map(Into::into)
    } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
        self.parse_postgres_create_function(or_replace, temporary)
            .map(Into::into)
    } else if dialect_of!(self is DuckDbDialect) {
        // DuckDB functions are parsed with the macro machinery.
        self.parse_create_macro(or_replace, temporary)
    } else if dialect_of!(self is BigQueryDialect) {
        self.parse_bigquery_create_function(or_replace, temporary)
            .map(Into::into)
    } else if dialect_of!(self is MsSqlDialect) {
        self.parse_mssql_create_function(or_alter, or_replace, temporary)
            .map(Into::into)
    } else {
        // Back up over FUNCTION so the error points at the unsupported keyword.
        self.prev_token();
        self.expected_ref("an object type after CREATE", self.peek_token_ref())
    }
}
5657
/// Parse a Postgres-style `CREATE FUNCTION` body: name, parenthesized
/// argument list, optional `RETURNS`, then a sequence of attribute clauses
/// (`AS`, `LANGUAGE`, volatility, null-input handling, `PARALLEL`,
/// `SECURITY`, `SET`, `RETURN`) that may appear in any order.
fn parse_postgres_create_function(
    &mut self,
    or_replace: bool,
    temporary: bool,
) -> Result<CreateFunction, ParserError> {
    let name = self.parse_object_name(false)?;

    self.expect_token(&Token::LParen)?;
    let args = if Token::RParen != self.peek_token_ref().token {
        self.parse_comma_separated(Parser::parse_function_arg)?
    } else {
        vec![]
    };
    self.expect_token(&Token::RParen)?;

    let return_type = if self.parse_keyword(Keyword::RETURNS) {
        Some(self.parse_function_return_type()?)
    } else {
        None
    };

    // Accumulates the single-use attribute clauses as they are parsed;
    // each starts unset and may be provided at most once.
    #[derive(Default)]
    struct Body {
        language: Option<Ident>,
        behavior: Option<FunctionBehavior>,
        function_body: Option<CreateFunctionBody>,
        called_on_null: Option<FunctionCalledOnNull>,
        parallel: Option<FunctionParallel>,
        security: Option<FunctionSecurity>,
    }
    let mut body = Body::default();
    // `SET name = value` clauses may repeat, so they accumulate in a Vec.
    let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
    loop {
        // Error out when a single-use attribute is specified twice.
        fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
            if field.is_some() {
                return Err(ParserError::ParserError(format!(
                    "{name} specified more than once",
                )));
            }
            Ok(())
        }
        if self.parse_keyword(Keyword::AS) {
            ensure_not_set(&body.function_body, "AS")?;
            body.function_body = Some(self.parse_create_function_body_string()?);
        } else if self.parse_keyword(Keyword::LANGUAGE) {
            ensure_not_set(&body.language, "LANGUAGE")?;
            body.language = Some(self.parse_identifier()?);
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
            body.behavior = Some(FunctionBehavior::Immutable);
        } else if self.parse_keyword(Keyword::STABLE) {
            ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
            body.behavior = Some(FunctionBehavior::Stable);
        } else if self.parse_keyword(Keyword::VOLATILE) {
            ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
            body.behavior = Some(FunctionBehavior::Volatile);
        } else if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            ensure_not_set(
                &body.called_on_null,
                "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
            )?;
            body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            ensure_not_set(
                &body.called_on_null,
                "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
            )?;
            body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
        } else if self.parse_keyword(Keyword::STRICT) {
            ensure_not_set(
                &body.called_on_null,
                "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
            )?;
            body.called_on_null = Some(FunctionCalledOnNull::Strict);
        } else if self.parse_keyword(Keyword::PARALLEL) {
            ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
            if self.parse_keyword(Keyword::UNSAFE) {
                body.parallel = Some(FunctionParallel::Unsafe);
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                body.parallel = Some(FunctionParallel::Restricted);
            } else if self.parse_keyword(Keyword::SAFE) {
                body.parallel = Some(FunctionParallel::Safe);
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            }
        } else if self.parse_keyword(Keyword::SECURITY) {
            ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
            if self.parse_keyword(Keyword::DEFINER) {
                body.security = Some(FunctionSecurity::Definer);
            } else if self.parse_keyword(Keyword::INVOKER) {
                body.security = Some(FunctionSecurity::Invoker);
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            }
        } else if self.parse_keyword(Keyword::SET) {
            // `SET name FROM CURRENT` or `SET name { = | TO } DEFAULT | values`.
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    let values = self.parse_comma_separated(Parser::parse_expr)?;
                    FunctionSetValue::Values(values)
                }
            };
            set_params.push(FunctionDefinitionSetParam { name, value });
        } else if self.parse_keyword(Keyword::RETURN) {
            // `RETURN <expr>` is an alternative to the AS body.
            ensure_not_set(&body.function_body, "RETURN")?;
            body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
        } else {
            // No further attribute clause — end of the function definition.
            break;
        }
    }

    Ok(CreateFunction {
        or_alter: false,
        or_replace,
        temporary,
        name,
        args: Some(args),
        return_type,
        behavior: body.behavior,
        called_on_null: body.called_on_null,
        parallel: body.parallel,
        security: body.security,
        set_params,
        language: body.language,
        function_body: body.function_body,
        if_not_exists: false,
        using: None,
        determinism_specifier: None,
        options: None,
        remote_connection: None,
    })
}
5812
5813 fn parse_hive_create_function(
5817 &mut self,
5818 or_replace: bool,
5819 temporary: bool,
5820 ) -> Result<CreateFunction, ParserError> {
5821 let name = self.parse_object_name(false)?;
5822 self.expect_keyword_is(Keyword::AS)?;
5823
5824 let body = self.parse_create_function_body_string()?;
5825 let using = self.parse_optional_create_function_using()?;
5826
5827 Ok(CreateFunction {
5828 or_alter: false,
5829 or_replace,
5830 temporary,
5831 name,
5832 function_body: Some(body),
5833 using,
5834 if_not_exists: false,
5835 args: None,
5836 return_type: None,
5837 behavior: None,
5838 called_on_null: None,
5839 parallel: None,
5840 security: None,
5841 set_params: vec![],
5842 language: None,
5843 determinism_specifier: None,
5844 options: None,
5845 remote_connection: None,
5846 })
5847 }
5848
/// Parse a BigQuery-style `CREATE FUNCTION` body: optional `IF NOT EXISTS`,
/// name and parameters, optional `RETURNS`, determinism specifier,
/// `LANGUAGE`, `REMOTE WITH CONNECTION`, and an `OPTIONS(...)` list that
/// may appear either before or after the `AS` body.
fn parse_bigquery_create_function(
    &mut self,
    or_replace: bool,
    temporary: bool,
) -> Result<CreateFunction, ParserError> {
    let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let (name, args) = self.parse_create_function_name_and_params()?;

    let return_type = if self.parse_keyword(Keyword::RETURNS) {
        Some(self.parse_function_return_type()?)
    } else {
        None
    };

    let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
        Some(FunctionDeterminismSpecifier::Deterministic)
    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
        Some(FunctionDeterminismSpecifier::NotDeterministic)
    } else {
        None
    };

    let language = if self.parse_keyword(Keyword::LANGUAGE) {
        Some(self.parse_identifier()?)
    } else {
        None
    };

    let remote_connection =
        if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

    // First try OPTIONS(...) in its "before the AS body" position.
    let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

    // Functions with a remote connection have no body; all others require AS.
    let function_body = if remote_connection.is_none() {
        self.expect_keyword_is(Keyword::AS)?;
        let expr = self.parse_expr()?;
        if options.is_none() {
            // OPTIONS was not seen before AS, so it may follow the body;
            // the variant records that AS came before OPTIONS.
            options = self.maybe_parse_options(Keyword::OPTIONS)?;
            Some(CreateFunctionBody::AsBeforeOptions {
                body: expr,
                link_symbol: None,
            })
        } else {
            // OPTIONS preceded AS.
            Some(CreateFunctionBody::AsAfterOptions(expr))
        }
    } else {
        None
    };

    Ok(CreateFunction {
        or_alter: false,
        or_replace,
        temporary,
        if_not_exists,
        name,
        args: Some(args),
        return_type,
        function_body,
        language,
        determinism_specifier,
        options,
        remote_connection,
        using: None,
        behavior: None,
        called_on_null: None,
        parallel: None,
        security: None,
        set_params: vec![],
    })
}
5928
/// Parse a T-SQL style `CREATE FUNCTION` body: name and parameters,
/// mandatory `RETURNS` (either a table-valued `@var TABLE (...)` form or a
/// scalar type), optional `AS`, then a `BEGIN ... END` block or a
/// `RETURN (...)` / `RETURN SELECT ...` inline body.
fn parse_mssql_create_function(
    &mut self,
    or_alter: bool,
    or_replace: bool,
    temporary: bool,
) -> Result<CreateFunction, ParserError> {
    let (name, args) = self.parse_create_function_name_and_params()?;

    self.expect_keyword(Keyword::RETURNS)?;

    // Speculatively parse the table-valued form `RETURNS @name TABLE (...)`;
    // on failure `maybe_parse` rewinds and we fall back to a scalar type.
    let return_table = self.maybe_parse(|p| {
        let return_table_name = p.parse_identifier()?;

        p.expect_keyword_is(Keyword::TABLE)?;
        // Step back so parse_data_type sees the TABLE keyword itself.
        p.prev_token();

        let table_column_defs = match p.parse_data_type()? {
            DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                table_column_defs
            }
            _ => parser_err!(
                "Expected table column definitions after TABLE keyword",
                p.peek_token_ref().span.start
            )?,
        };

        Ok(DataType::NamedTable {
            name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
            columns: table_column_defs,
        })
    })?;

    let data_type = match return_table {
        Some(table_type) => table_type,
        None => self.parse_data_type()?,
    };
    let return_type = Some(FunctionReturnType::DataType(data_type));

    // The AS keyword before the body is optional.
    let _ = self.parse_keyword(Keyword::AS);

    let function_body = if self.peek_keyword(Keyword::BEGIN) {
        // Multi-statement body: BEGIN ... END, with the delimiter tokens
        // preserved in the AST.
        let begin_token = self.expect_keyword(Keyword::BEGIN)?;
        let statements = self.parse_statement_list(&[Keyword::END])?;
        let end_token = self.expect_keyword(Keyword::END)?;

        Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
            begin_token: AttachedToken(begin_token),
            statements,
            end_token: AttachedToken(end_token),
        }))
    } else if self.parse_keyword(Keyword::RETURN) {
        // Inline body: a parenthesized expression or a bare SELECT.
        if self.peek_token_ref().token == Token::LParen {
            Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
        } else if self.peek_keyword(Keyword::SELECT) {
            let select = self.parse_select()?;
            Some(CreateFunctionBody::AsReturnSelect(select))
        } else {
            parser_err!(
                "Expected a subquery (or bare SELECT statement) after RETURN",
                self.peek_token_ref().span.start
            )?
        }
    } else {
        parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
    };

    Ok(CreateFunction {
        or_alter,
        or_replace,
        temporary,
        if_not_exists: false,
        name,
        args: Some(args),
        return_type,
        function_body,
        language: None,
        determinism_specifier: None,
        options: None,
        remote_connection: None,
        using: None,
        behavior: None,
        called_on_null: None,
        parallel: None,
        security: None,
        set_params: vec![],
    })
}
6019
6020 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
6021 if self.parse_keyword(Keyword::SETOF) {
6022 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
6023 } else {
6024 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
6025 }
6026 }
6027
6028 fn parse_create_function_name_and_params(
6029 &mut self,
6030 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
6031 let name = self.parse_object_name(false)?;
6032 let parse_function_param =
6033 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6034 let name = parser.parse_identifier()?;
6035 let data_type = parser.parse_data_type()?;
6036 let default_expr = if parser.consume_token(&Token::Eq) {
6037 Some(parser.parse_expr()?)
6038 } else {
6039 None
6040 };
6041
6042 Ok(OperateFunctionArg {
6043 mode: None,
6044 name: Some(name),
6045 data_type,
6046 default_expr,
6047 })
6048 };
6049 self.expect_token(&Token::LParen)?;
6050 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6051 self.expect_token(&Token::RParen)?;
6052 Ok((name, args))
6053 }
6054
/// Parse one Postgres-style function argument: optional mode
/// (`IN`/`OUT`/`INOUT`/`VARIADIC`), optional name, data type, and optional
/// default (`DEFAULT expr` or `= expr`).
fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
    let mode = if self.parse_keyword(Keyword::IN) {
        Some(ArgMode::In)
    } else if self.parse_keyword(Keyword::OUT) {
        Some(ArgMode::Out)
    } else if self.parse_keyword(Keyword::INOUT) {
        Some(ArgMode::InOut)
    } else if self.parse_keyword(Keyword::VARIADIC) {
        Some(ArgMode::Variadic)
    } else {
        None
    };

    // The argument name is optional, so `foo INT` and `INT` are both
    // valid. Parse a data type first; if ANOTHER data type follows, the
    // first "type" was actually the argument name.
    let mut name = None;
    let mut data_type = self.parse_data_type()?;

    // Remember this token position so the first parse's token can be
    // re-read as the argument name below.
    let data_type_idx = self.get_current_index();

    // A following DEFAULT keyword must not be mistaken for a second type.
    fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
        if parser.peek_keyword(Keyword::DEFAULT) {
            parser_err!(
                "The DEFAULT keyword is not a type",
                parser.peek_token_ref().span.start
            )
        } else {
            parser.parse_data_type()
        }
    }

    if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
        let token = self.token_at(data_type_idx);

        // A name must have been a single word token; anything else (e.g. a
        // parenthesized type) cannot be reinterpreted as a name.
        if !matches!(token.token, Token::Word(_)) {
            return self.expected("a name or type", token.clone());
        }

        // Reinterpret the first parse as the name, keep the second as type.
        name = Some(Ident::new(token.to_string()));
        data_type = next_data_type;
    }

    // Optional default value in either spelling.
    let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
    {
        Some(self.parse_expr()?)
    } else {
        None
    };
    Ok(OperateFunctionArg {
        mode,
        name,
        data_type,
        default_expr,
    })
}
6115
/// Parse one argument of an aggregate signature. Unlike a plain function
/// argument, only the `IN` mode is allowed and no default value may be
/// given.
fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
    let mode = if self.parse_keyword(Keyword::IN) {
        Some(ArgMode::In)
    } else {
        // OUT/INOUT/VARIADIC are rejected explicitly for aggregates.
        if self
            .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
            .is_some()
        {
            return self.expected_ref(
                "IN or argument type in aggregate signature",
                self.peek_token_ref(),
            );
        }
        None
    };

    // Same name-vs-type disambiguation as parse_function_arg: parse a type
    // first; if a second type follows, the first was the argument name.
    let mut name = None;
    let mut data_type = self.parse_data_type()?;
    // Token position used to recover the name token below.
    let data_type_idx = self.get_current_index();

    // Tokens that end the argument (DEFAULT, ORDER, ',' or ')') must not
    // be mistaken for the start of a second data type.
    fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
        if parser.peek_keyword(Keyword::DEFAULT)
            || parser.peek_keyword(Keyword::ORDER)
            || parser.peek_token_ref().token == Token::Comma
            || parser.peek_token_ref().token == Token::RParen
        {
            parser_err!(
                "The current token cannot start an aggregate argument type",
                parser.peek_token_ref().span.start
            )
        } else {
            parser.parse_data_type()
        }
    }

    if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
        let token = self.token_at(data_type_idx);
        // The recovered name must be a single word token.
        if !matches!(token.token, Token::Word(_)) {
            return self.expected("a name or type", token.clone());
        }

        name = Some(Ident::new(token.to_string()));
        data_type = next_data_type;
    }

    // Aggregate arguments cannot carry default values.
    if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
        return self.expected_ref(
            "',' or ')' or ORDER BY after aggregate argument type",
            self.peek_token_ref(),
        );
    }

    Ok(OperateFunctionArg {
        mode,
        name,
        data_type,
        default_expr: None,
    })
}
6178
6179 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6185 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6186 {
6187 self.prev_token();
6188 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6189 }
6190 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6191 let trigger_name = self.parse_object_name(false)?;
6192 let table_name = if self.parse_keyword(Keyword::ON) {
6193 Some(self.parse_object_name(false)?)
6194 } else {
6195 None
6196 };
6197 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6198 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6199 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6200 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6201 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6202 )),
6203 None => None,
6204 };
6205 Ok(DropTrigger {
6206 if_exists,
6207 trigger_name,
6208 table_name,
6209 option,
6210 })
6211 }
6212
6213 pub fn parse_create_trigger(
6215 &mut self,
6216 temporary: bool,
6217 or_alter: bool,
6218 or_replace: bool,
6219 is_constraint: bool,
6220 ) -> Result<CreateTrigger, ParserError> {
6221 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6222 {
6223 self.prev_token();
6224 return self.expected_ref("an object type after CREATE", self.peek_token_ref());
6225 }
6226
6227 let name = self.parse_object_name(false)?;
6228 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
6229
6230 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
6231 self.expect_keyword_is(Keyword::ON)?;
6232 let table_name = self.parse_object_name(false)?;
6233
6234 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
6235 self.parse_object_name(true).ok()
6236 } else {
6237 None
6238 };
6239
6240 let characteristics = self.parse_constraint_characteristics()?;
6241
6242 let mut referencing = vec![];
6243 if self.parse_keyword(Keyword::REFERENCING) {
6244 while let Some(refer) = self.parse_trigger_referencing()? {
6245 referencing.push(refer);
6246 }
6247 }
6248
6249 let trigger_object = if self.parse_keyword(Keyword::FOR) {
6250 let include_each = self.parse_keyword(Keyword::EACH);
6251 let trigger_object =
6252 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
6253 Keyword::ROW => TriggerObject::Row,
6254 Keyword::STATEMENT => TriggerObject::Statement,
6255 unexpected_keyword => return Err(ParserError::ParserError(
6256 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
6257 )),
6258 };
6259
6260 Some(if include_each {
6261 TriggerObjectKind::ForEach(trigger_object)
6262 } else {
6263 TriggerObjectKind::For(trigger_object)
6264 })
6265 } else {
6266 let _ = self.parse_keyword(Keyword::FOR);
6267
6268 None
6269 };
6270
6271 let condition = self
6272 .parse_keyword(Keyword::WHEN)
6273 .then(|| self.parse_expr())
6274 .transpose()?;
6275
6276 let mut exec_body = None;
6277 let mut statements = None;
6278 if self.parse_keyword(Keyword::EXECUTE) {
6279 exec_body = Some(self.parse_trigger_exec_body()?);
6280 } else {
6281 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6282 }
6283
6284 Ok(CreateTrigger {
6285 or_alter,
6286 temporary,
6287 or_replace,
6288 is_constraint,
6289 name,
6290 period,
6291 period_before_table: true,
6292 events,
6293 table_name,
6294 referenced_table_name,
6295 referencing,
6296 trigger_object,
6297 condition,
6298 exec_body,
6299 statements_as: false,
6300 statements,
6301 characteristics,
6302 })
6303 }
6304
6305 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6307 Ok(
6308 match self.expect_one_of_keywords(&[
6309 Keyword::FOR,
6310 Keyword::BEFORE,
6311 Keyword::AFTER,
6312 Keyword::INSTEAD,
6313 ])? {
6314 Keyword::FOR => TriggerPeriod::For,
6315 Keyword::BEFORE => TriggerPeriod::Before,
6316 Keyword::AFTER => TriggerPeriod::After,
6317 Keyword::INSTEAD => self
6318 .expect_keyword_is(Keyword::OF)
6319 .map(|_| TriggerPeriod::InsteadOf)?,
6320 unexpected_keyword => return Err(ParserError::ParserError(
6321 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6322 )),
6323 },
6324 )
6325 }
6326
6327 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6329 Ok(
6330 match self.expect_one_of_keywords(&[
6331 Keyword::INSERT,
6332 Keyword::UPDATE,
6333 Keyword::DELETE,
6334 Keyword::TRUNCATE,
6335 ])? {
6336 Keyword::INSERT => TriggerEvent::Insert,
6337 Keyword::UPDATE => {
6338 if self.parse_keyword(Keyword::OF) {
6339 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6340 TriggerEvent::Update(cols)
6341 } else {
6342 TriggerEvent::Update(vec![])
6343 }
6344 }
6345 Keyword::DELETE => TriggerEvent::Delete,
6346 Keyword::TRUNCATE => TriggerEvent::Truncate,
6347 unexpected_keyword => return Err(ParserError::ParserError(
6348 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6349 )),
6350 },
6351 )
6352 }
6353
    /// Parse one `{OLD | NEW} TABLE [AS] name` entry of a trigger's
    /// `REFERENCING` clause, returning `Ok(None)` when the upcoming tokens do
    /// not start such an entry.
    ///
    /// NOTE(review): when `OLD`/`NEW` is matched but not followed by `TABLE`,
    /// the keyword has already been consumed by the time the `_` arm returns
    /// `None` — confirm callers tolerate that token being eaten.
    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::OldTable
            }
            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::NewTable
            }
            _ => {
                // Not a referencing entry; the caller's loop stops here.
                return Ok(None);
            }
        };

        // The `AS` keyword is optional; record whether it was present so the
        // statement can round-trip.
        let is_as = self.parse_keyword(Keyword::AS);
        let transition_relation_name = self.parse_object_name(false)?;
        Ok(Some(TriggerReferencing {
            refer_type,
            is_as,
            transition_relation_name,
        }))
    }
6376
6377 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6384 let exec_type = match self
6385 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6386 {
6387 Keyword::FUNCTION => TriggerExecBodyType::Function,
6388 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6389 unexpected_keyword => {
6390 return Err(ParserError::ParserError(format!(
6391 "Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"
6392 )))
6393 }
6394 };
6395
6396 let func_name = self.parse_object_name(false)?;
6397
6398 let args = if self.consume_token(&Token::LParen) {
6399 if self.consume_token(&Token::RParen) {
6400 Some(vec![])
6401 } else {
6402 let exprs = self.parse_comma_separated(Parser::parse_expr)?;
6403 self.expect_token(&Token::RParen)?;
6404 Some(exprs)
6405 }
6406 } else {
6407 None
6408 };
6409
6410 Ok(TriggerExecBody {
6411 exec_type,
6412 func_name,
6413 args,
6414 })
6415 }
6416
6417 pub fn parse_create_macro(
6419 &mut self,
6420 or_replace: bool,
6421 temporary: bool,
6422 ) -> Result<Statement, ParserError> {
6423 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6424 let name = self.parse_object_name(false)?;
6425 self.expect_token(&Token::LParen)?;
6426 let args = if self.consume_token(&Token::RParen) {
6427 self.prev_token();
6428 None
6429 } else {
6430 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6431 };
6432
6433 self.expect_token(&Token::RParen)?;
6434 self.expect_keyword_is(Keyword::AS)?;
6435
6436 Ok(Statement::CreateMacro {
6437 or_replace,
6438 temporary,
6439 name,
6440 args,
6441 definition: if self.parse_keyword(Keyword::TABLE) {
6442 MacroDefinition::Table(self.parse_query()?)
6443 } else {
6444 MacroDefinition::Expr(self.parse_expr()?)
6445 },
6446 })
6447 } else {
6448 self.prev_token();
6449 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6450 }
6451 }
6452
6453 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6454 let name = self.parse_identifier()?;
6455
6456 let default_expr =
6457 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6458 Some(self.parse_expr()?)
6459 } else {
6460 None
6461 };
6462 Ok(MacroArg { name, default_expr })
6463 }
6464
6465 pub fn parse_create_external_table(
6467 &mut self,
6468 or_replace: bool,
6469 ) -> Result<CreateTable, ParserError> {
6470 self.expect_keyword_is(Keyword::TABLE)?;
6471 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6472 let table_name = self.parse_object_name(false)?;
6473 let (columns, constraints) = self.parse_columns()?;
6474
6475 let hive_distribution = self.parse_hive_distribution()?;
6476 let hive_formats = self.parse_hive_formats()?;
6477
6478 let file_format = if let Some(ref hf) = hive_formats {
6479 if let Some(ref ff) = hf.storage {
6480 match ff {
6481 HiveIOFormat::FileFormat { format } => Some(*format),
6482 _ => None,
6483 }
6484 } else {
6485 None
6486 }
6487 } else {
6488 None
6489 };
6490 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6491 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6492 let table_options = if !table_properties.is_empty() {
6493 CreateTableOptions::TableProperties(table_properties)
6494 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6495 CreateTableOptions::Options(options)
6496 } else {
6497 CreateTableOptions::None
6498 };
6499 Ok(CreateTableBuilder::new(table_name)
6500 .columns(columns)
6501 .constraints(constraints)
6502 .hive_distribution(hive_distribution)
6503 .hive_formats(hive_formats)
6504 .table_options(table_options)
6505 .or_replace(or_replace)
6506 .if_not_exists(if_not_exists)
6507 .external(true)
6508 .file_format(file_format)
6509 .location(location)
6510 .build())
6511 }
6512
6513 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6517 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6518 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6519 let table_name = self.parse_object_name(true)?;
6520
6521 self.expect_keyword_is(Keyword::CLONE)?;
6522 let clone = Some(self.parse_object_name(true)?);
6523
6524 let version =
6525 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6526 {
6527 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6528 } else {
6529 None
6530 };
6531
6532 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6533 CreateTableOptions::Options(options)
6534 } else {
6535 CreateTableOptions::None
6536 };
6537
6538 Ok(CreateTableBuilder::new(table_name)
6539 .snapshot(true)
6540 .if_not_exists(if_not_exists)
6541 .clone_clause(clone)
6542 .version(version)
6543 .table_options(table_options)
6544 .build())
6545 }
6546
6547 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6549 let next_token = self.next_token();
6550 match &next_token.token {
6551 Token::Word(w) => match w.keyword {
6552 Keyword::AVRO => Ok(FileFormat::AVRO),
6553 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6554 Keyword::ORC => Ok(FileFormat::ORC),
6555 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6556 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6557 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6558 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6559 _ => self.expected("fileformat", next_token),
6560 },
6561 _ => self.expected("fileformat", next_token),
6562 }
6563 }
6564
6565 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6566 if self.consume_token(&Token::Eq) {
6567 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6568 } else {
6569 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6570 }
6571 }
6572
6573 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6575 let next_token = self.next_token();
6576 match &next_token.token {
6577 Token::Word(w) => match w.keyword {
6578 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6579 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6580 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6581 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6582 _ => self.expected("fileformat", next_token),
6583 },
6584 _ => self.expected("fileformat", next_token),
6585 }
6586 }
6587
    /// Parse the remainder of a `CREATE ... VIEW` statement. The `CREATE`
    /// keyword and the modifiers passed in as arguments have already been
    /// consumed by the caller.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery permits unquoted hyphens inside the view name.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` may appear either before or after the view name;
        // remember which position was used so the statement round-trips.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        // `WITH (...)` options; may be replaced by `OPTIONS(...)` below.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        // Optional `CLUSTER BY (col, ...)`.
        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery/Generic `OPTIONS(...)`; overrides earlier WITH options
        // when non-empty.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse/Generic `TO <table>` target.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '...'` for dialects that support it on views.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift/Generic trailing `WITH NO SCHEMA BINDING` clause.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        // Materialized views may end with `WITH [NO] DATA`.
        let with_data = if materialized && self.parse_keyword(Keyword::WITH) {
            if self.parse_keyword(Keyword::NO) {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(false)
            } else {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(true)
            }
        } else {
            None
        };

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
            with_data,
        })
    }
6697
6698 fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6702 let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6703 self.expect_token(&Token::Eq)?;
6704 Some(
6705 match self.expect_one_of_keywords(&[
6706 Keyword::UNDEFINED,
6707 Keyword::MERGE,
6708 Keyword::TEMPTABLE,
6709 ])? {
6710 Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6711 Keyword::MERGE => CreateViewAlgorithm::Merge,
6712 Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6713 _ => {
6714 self.prev_token();
6715 let found = self.next_token();
6716 return self
6717 .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6718 }
6719 },
6720 )
6721 } else {
6722 None
6723 };
6724 let definer = if self.parse_keyword(Keyword::DEFINER) {
6725 self.expect_token(&Token::Eq)?;
6726 Some(self.parse_grantee_name()?)
6727 } else {
6728 None
6729 };
6730 let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6731 Some(
6732 match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6733 Keyword::DEFINER => CreateViewSecurity::Definer,
6734 Keyword::INVOKER => CreateViewSecurity::Invoker,
6735 _ => {
6736 self.prev_token();
6737 let found = self.next_token();
6738 return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6739 }
6740 },
6741 )
6742 } else {
6743 None
6744 };
6745 if algorithm.is_some() || definer.is_some() || security.is_some() {
6746 Ok(Some(CreateViewParams {
6747 algorithm,
6748 definer,
6749 security,
6750 }))
6751 } else {
6752 Ok(None)
6753 }
6754 }
6755
    /// Parse the remainder of a `CREATE ROLE` statement: one or more role
    /// names followed by an optional `WITH` and a dialect-specific list of
    /// role options, each of which may be given at most once.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // `WITH` is optional noise before the option list.
        let _ = self.parse_keyword(Keyword::WITH);
        // Which option keywords are recognized depends on the dialect;
        // unrecognized dialects accept no options at all.
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Each option may appear at most once; duplicates are reported with
        // the source location of the repeated keyword.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the option keyword just consumed, used in
            // duplicate-option error messages.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            // Each arm yields Result<(), ParserError>; the trailing `?` after
            // the match propagates duplicate-option errors.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a password literal.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN ROLE ...` and `IN GROUP ...` are distinct clauses.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
6980
6981 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6983 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6984 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6985 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6986 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6987 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6988 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6989 )),
6990 None => {
6991 match self.parse_identifier() {
6992 Ok(ident) => Owner::Ident(ident),
6993 Err(e) => {
6994 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6995 }
6996 }
6997 }
6998 };
6999 Ok(owner)
7000 }
7001
7002 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
7004 let name = self.parse_object_name(false)?;
7005 self.expect_keyword_is(Keyword::AS)?;
7006 let data_type = self.parse_data_type()?;
7007 let collation = if self.parse_keyword(Keyword::COLLATE) {
7008 Some(self.parse_identifier()?)
7009 } else {
7010 None
7011 };
7012 let default = if self.parse_keyword(Keyword::DEFAULT) {
7013 Some(self.parse_expr()?)
7014 } else {
7015 None
7016 };
7017 let mut constraints = Vec::new();
7018 while let Some(constraint) = self.parse_optional_table_constraint()? {
7019 constraints.push(constraint);
7020 }
7021
7022 Ok(CreateDomain {
7023 name,
7024 data_type,
7025 collation,
7026 default,
7027 constraints,
7028 })
7029 }
7030
7031 pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
7041 let name = self.parse_identifier()?;
7042 self.expect_keyword_is(Keyword::ON)?;
7043 let table_name = self.parse_object_name(false)?;
7044
7045 let policy_type = if self.parse_keyword(Keyword::AS) {
7046 let keyword =
7047 self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
7048 Some(match keyword {
7049 Keyword::PERMISSIVE => CreatePolicyType::Permissive,
7050 Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
7051 unexpected_keyword => return Err(ParserError::ParserError(
7052 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
7053 )),
7054 })
7055 } else {
7056 None
7057 };
7058
7059 let command = if self.parse_keyword(Keyword::FOR) {
7060 let keyword = self.expect_one_of_keywords(&[
7061 Keyword::ALL,
7062 Keyword::SELECT,
7063 Keyword::INSERT,
7064 Keyword::UPDATE,
7065 Keyword::DELETE,
7066 ])?;
7067 Some(match keyword {
7068 Keyword::ALL => CreatePolicyCommand::All,
7069 Keyword::SELECT => CreatePolicyCommand::Select,
7070 Keyword::INSERT => CreatePolicyCommand::Insert,
7071 Keyword::UPDATE => CreatePolicyCommand::Update,
7072 Keyword::DELETE => CreatePolicyCommand::Delete,
7073 unexpected_keyword => return Err(ParserError::ParserError(
7074 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
7075 )),
7076 })
7077 } else {
7078 None
7079 };
7080
7081 let to = if self.parse_keyword(Keyword::TO) {
7082 Some(self.parse_comma_separated(|p| p.parse_owner())?)
7083 } else {
7084 None
7085 };
7086
7087 let using = if self.parse_keyword(Keyword::USING) {
7088 self.expect_token(&Token::LParen)?;
7089 let expr = self.parse_expr()?;
7090 self.expect_token(&Token::RParen)?;
7091 Some(expr)
7092 } else {
7093 None
7094 };
7095
7096 let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
7097 self.expect_token(&Token::LParen)?;
7098 let expr = self.parse_expr()?;
7099 self.expect_token(&Token::RParen)?;
7100 Some(expr)
7101 } else {
7102 None
7103 };
7104
7105 Ok(CreatePolicy {
7106 name,
7107 table_name,
7108 policy_type,
7109 command,
7110 to,
7111 using,
7112 with_check,
7113 })
7114 }
7115
7116 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7126 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7127 let name = self.parse_identifier()?;
7128
7129 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7130 Some(self.parse_literal_string()?)
7131 } else {
7132 None
7133 };
7134
7135 let url = if self.parse_keyword(Keyword::URL) {
7136 Some(self.parse_literal_string()?)
7137 } else {
7138 None
7139 };
7140
7141 let comment = self.parse_optional_inline_comment()?;
7142
7143 let with_dcproperties =
7144 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7145 properties if !properties.is_empty() => Some(properties),
7146 _ => None,
7147 };
7148
7149 Ok(CreateConnector {
7150 name,
7151 if_not_exists,
7152 connector_type,
7153 url,
7154 comment,
7155 with_dcproperties,
7156 })
7157 }
7158
7159 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7165 let mut parts = vec![];
7166 loop {
7167 parts.push(ObjectNamePart::Identifier(Ident::new(
7168 self.next_token().to_string(),
7169 )));
7170 if !self.consume_token(&Token::Period) {
7171 break;
7172 }
7173 }
7174 Ok(ObjectName(parts))
7175 }
7176
    /// Parse the remainder of a `CREATE OPERATOR` statement:
    /// `name ( FUNCTION = ..., [LEFTARG = type,] [RIGHTARG = type,] ... )`.
    ///
    /// Each parameter may appear at most once; the match guards below reject
    /// duplicates by letting repeated keywords fall through to the error arm.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        // Comma-separated parameter list; order is not significant.
        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Flag options take no value.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE are alternatives for the same slot.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or the
                // explicit `OPERATOR(name)` form.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a keyword's "not yet seen" guard fails.
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION (or PROCEDURE) is the only mandatory parameter.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7298
7299 pub fn parse_create_aggregate(
7303 &mut self,
7304 or_replace: bool,
7305 ) -> Result<CreateAggregate, ParserError> {
7306 let name = self.parse_object_name(false)?;
7307
7308 self.expect_token(&Token::LParen)?;
7310 let args = if self.consume_token(&Token::Mul) {
7311 vec![]
7313 } else if self.consume_token(&Token::RParen) {
7314 self.prev_token();
7315 vec![]
7316 } else {
7317 self.parse_comma_separated(|p| p.parse_data_type())?
7318 };
7319 self.expect_token(&Token::RParen)?;
7320
7321 self.expect_token(&Token::LParen)?;
7323 let mut options: Vec<CreateAggregateOption> = Vec::new();
7324 loop {
7325 let token = self.next_token();
7326 match &token.token {
7327 Token::RParen => break,
7328 Token::Comma => continue,
7329 Token::Word(word) => {
7330 let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
7331 options.push(option);
7332 }
7333 other => {
7334 return Err(ParserError::ParserError(format!(
7335 "Unexpected token in CREATE AGGREGATE options: {other:?}"
7336 )));
7337 }
7338 }
7339 }
7340
7341 Ok(CreateAggregate {
7342 or_replace,
7343 name,
7344 args,
7345 options,
7346 })
7347 }
7348
    /// Parse a single option from the parenthesized option list of
    /// `CREATE AGGREGATE`.
    ///
    /// `key` is the option name, already uppercased by the caller. Options
    /// are either `KEY = value` pairs or bare flags (`FINALFUNC_EXTRA`,
    /// `MFINALFUNC_EXTRA`, `HYPOTHETICAL`). Unknown keys are an error.
    fn parse_create_aggregate_option(
        &mut self,
        key: &str,
    ) -> Result<CreateAggregateOption, ParserError> {
        match key {
            // --- Plain (non-moving) aggregate options ---
            "SFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sfunc(self.parse_object_name(false)?))
            }
            "STYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
            }
            "SSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Sspace(size))
            }
            "FINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Finalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag — no `= value` follows.
            "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
            "FINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::FinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "COMBINEFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Combinefunc(
                    self.parse_object_name(false)?,
                ))
            }
            "SERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Serialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "DESERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Deserialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "INITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
            }
            // --- Moving-aggregate (M*) counterparts ---
            "MSFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Msfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MINVFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minvfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MSTYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
            }
            "MSSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Msspace(size))
            }
            "MFINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mfinalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag — no `= value` follows.
            "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
            "MFINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::MfinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "MINITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
            }
            // --- Misc options ---
            "SORTOP" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sortop(
                    self.parse_object_name(false)?,
                ))
            }
            "PARALLEL" => {
                self.expect_token(&Token::Eq)?;
                let parallel = match self.expect_one_of_keywords(&[
                    Keyword::SAFE,
                    Keyword::RESTRICTED,
                    Keyword::UNSAFE,
                ])? {
                    Keyword::SAFE => FunctionParallel::Safe,
                    Keyword::RESTRICTED => FunctionParallel::Restricted,
                    Keyword::UNSAFE => FunctionParallel::Unsafe,
                    // expect_one_of_keywords only returns a listed keyword,
                    // so this arm should be unreachable.
                    other => {
                        return Err(ParserError::ParserError(format!(
                            "Internal parser error: unexpected keyword `{other}` for PARALLEL"
                        )))
                    }
                };
                Ok(CreateAggregateOption::Parallel(parallel))
            }
            // Bare flag — no `= value` follows.
            "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
            other => Err(ParserError::ParserError(format!(
                "Unknown CREATE AGGREGATE option: {other}"
            ))),
        }
    }
7470
7471 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7472 let token = self.next_token();
7473 match &token.token {
7474 Token::Word(word) => match word.value.to_uppercase().as_str() {
7475 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7476 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7477 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7478 other => Err(ParserError::ParserError(format!(
7479 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7480 ))),
7481 },
7482 other => Err(ParserError::ParserError(format!(
7483 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7484 ))),
7485 }
7486 }
7487
7488 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7492 let name = self.parse_object_name(false)?;
7493 self.expect_keyword(Keyword::USING)?;
7494 let using = self.parse_identifier()?;
7495
7496 Ok(CreateOperatorFamily { name, using })
7497 }
7498
7499 pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
7503 let name = self.parse_object_name(false)?;
7504 let default = self.parse_keyword(Keyword::DEFAULT);
7505 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
7506 let for_type = self.parse_data_type()?;
7507 self.expect_keyword(Keyword::USING)?;
7508 let using = self.parse_identifier()?;
7509
7510 let family = if self.parse_keyword(Keyword::FAMILY) {
7511 Some(self.parse_object_name(false)?)
7512 } else {
7513 None
7514 };
7515
7516 self.expect_keyword(Keyword::AS)?;
7517
7518 let mut items = vec![];
7519 loop {
7520 if self.parse_keyword(Keyword::OPERATOR) {
7521 let strategy_number = self.parse_literal_uint()?;
7522 let operator_name = self.parse_operator_name()?;
7523
7524 let op_types = if self.consume_token(&Token::LParen) {
7526 let left = self.parse_data_type()?;
7527 self.expect_token(&Token::Comma)?;
7528 let right = self.parse_data_type()?;
7529 self.expect_token(&Token::RParen)?;
7530 Some(OperatorArgTypes { left, right })
7531 } else {
7532 None
7533 };
7534
7535 let purpose = if self.parse_keyword(Keyword::FOR) {
7537 if self.parse_keyword(Keyword::SEARCH) {
7538 Some(OperatorPurpose::ForSearch)
7539 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7540 let sort_family = self.parse_object_name(false)?;
7541 Some(OperatorPurpose::ForOrderBy { sort_family })
7542 } else {
7543 return self
7544 .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
7545 }
7546 } else {
7547 None
7548 };
7549
7550 items.push(OperatorClassItem::Operator {
7551 strategy_number,
7552 operator_name,
7553 op_types,
7554 purpose,
7555 });
7556 } else if self.parse_keyword(Keyword::FUNCTION) {
7557 let support_number = self.parse_literal_uint()?;
7558
7559 let op_types = if self.consume_token(&Token::LParen)
7561 && self.peek_token_ref().token != Token::RParen
7562 {
7563 let mut types = vec![];
7564 loop {
7565 types.push(self.parse_data_type()?);
7566 if !self.consume_token(&Token::Comma) {
7567 break;
7568 }
7569 }
7570 self.expect_token(&Token::RParen)?;
7571 Some(types)
7572 } else if self.consume_token(&Token::LParen) {
7573 self.expect_token(&Token::RParen)?;
7574 Some(vec![])
7575 } else {
7576 None
7577 };
7578
7579 let function_name = self.parse_object_name(false)?;
7580
7581 let argument_types = if self.consume_token(&Token::LParen) {
7583 let mut types = vec![];
7584 loop {
7585 if self.peek_token_ref().token == Token::RParen {
7586 break;
7587 }
7588 types.push(self.parse_data_type()?);
7589 if !self.consume_token(&Token::Comma) {
7590 break;
7591 }
7592 }
7593 self.expect_token(&Token::RParen)?;
7594 types
7595 } else {
7596 vec![]
7597 };
7598
7599 items.push(OperatorClassItem::Function {
7600 support_number,
7601 op_types,
7602 function_name,
7603 argument_types,
7604 });
7605 } else if self.parse_keyword(Keyword::STORAGE) {
7606 let storage_type = self.parse_data_type()?;
7607 items.push(OperatorClassItem::Storage { storage_type });
7608 } else {
7609 break;
7610 }
7611
7612 if !self.consume_token(&Token::Comma) {
7614 break;
7615 }
7616 }
7617
7618 Ok(CreateOperatorClass {
7619 name,
7620 default,
7621 for_type,
7622 using,
7623 family,
7624 items,
7625 })
7626 }
7627
    /// Parse the statement following the `DROP` keyword.
    ///
    /// Simple object kinds fall through to the generic [`Statement::Drop`];
    /// kinds with their own grammar (FUNCTION, POLICY, CONNECTOR, DOMAIN,
    /// PROCEDURE, SECRET, TRIGGER, EXTENSION, OPERATOR) delegate to a
    /// dedicated `parse_drop_*` helper and return early.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // MySQL/DuckDB allow `DROP TEMPORARY ...`; DuckDB also PERSISTENT
        // (both only meaningful for SECRET / TABLE below).
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        // Keyword order matters: each branch consumes its keyword(s).
        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::COLLATION) {
            ObjectType::Collation
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function().map(Into::into);
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy().map(Into::into);
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain().map(Into::into);
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger().map(Into::into);
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR FAMILY / OPERATOR CLASS / bare OPERATOR.
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected_ref(
                "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token_ref(),
            );
        };
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Remember the location before the trailing modifiers so error
        // messages below point at them.
        let loc = self.peek_token_ref().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        // CASCADE and RESTRICT are mutually exclusive.
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        // DROP ROLE accepts none of the trailing modifiers.
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // Optional `ON table` (e.g. MySQL `DROP INDEX idx ON tbl`).
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
7727
7728 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7729 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7730 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7731 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7732 _ => None,
7733 }
7734 }
7735
7736 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7741 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7742 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7743 let drop_behavior = self.parse_optional_drop_behavior();
7744 Ok(DropFunction {
7745 if_exists,
7746 func_desc,
7747 drop_behavior,
7748 })
7749 }
7750
7751 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7757 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7758 let name = self.parse_identifier()?;
7759 self.expect_keyword_is(Keyword::ON)?;
7760 let table_name = self.parse_object_name(false)?;
7761 let drop_behavior = self.parse_optional_drop_behavior();
7762 Ok(DropPolicy {
7763 if_exists,
7764 name,
7765 table_name,
7766 drop_behavior,
7767 })
7768 }
7769 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7775 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7776 let name = self.parse_identifier()?;
7777 Ok(Statement::DropConnector { if_exists, name })
7778 }
7779
7780 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7784 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7785 let name = self.parse_object_name(false)?;
7786 let drop_behavior = self.parse_optional_drop_behavior();
7787 Ok(DropDomain {
7788 if_exists,
7789 name,
7790 drop_behavior,
7791 })
7792 }
7793
7794 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7799 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7800 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7801 let drop_behavior = self.parse_optional_drop_behavior();
7802 Ok(Statement::DropProcedure {
7803 if_exists,
7804 proc_desc,
7805 drop_behavior,
7806 })
7807 }
7808
7809 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7810 let name = self.parse_object_name(false)?;
7811
7812 let args = if self.consume_token(&Token::LParen) {
7813 if self.consume_token(&Token::RParen) {
7814 Some(vec![])
7815 } else {
7816 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7817 self.expect_token(&Token::RParen)?;
7818 Some(args)
7819 }
7820 } else {
7821 None
7822 };
7823
7824 Ok(FunctionDesc { name, args })
7825 }
7826
7827 fn parse_drop_secret(
7829 &mut self,
7830 temporary: bool,
7831 persistent: bool,
7832 ) -> Result<Statement, ParserError> {
7833 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7834 let name = self.parse_identifier()?;
7835 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7836 self.parse_identifier().ok()
7837 } else {
7838 None
7839 };
7840 let temp = match (temporary, persistent) {
7841 (true, false) => Some(true),
7842 (false, true) => Some(false),
7843 (false, false) => None,
7844 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
7845 };
7846
7847 Ok(Statement::DropSecret {
7848 if_exists,
7849 temporary: temp,
7850 name,
7851 storage_specifier,
7852 })
7853 }
7854
    /// Parse a `DECLARE` statement.
    ///
    /// Dispatches to dialect-specific parsers for BigQuery, Snowflake, and
    /// MsSql; the fallthrough implements the generic cursor form:
    /// `DECLARE name [BINARY] [INSENSITIVE | ASENSITIVE] [[NO] SCROLL]
    /// CURSOR [WITH | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // BINARY is recorded even when absent (Some(false)) in this path.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        // Only the cursor form is supported in this generic path.
        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // parse_one_of_keywords only returns a listed keyword,
                    // so this arm should be unreachable.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7929
7930 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7938 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7939
7940 let data_type = match &self.peek_token_ref().token {
7941 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7942 _ => Some(self.parse_data_type()?),
7943 };
7944
7945 let expr = if data_type.is_some() {
7946 if self.parse_keyword(Keyword::DEFAULT) {
7947 Some(self.parse_expr()?)
7948 } else {
7949 None
7950 }
7951 } else {
7952 self.expect_keyword_is(Keyword::DEFAULT)?;
7955 Some(self.parse_expr()?)
7956 };
7957
7958 Ok(Statement::Declare {
7959 stmts: vec![Declare {
7960 names,
7961 data_type,
7962 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7963 declare_type: None,
7964 binary: None,
7965 sensitive: None,
7966 scroll: None,
7967 hold: None,
7968 for_query: None,
7969 }],
7970 })
7971 }
7972
    /// Parse a Snowflake `DECLARE` entry list.
    ///
    /// Each entry is one of:
    /// - `name CURSOR FOR <query | expr>`
    /// - `name RESULTSET [initializer]`
    /// - `name EXCEPTION [(...)]`
    /// - `name [type] [DEFAULT expr | := expr]` (plain variable)
    ///
    /// Entries are separated by `;`; the list ends when the token after a
    /// `;` is a known keyword (taken to begin the next statement).
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `FOR SELECT ...` is a query; anything else is parsed
                    // as an expression.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // Initializer is optional; a `;` right after RESULTSET
                    // means none was given.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // An optional parenthesized payload follows EXCEPTION.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: immediate initializer, or a data type
                    // optionally followed by an initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        // Next token is not a keyword: treat it as another
                        // declaration name and keep looping.
                        continue;
                    }
                    _ => {
                        // A keyword follows, so the `;` terminated the whole
                        // DECLARE block — push it back for the caller.
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8088
8089 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8101 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8102
8103 Ok(Statement::Declare { stmts })
8104 }
8105
    /// Parse a single entry of a T-SQL `DECLARE` list:
    /// `@name [AS] type [= expr]` or `name CURSOR FOR query`.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            // Variable names must start with `@`; a bare identifier is only
            // accepted when CURSOR follows (cursor declaration).
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // CURSOR carries no data type; `AS` before the type is optional.
        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // A cursor takes `FOR query`; a variable takes an optional `= expr`.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
8169
8170 pub fn parse_snowflake_variable_declaration_expression(
8178 &mut self,
8179 ) -> Result<Option<DeclareAssignment>, ParserError> {
8180 Ok(match &self.peek_token_ref().token {
8181 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8182 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8184 }
8185 Token::Assignment => {
8186 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8188 self.parse_expr()?,
8189 )))
8190 }
8191 _ => None,
8192 })
8193 }
8194
8195 pub fn parse_mssql_variable_declaration_expression(
8202 &mut self,
8203 ) -> Result<Option<DeclareAssignment>, ParserError> {
8204 Ok(match &self.peek_token_ref().token {
8205 Token::Eq => {
8206 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8208 self.parse_expr()?,
8209 )))
8210 }
8211 _ => None,
8212 })
8213 }
8214
8215 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
8217 let direction = if self.parse_keyword(Keyword::NEXT) {
8218 FetchDirection::Next
8219 } else if self.parse_keyword(Keyword::PRIOR) {
8220 FetchDirection::Prior
8221 } else if self.parse_keyword(Keyword::FIRST) {
8222 FetchDirection::First
8223 } else if self.parse_keyword(Keyword::LAST) {
8224 FetchDirection::Last
8225 } else if self.parse_keyword(Keyword::ABSOLUTE) {
8226 FetchDirection::Absolute {
8227 limit: self.parse_number_value()?,
8228 }
8229 } else if self.parse_keyword(Keyword::RELATIVE) {
8230 FetchDirection::Relative {
8231 limit: self.parse_number_value()?,
8232 }
8233 } else if self.parse_keyword(Keyword::FORWARD) {
8234 if self.parse_keyword(Keyword::ALL) {
8235 FetchDirection::ForwardAll
8236 } else {
8237 FetchDirection::Forward {
8238 limit: Some(self.parse_number_value()?),
8240 }
8241 }
8242 } else if self.parse_keyword(Keyword::BACKWARD) {
8243 if self.parse_keyword(Keyword::ALL) {
8244 FetchDirection::BackwardAll
8245 } else {
8246 FetchDirection::Backward {
8247 limit: Some(self.parse_number_value()?),
8249 }
8250 }
8251 } else if self.parse_keyword(Keyword::ALL) {
8252 FetchDirection::All
8253 } else {
8254 FetchDirection::Count {
8255 limit: self.parse_number_value()?,
8256 }
8257 };
8258
8259 let position = if self.peek_keyword(Keyword::FROM) {
8260 self.expect_keyword(Keyword::FROM)?;
8261 FetchPosition::From
8262 } else if self.peek_keyword(Keyword::IN) {
8263 self.expect_keyword(Keyword::IN)?;
8264 FetchPosition::In
8265 } else {
8266 return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
8267 };
8268
8269 let name = self.parse_identifier()?;
8270
8271 let into = if self.parse_keyword(Keyword::INTO) {
8272 Some(self.parse_object_name(false)?)
8273 } else {
8274 None
8275 };
8276
8277 Ok(Statement::Fetch {
8278 name,
8279 direction,
8280 position,
8281 into,
8282 })
8283 }
8284
8285 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8287 let object_type = if self.parse_keyword(Keyword::ALL) {
8288 DiscardObject::ALL
8289 } else if self.parse_keyword(Keyword::PLANS) {
8290 DiscardObject::PLANS
8291 } else if self.parse_keyword(Keyword::SEQUENCES) {
8292 DiscardObject::SEQUENCES
8293 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8294 DiscardObject::TEMP
8295 } else {
8296 return self.expected_ref(
8297 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8298 self.peek_token_ref(),
8299 );
8300 };
8301 Ok(Statement::Discard { object_type })
8302 }
8303
    /// Parse `CREATE [UNIQUE] INDEX ...` — the `INDEX` keyword is already
    /// consumed; `unique` records whether `UNIQUE` was present.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional when ON follows immediately; with
        // IF NOT EXISTS a name is always parsed.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // USING may also appear after the table name; when both occur,
        // the later one takes precedence (`Option::or`).
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Optional `INCLUDE (col, ...)` covering columns.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // Optional `NULLS [NOT] DISTINCT`; Some(true) means nulls distinct.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Dialect-gated `WITH (param, ...)` storage-parameter clause.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate: `WHERE expr`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing ALGORITHM/LOCK clauses are parsed as generic ALTER TABLE
        // operations.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8395
8396 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8398 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8399 let name = self.parse_identifier()?;
8400
8401 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8402 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8403 Some(self.parse_identifier()?)
8404 } else {
8405 None
8406 };
8407
8408 let version = if self.parse_keyword(Keyword::VERSION) {
8409 Some(self.parse_identifier()?)
8410 } else {
8411 None
8412 };
8413
8414 let cascade = self.parse_keyword(Keyword::CASCADE);
8415
8416 (schema, version, cascade)
8417 } else {
8418 (None, None, false)
8419 };
8420
8421 Ok(CreateExtension {
8422 name,
8423 if_not_exists,
8424 schema,
8425 version,
8426 cascade,
8427 })
8428 }
8429
8430 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8432 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8433 let name = self.parse_object_name(false)?;
8434
8435 let definition = if self.parse_keyword(Keyword::FROM) {
8436 CreateCollationDefinition::From(self.parse_object_name(false)?)
8437 } else if self.consume_token(&Token::LParen) {
8438 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8439 self.expect_token(&Token::RParen)?;
8440 CreateCollationDefinition::Options(options)
8441 } else {
8442 return self.expected_ref(
8443 "FROM or parenthesized option list after CREATE COLLATION name",
8444 self.peek_token_ref(),
8445 );
8446 };
8447
8448 Ok(CreateCollation {
8449 if_not_exists,
8450 name,
8451 definition,
8452 })
8453 }
8454
8455 pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
8457 if self.parse_keyword(Keyword::CONFIGURATION) {
8458 let name = self.parse_object_name(false)?;
8459 self.expect_token(&Token::LParen)?;
8460 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8461 self.expect_token(&Token::RParen)?;
8462 Ok(Statement::CreateTextSearchConfiguration(
8463 CreateTextSearchConfiguration { name, options },
8464 ))
8465 } else if self.parse_keyword(Keyword::DICTIONARY) {
8466 let name = self.parse_object_name(false)?;
8467 self.expect_token(&Token::LParen)?;
8468 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8469 self.expect_token(&Token::RParen)?;
8470 Ok(Statement::CreateTextSearchDictionary(
8471 CreateTextSearchDictionary { name, options },
8472 ))
8473 } else if self.parse_keyword(Keyword::PARSER) {
8474 let name = self.parse_object_name(false)?;
8475 self.expect_token(&Token::LParen)?;
8476 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8477 self.expect_token(&Token::RParen)?;
8478 Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
8479 name,
8480 options,
8481 }))
8482 } else if self.parse_keyword(Keyword::TEMPLATE) {
8483 let name = self.parse_object_name(false)?;
8484 self.expect_token(&Token::LParen)?;
8485 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8486 self.expect_token(&Token::RParen)?;
8487 Ok(Statement::CreateTextSearchTemplate(
8488 CreateTextSearchTemplate { name, options },
8489 ))
8490 } else {
8491 self.expected_ref(
8492 "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
8493 self.peek_token_ref(),
8494 )
8495 }
8496 }
8497
8498 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8500 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8501 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8502 let cascade_or_restrict =
8503 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8504 Ok(Statement::DropExtension(DropExtension {
8505 names,
8506 if_exists,
8507 cascade_or_restrict: cascade_or_restrict
8508 .map(|k| match k {
8509 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8510 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8511 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8512 })
8513 .transpose()?,
8514 }))
8515 }
8516
8517 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8520 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8521 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8522 let drop_behavior = self.parse_optional_drop_behavior();
8523 Ok(Statement::DropOperator(DropOperator {
8524 if_exists,
8525 operators,
8526 drop_behavior,
8527 }))
8528 }
8529
8530 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8533 let name = self.parse_operator_name()?;
8534 self.expect_token(&Token::LParen)?;
8535
8536 let left_type = if self.parse_keyword(Keyword::NONE) {
8538 None
8539 } else {
8540 Some(self.parse_data_type()?)
8541 };
8542
8543 self.expect_token(&Token::Comma)?;
8544
8545 let right_type = self.parse_data_type()?;
8547
8548 self.expect_token(&Token::RParen)?;
8549
8550 Ok(DropOperatorSignature {
8551 name,
8552 left_type,
8553 right_type,
8554 })
8555 }
8556
8557 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8561 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8562 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8563 self.expect_keyword(Keyword::USING)?;
8564 let using = self.parse_identifier()?;
8565 let drop_behavior = self.parse_optional_drop_behavior();
8566 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8567 if_exists,
8568 names,
8569 using,
8570 drop_behavior,
8571 }))
8572 }
8573
8574 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8578 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8579 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8580 self.expect_keyword(Keyword::USING)?;
8581 let using = self.parse_identifier()?;
8582 let drop_behavior = self.parse_optional_drop_behavior();
8583 Ok(Statement::DropOperatorClass(DropOperatorClass {
8584 if_exists,
8585 names,
8586 using,
8587 drop_behavior,
8588 }))
8589 }
8590
8591 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8595 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8596 self.expect_token(&Token::LParen)?;
8597 let columns =
8598 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8599 self.expect_token(&Token::RParen)?;
8600 Ok(HiveDistributionStyle::PARTITIONED { columns })
8601 } else {
8602 Ok(HiveDistributionStyle::NONE)
8603 }
8604 }
8605
8606 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8610 let token = self.next_token();
8611 match &token.token {
8612 Token::Word(w) => match w.keyword {
8613 Keyword::AUTO => Ok(DistStyle::Auto),
8614 Keyword::EVEN => Ok(DistStyle::Even),
8615 Keyword::KEY => Ok(DistStyle::Key),
8616 Keyword::ALL => Ok(DistStyle::All),
8617 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8618 },
8619 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8620 }
8621 }
8622
    /// Parse the Hive storage clauses that may follow a `CREATE TABLE`:
    /// `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'`, and
    /// `WITH SERDEPROPERTIES (...)`, in any order and combination.
    ///
    /// Returns `None` when none of the clauses is present, otherwise a
    /// [`HiveFormat`] populated with whichever clauses were found.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        // Lazily created on the first clause so that "no clause at all"
        // round-trips as `None`.
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    // ROW FORMAT SERDE '...' | ROW FORMAT DELIMITED ...
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // STORED AS INPUTFORMAT '...' OUTPUTFORMAT '...'
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // STORED AS <file format>, e.g. a bare format name.
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Step back so the whole `WITH SERDEPROPERTIES (...)`
                    // clause can be parsed as a single options list.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // `WITH` did not start SERDEPROPERTIES — leave it for
                        // the caller and stop collecting clauses.
                        break;
                    }
                }
                None => break,
                _ => break,
            }
        }

        Ok(hive_format)
    }
8678
    /// Parse a Hive `ROW FORMAT` clause, the leading `ROW` keyword having
    /// already been consumed: either `ROW FORMAT SERDE '<class>'` or
    /// `ROW FORMAT DELIMITED` followed by any number of delimiter
    /// sub-clauses (`FIELDS TERMINATED BY ...`, `LINES TERMINATED BY ...`, etc.).
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED: collect delimiter sub-clauses until no more match.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        // NOTE: the match guards below consume tokens as a
                        // side effect, so clause keywords are only committed
                        // when their full introducer sequence is present.
                        Some(Keyword::FIELDS)
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::FieldsTerminatedBy,
                                char: self.parse_identifier()?,
                            });

                            // FIELDS may carry a trailing `ESCAPED BY` clause.
                            if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsEscapedBy,
                                    char: self.parse_identifier()?,
                                });
                            }
                        }
                        Some(Keyword::COLLECTION)
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::MAP)
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::LINES)
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::LinesTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        Some(Keyword::NULL)
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::NullDefinedAs,
                                char: self.parse_identifier()?,
                            });
                        }
                        _ => {
                            // No further delimiter clause — stop collecting.
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8765
8766 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8767 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8768 Ok(Some(self.parse_identifier()?))
8769 } else {
8770 Ok(None)
8771 }
8772 }
8773
    /// Parse a `CREATE TABLE` statement, assuming `CREATE [OR REPLACE]
    /// [TEMPORARY | GLOBAL | TRANSIENT] TABLE` has already been consumed —
    /// those flags arrive as arguments.
    ///
    /// Handles a union of dialect-specific clauses (Hive, ClickHouse,
    /// BigQuery, Redshift, Postgres partitioning, SQLite `WITHOUT ROWID` /
    /// `STRICT`, ...). The statement order below mirrors the order in which
    /// the clauses may appear, so each step consumes tokens exactly once.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<CreateTable, ParserError> {
        // BigQuery allows unquoted hyphens in (project-qualified) table names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // Postgres declarative partitioning: `CREATE TABLE ... PARTITION OF parent`.
        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        // `LIKE`/`ILIKE other_table`, plain or parenthesized form.
        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // Snowflake-style `CLONE other_table`; a parse failure here is treated
        // as "no clone source" rather than an error.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        let (columns, constraints) = self.parse_columns()?;
        // Hive allows a table COMMENT directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // A `PARTITION OF` table must specify its bounds (`FOR VALUES ...`)
        // or be the `DEFAULT` partition.
        let for_values = if partition_of.is_some() {
            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
                Some(self.parse_partition_for_values()?)
            } else {
                return self.expected_ref(
                    "FOR VALUES or DEFAULT after PARTITION OF",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        // SQLite: `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive-specific storage/distribution clauses.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        // INHERITS / WITH / TBLPROPERTIES / PARTITION BY / CLUSTER BY /
        // OPTIONS / plain options, collected into one configuration struct.
        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse table-level `PRIMARY KEY <expr>`.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // `ORDER BY` — either a parenthesized (possibly empty) expression
        // list or a single bare expression.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token_ref().token != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        // Temporary-table `ON COMMIT` behavior.
        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite `STRICT` tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // Redshift `BACKUP { YES | NO }`.
        let backup = if self.parse_keyword(Keyword::BACKUP) {
            let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
            Some(keyword == Keyword::YES)
        } else {
            None
        };

        // Redshift distribution/sort clauses.
        let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
            Some(self.parse_dist_style()?)
        } else {
            None
        };
        let distkey = if self.parse_keyword(Keyword::DISTKEY) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };
        let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        // `AS <query>` — or, in dialects that support it, a bare `SELECT`
        // (re-wound so the full query is parsed from its first token).
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .backup(backup)
            .diststyle(diststyle)
            .distkey(distkey)
            .sortkey(sortkey)
            .build())
    }
8958
    /// Parse an optional `LIKE`/`ILIKE` clause of `CREATE TABLE`: either the
    /// plain form `LIKE other_table`, or — in dialects that support it — the
    /// parenthesized form `(LIKE other_table [INCLUDING | EXCLUDING DEFAULTS])`.
    ///
    /// Returns `Ok(None)` and leaves the parser position unchanged when no
    /// such clause is present.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                // Optional `INCLUDING | EXCLUDING DEFAULTS` modifier.
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The `(` belonged to something else (e.g. a column list);
                // put it back and report "no LIKE clause".
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8996
8997 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8998 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8999 Ok(OnCommit::DeleteRows)
9000 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
9001 Ok(OnCommit::PreserveRows)
9002 } else if self.parse_keywords(&[Keyword::DROP]) {
9003 Ok(OnCommit::Drop)
9004 } else {
9005 parser_err!(
9006 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
9007 self.peek_token_ref()
9008 )
9009 }
9010 }
9011
    /// Parse the partition bound specification that follows `PARTITION OF`
    /// (Postgres syntax): `DEFAULT`, `FOR VALUES IN (...)`,
    /// `FOR VALUES FROM (...) TO (...)`, or
    /// `FOR VALUES WITH (MODULUS n, REMAINDER r)`.
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        // The DEFAULT partition has no bound list at all.
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            // List partitioning: at least one value is required.
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            // Range partitioning: FROM (...) TO (...), each side non-empty.
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            // Hash partitioning: WITH (MODULUS n, REMAINDER r).
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
        }
    }
9063
9064 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9066 if self.parse_keyword(Keyword::MINVALUE) {
9067 Ok(PartitionBoundValue::MinValue)
9068 } else if self.parse_keyword(Keyword::MAXVALUE) {
9069 Ok(PartitionBoundValue::MaxValue)
9070 } else {
9071 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9072 }
9073 }
9074
    /// Parse the assorted post-column clauses of `CREATE TABLE` collected into
    /// a [`CreateTableConfiguration`]: `INHERITS (...)`, `WITH (...)`,
    /// `TBLPROPERTIES (...)`, `PARTITION BY`, BigQuery `CLUSTER BY` /
    /// `OPTIONS (...)`, and MySQL-style plain options.
    ///
    /// Only one kind of `table_options` survives: later clauses in the
    /// sequence below overwrite earlier ones, and plain options are tried
    /// only when nothing else set `table_options`.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // Postgres `INHERITS (parent [, ...])` — the list is mandatory here.
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // `WITH (...)` storage parameters.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive `TBLPROPERTIES (...)` — takes precedence over WITH if both occur.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // BigQuery-style `CLUSTER BY` and `OPTIONS (...)`.
        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to MySQL-style plain options (`ENGINE=...`, etc.) when no
        // other option clause was found; Hive never uses these.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
9139
    /// Parse a single "plain" (MySQL-style) table option such as
    /// `ENGINE = InnoDB`, `COMMENT [=] '...'`, `TABLESPACE ... [STORAGE ...]`,
    /// `UNION = (t1, t2)`, or one of the many `key [=] value` options
    /// (`AUTO_INCREMENT`, `DEFAULT CHARSET`, `MAX_ROWS`, ...).
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start any known
    /// option; the option forms with special value grammars are handled
    /// first, and the generic `key [=] value` forms come last.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // `START TRANSACTION` can appear in this position and is kept verbatim.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // `COMMENT [=] '...'`; whether `=` was present is preserved so the
        // statement round-trips exactly.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // `ENGINE [=] name [(param, ...)]`.
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // `TABLESPACE [=] name [STORAGE [=] {DISK | MEMORY}]`.
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                // The tablespace name may be a bare word or a quoted string.
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // `UNION [=] (t1, t2, ...)` for MERGE tables.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Generic `key [=] value` options. Multi-word keys are tried before
        // their single-word prefixes (e.g. `DEFAULT CHARSET` before `CHARSET`).
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option — tell the caller to stop.
            return Ok(None);
        };

        // The `=` between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // Value: a literal if one parses, otherwise a bare identifier.
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9332
9333 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9335 let mut options = Vec::new();
9336
9337 while let Some(option) = self.parse_plain_option()? {
9338 options.push(option);
9339 let _ = self.consume_token(&Token::Comma);
9342 }
9343
9344 Ok(options)
9345 }
9346
9347 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9349 let comment = if self.parse_keyword(Keyword::COMMENT) {
9350 let has_eq = self.consume_token(&Token::Eq);
9351 let comment = self.parse_comment_value()?;
9352 Some(if has_eq {
9353 CommentDef::WithEq(comment)
9354 } else {
9355 CommentDef::WithoutEq(comment)
9356 })
9357 } else {
9358 None
9359 };
9360 Ok(comment)
9361 }
9362
9363 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9365 let next_token = self.next_token();
9366 let value = match next_token.token {
9367 Token::SingleQuotedString(str) => str,
9368 Token::DollarQuotedString(str) => str.value,
9369 _ => self.expected("string literal", next_token)?,
9370 };
9371 Ok(value)
9372 }
9373
    /// Parse an optional parenthesized procedure parameter list.
    ///
    /// Returns `Some(vec![])` both when there is no `(` at all and when the
    /// list is empty (`()`).
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // Only a word can start a parameter definition.
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            // After each parameter either a `,` continues the list or a `)`
            // ends it; anything else is an error.
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9399
    /// Parse the parenthesized body of a `CREATE TABLE`: column definitions
    /// and table-level constraints, in any interleaving.
    ///
    /// Returns empty vectors when there is no `(` at all (e.g.
    /// `CREATE TABLE t AS SELECT ...`) or when the parentheses are empty.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Constraints are tried first; otherwise a word starts a column.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Close the list on `)`. A `,` immediately before the `)` is only
            // allowed when the dialect (or parser options) permit trailing
            // commas; otherwise the loop re-enters and errors out above.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9440
9441 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9443 let mode = if self.parse_keyword(Keyword::IN) {
9444 Some(ArgMode::In)
9445 } else if self.parse_keyword(Keyword::OUT) {
9446 Some(ArgMode::Out)
9447 } else if self.parse_keyword(Keyword::INOUT) {
9448 Some(ArgMode::InOut)
9449 } else {
9450 None
9451 };
9452 let name = self.parse_identifier()?;
9453 let data_type = self.parse_data_type()?;
9454 let default = if self.consume_token(&Token::Eq) {
9455 Some(self.parse_expr()?)
9456 } else {
9457 None
9458 };
9459
9460 Ok(ProcedureParam {
9461 name,
9462 data_type,
9463 mode,
9464 default,
9465 })
9466 }
9467
    /// Parse one column definition with a mandatory data type; thin wrapper
    /// around [`Self::parse_column_def_inner`] with `optional_data_type = false`.
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9472
    /// Parse one column definition: `<name> [<data type>] [<options>...]`.
    ///
    /// With `optional_data_type` set (used for Hive partition columns, see
    /// `parse_hive_distribution`) a missing type is tolerated and recorded as
    /// [`DataType::Unspecified`]; SQLite also allows the type to be omitted.
    fn parse_column_def_inner(
        &mut self,
        optional_data_type: bool,
    ) -> Result<ColumnDef, ParserError> {
        let col_name = self.parse_identifier()?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else if optional_data_type {
            // Speculatively parse a type; fall back to "unspecified".
            self.maybe_parse(|parser| parser.parse_data_type())?
                .unwrap_or(DataType::Unspecified)
        } else {
            self.parse_data_type()?
        };
        // Collect column options, each optionally prefixed by
        // `CONSTRAINT <name>`, until none matches.
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    // A `CONSTRAINT <name>` must be followed by a real option.
                    return self.expected_ref(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token_ref(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                break;
            };
        }
        Ok(ColumnDef {
            name: col_name,
            data_type,
            options,
        })
    }
9510
9511 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9512 if dialect_of!(self is SQLiteDialect) {
9513 match &self.peek_token_ref().token {
9514 Token::Word(word) => matches!(
9515 word.keyword,
9516 Keyword::CONSTRAINT
9517 | Keyword::PRIMARY
9518 | Keyword::NOT
9519 | Keyword::UNIQUE
9520 | Keyword::CHECK
9521 | Keyword::DEFAULT
9522 | Keyword::COLLATE
9523 | Keyword::REFERENCES
9524 | Keyword::GENERATED
9525 | Keyword::AS
9526 ),
9527 _ => true, }
9529 } else {
9530 false
9531 }
9532 }
9533
9534 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9536 if let Some(option) = self.dialect.parse_column_option(self)? {
9537 return option;
9538 }
9539
9540 self.with_state(
9541 ColumnDefinition,
9542 |parser| -> Result<Option<ColumnOption>, ParserError> {
9543 parser.parse_optional_column_option_inner()
9544 },
9545 )
9546 }
9547
    /// Parses a single column option (e.g. `NOT NULL`, `DEFAULT <expr>`,
    /// `PRIMARY KEY`, `REFERENCES ...`), returning `Ok(None)` when the next
    /// tokens do not start any recognized option.
    ///
    /// NOTE(review): several dialect-gated branches call `parse_keyword`
    /// *before* the `dialect_of!` test, so the keyword token is consumed even
    /// when the dialect check then fails; the branch order here is therefore
    /// significant and must not be rearranged casually.
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // EPHEMERAL's default expression is optional: it is absent when
            // the column definition ends right after the keyword.
            if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            // Dialects with `supports_key_column_option` accept `UNIQUE KEY`
            // and the trailing KEY is recorded for round-trip display.
            let index_type_display =
                if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
                    KeyOrIndexDisplay::Key
                } else {
                    KeyOrIndexDisplay::None
                };
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
            // A bare `KEY` column option is parsed as a primary-key constraint.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            // MATCH / ON DELETE / ON UPDATE may appear in any order, each at
            // most once; the loop stops at the first unrecognized token.
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None, index_name: None, columns: vec![], foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The check expression is parsed in the Normal parser state, not
            // the ColumnDefinition state set up by the caller.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;

            let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                Some(true)
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                Some(false)
            } else {
                None
            };

            Ok(Some(
                CheckConstraint {
                    name: None, expr: Box::new(expr),
                    enforced,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // Push OPTIONS back so `parse_options` can consume it itself.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // Optional `(seed, increment)` argument pair.
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
9783
9784 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9785 let name = self.parse_object_name(false)?;
9786 self.expect_token(&Token::Eq)?;
9787 let value = self.parse_literal_string()?;
9788
9789 Ok(Tag::new(name, value))
9790 }
9791
    /// Parses the remainder of a column option that started with the
    /// `GENERATED` keyword (already consumed by the caller):
    /// `ALWAYS AS IDENTITY [(...)]`, `BY DEFAULT AS IDENTITY [(...)]`, or
    /// `ALWAYS AS (<expr>) [STORED|VIRTUAL]`.
    /// Returns `Ok(None)` when none of these forms follows.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            // Optional parenthesized sequence options; `expect_token(..).is_ok()`
            // is used here as an "optionally consume `(`" check.
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            // Same optional sequence-options handling as the ALWAYS branch.
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // The generation expression is parsed in the Normal parser
                // state rather than the ColumnDefinition state.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // PostgreSQL rejects a generated column without STORED.
                    self.expected_ref("STORED", self.peek_token_ref())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
9858
9859 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9860 self.expect_token(&Token::LParen)?;
9862 let expr = self.parse_expr()?;
9863 self.expect_token(&Token::RParen)?;
9864
9865 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9866 (
9867 GeneratedAs::ExpStored,
9868 Some(GeneratedExpressionMode::Stored),
9869 )
9870 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9871 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9872 } else {
9873 (GeneratedAs::Always, None)
9874 };
9875
9876 Ok(Some(ColumnOption::Generated {
9877 generated_as: gen_as,
9878 sequence_options: None,
9879 generation_expr: Some(expr),
9880 generation_expr_mode: expr_mode,
9881 generated_keyword: false,
9882 }))
9883 }
9884
9885 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9887 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9888 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9889 {
9890 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9891
9892 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9893 self.expect_token(&Token::LParen)?;
9894 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9895 self.expect_token(&Token::RParen)?;
9896 Some(sorted_by_columns)
9897 } else {
9898 None
9899 };
9900
9901 self.expect_keyword_is(Keyword::INTO)?;
9902 let num_buckets = self.parse_number_value()?.value;
9903 self.expect_keyword_is(Keyword::BUCKETS)?;
9904 Some(ClusteredBy {
9905 columns,
9906 sorted_by,
9907 num_buckets,
9908 })
9909 } else {
9910 None
9911 };
9912 Ok(clustered_by)
9913 }
9914
9915 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9919 if self.parse_keyword(Keyword::RESTRICT) {
9920 Ok(ReferentialAction::Restrict)
9921 } else if self.parse_keyword(Keyword::CASCADE) {
9922 Ok(ReferentialAction::Cascade)
9923 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9924 Ok(ReferentialAction::SetNull)
9925 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9926 Ok(ReferentialAction::NoAction)
9927 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9928 Ok(ReferentialAction::SetDefault)
9929 } else {
9930 self.expected_ref(
9931 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9932 self.peek_token_ref(),
9933 )
9934 }
9935 }
9936
9937 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9939 if self.parse_keyword(Keyword::FULL) {
9940 Ok(ConstraintReferenceMatchKind::Full)
9941 } else if self.parse_keyword(Keyword::PARTIAL) {
9942 Ok(ConstraintReferenceMatchKind::Partial)
9943 } else if self.parse_keyword(Keyword::SIMPLE) {
9944 Ok(ConstraintReferenceMatchKind::Simple)
9945 } else {
9946 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9947 }
9948 }
9949
9950 fn parse_constraint_using_index(
9953 &mut self,
9954 name: Option<Ident>,
9955 ) -> Result<ConstraintUsingIndex, ParserError> {
9956 let index_name = self.parse_identifier()?;
9957 let characteristics = self.parse_constraint_characteristics()?;
9958 Ok(ConstraintUsingIndex {
9959 name,
9960 index_name,
9961 characteristics,
9962 })
9963 }
9964
9965 pub fn parse_constraint_characteristics(
9967 &mut self,
9968 ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9969 let mut cc = ConstraintCharacteristics::default();
9970
9971 loop {
9972 if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9973 {
9974 cc.deferrable = Some(false);
9975 } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9976 cc.deferrable = Some(true);
9977 } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9978 if self.parse_keyword(Keyword::DEFERRED) {
9979 cc.initially = Some(DeferrableInitial::Deferred);
9980 } else if self.parse_keyword(Keyword::IMMEDIATE) {
9981 cc.initially = Some(DeferrableInitial::Immediate);
9982 } else {
9983 self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
9984 }
9985 } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
9986 cc.enforced = Some(true);
9987 } else if cc.enforced.is_none()
9988 && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
9989 {
9990 cc.enforced = Some(false);
9991 } else {
9992 break;
9993 }
9994 }
9995
9996 if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
9997 Ok(Some(cc))
9998 } else {
9999 Ok(None)
10000 }
10001 }
10002
    /// Parses an optional table-level constraint:
    /// `[CONSTRAINT <name>] {UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK |
    /// INDEX/KEY | FULLTEXT/SPATIAL | EXCLUDE} ...`.
    /// Returns `Ok(None)` — consuming nothing — when no constraint follows.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            // Some dialects allow the `CONSTRAINT` keyword to be followed
            // directly by the constraint body, without a constraint name.
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // FULLTEXT/SPATIAL constraints are only handled for MySQL-like
        // dialects; bail out early for others so the tokens can be parsed
        // as something else by the caller.
        if name.is_none()
            && self
                .peek_one_of_keywords(&[Keyword::FULLTEXT, Keyword::SPATIAL])
                .is_some()
            && !dialect_of!(self is GenericDialect | MySqlDialect)
        {
            return Ok(None);
        }

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX <name>` references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // A trailing `KEY`/`INDEX` display keyword is only valid in
                // MySQL-like dialects.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // The `KEY` in `PRIMARY KEY` is mandatory.
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX <name>` references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order, each
                // at most once; the loop stops at the first unknown token.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // Optional `[NOT] ENFORCED` suffix.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // Shadowed `name`: this is the index name (the guard above
                // guarantees no constraint name was given). A following
                // `USING` means there is no index name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // FULLTEXT/SPATIAL constraints must not carry a constraint name.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // Optional index access method: `USING <method>`.
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements = self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                // Optional `INCLUDE (<columns>)` clause.
                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                // Optional parenthesized `WHERE (<predicate>)` clause.
                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // A `CONSTRAINT <name>` prefix must be followed by a
                    // constraint body.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint at all: push back the consumed token.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10291
10292 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10293 let expr = self.parse_expr()?;
10294 self.expect_keyword_is(Keyword::WITH)?;
10295 let operator_token = self.next_token();
10296 let operator = operator_token.token.to_string();
10297 Ok(ExclusionElement { expr, operator })
10298 }
10299
10300 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10301 Ok(if self.parse_keyword(Keyword::NULLS) {
10302 let not = self.parse_keyword(Keyword::NOT);
10303 self.expect_keyword_is(Keyword::DISTINCT)?;
10304 if not {
10305 NullsDistinctOption::NotDistinct
10306 } else {
10307 NullsDistinctOption::Distinct
10308 }
10309 } else {
10310 NullsDistinctOption::None
10311 })
10312 }
10313
10314 pub fn maybe_parse_options(
10316 &mut self,
10317 keyword: Keyword,
10318 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10319 if let Token::Word(word) = &self.peek_token_ref().token {
10320 if word.keyword == keyword {
10321 return Ok(Some(self.parse_options(keyword)?));
10322 }
10323 };
10324 Ok(None)
10325 }
10326
10327 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10329 if self.parse_keyword(keyword) {
10330 self.expect_token(&Token::LParen)?;
10331 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10332 self.expect_token(&Token::RParen)?;
10333 Ok(options)
10334 } else {
10335 Ok(vec![])
10336 }
10337 }
10338
10339 pub fn parse_options_with_keywords(
10341 &mut self,
10342 keywords: &[Keyword],
10343 ) -> Result<Vec<SqlOption>, ParserError> {
10344 if self.parse_keywords(keywords) {
10345 self.expect_token(&Token::LParen)?;
10346 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10347 self.expect_token(&Token::RParen)?;
10348 Ok(options)
10349 } else {
10350 Ok(vec![])
10351 }
10352 }
10353
10354 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10356 Ok(if self.parse_keyword(Keyword::BTREE) {
10357 IndexType::BTree
10358 } else if self.parse_keyword(Keyword::HASH) {
10359 IndexType::Hash
10360 } else if self.parse_keyword(Keyword::GIN) {
10361 IndexType::GIN
10362 } else if self.parse_keyword(Keyword::GIST) {
10363 IndexType::GiST
10364 } else if self.parse_keyword(Keyword::SPGIST) {
10365 IndexType::SPGiST
10366 } else if self.parse_keyword(Keyword::BRIN) {
10367 IndexType::BRIN
10368 } else if self.parse_keyword(Keyword::BLOOM) {
10369 IndexType::Bloom
10370 } else {
10371 IndexType::Custom(self.parse_identifier()?)
10372 })
10373 }
10374
10375 pub fn parse_optional_using_then_index_type(
10382 &mut self,
10383 ) -> Result<Option<IndexType>, ParserError> {
10384 if self.parse_keyword(Keyword::USING) {
10385 Ok(Some(self.parse_index_type()?))
10386 } else {
10387 Ok(None)
10388 }
10389 }
10390
10391 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10395 self.maybe_parse(|parser| parser.parse_identifier())
10396 }
10397
10398 #[must_use]
10399 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10401 if self.parse_keyword(Keyword::KEY) {
10402 KeyOrIndexDisplay::Key
10403 } else if self.parse_keyword(Keyword::INDEX) {
10404 KeyOrIndexDisplay::Index
10405 } else {
10406 KeyOrIndexDisplay::None
10407 }
10408 }
10409
10410 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10412 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10413 Ok(Some(IndexOption::Using(index_type)))
10414 } else if self.parse_keyword(Keyword::COMMENT) {
10415 let s = self.parse_literal_string()?;
10416 Ok(Some(IndexOption::Comment(s)))
10417 } else {
10418 Ok(None)
10419 }
10420 }
10421
10422 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10424 let mut options = Vec::new();
10425
10426 loop {
10427 match self.parse_optional_index_option()? {
10428 Some(index_option) => options.push(index_option),
10429 None => return Ok(options),
10430 }
10431 }
10432 }
10433
10434 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10436 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10437
10438 match &self.peek_token_ref().token {
10439 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10440 Ok(SqlOption::Ident(self.parse_identifier()?))
10441 }
10442 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10443 self.parse_option_partition()
10444 }
10445 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10446 self.parse_option_clustered()
10447 }
10448 _ => {
10449 let name = self.parse_identifier()?;
10450 self.expect_token(&Token::Eq)?;
10451 let value = self.parse_expr()?;
10452
10453 Ok(SqlOption::KeyValue { key: name, value })
10454 }
10455 }
10456 }
10457
10458 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
10460 if self.parse_keywords(&[
10461 Keyword::CLUSTERED,
10462 Keyword::COLUMNSTORE,
10463 Keyword::INDEX,
10464 Keyword::ORDER,
10465 ]) {
10466 Ok(SqlOption::Clustered(
10467 TableOptionsClustered::ColumnstoreIndexOrder(
10468 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10469 ),
10470 ))
10471 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
10472 Ok(SqlOption::Clustered(
10473 TableOptionsClustered::ColumnstoreIndex,
10474 ))
10475 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
10476 self.expect_token(&Token::LParen)?;
10477
10478 let columns = self.parse_comma_separated(|p| {
10479 let name = p.parse_identifier()?;
10480 let asc = p.parse_asc_desc();
10481
10482 Ok(ClusteredIndex { name, asc })
10483 })?;
10484
10485 self.expect_token(&Token::RParen)?;
10486
10487 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
10488 } else {
10489 Err(ParserError::ParserError(
10490 "invalid CLUSTERED sequence".to_string(),
10491 ))
10492 }
10493 }
10494
10495 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10497 self.expect_keyword_is(Keyword::PARTITION)?;
10498 self.expect_token(&Token::LParen)?;
10499 let column_name = self.parse_identifier()?;
10500
10501 self.expect_keyword_is(Keyword::RANGE)?;
10502 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10503 Some(PartitionRangeDirection::Left)
10504 } else if self.parse_keyword(Keyword::RIGHT) {
10505 Some(PartitionRangeDirection::Right)
10506 } else {
10507 None
10508 };
10509
10510 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10511 self.expect_token(&Token::LParen)?;
10512
10513 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10514
10515 self.expect_token(&Token::RParen)?;
10516 self.expect_token(&Token::RParen)?;
10517
10518 Ok(SqlOption::Partition {
10519 column_name,
10520 range_direction,
10521 for_values,
10522 })
10523 }
10524
10525 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10527 self.expect_token(&Token::LParen)?;
10528 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10529 self.expect_token(&Token::RParen)?;
10530 Ok(Partition::Partitions(partitions))
10531 }
10532
10533 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10535 self.expect_token(&Token::LParen)?;
10536 self.expect_keyword_is(Keyword::SELECT)?;
10537 let projection = self.parse_projection()?;
10538 let group_by = self.parse_optional_group_by()?;
10539 let order_by = self.parse_optional_order_by()?;
10540 self.expect_token(&Token::RParen)?;
10541 Ok(ProjectionSelect {
10542 projection,
10543 group_by,
10544 order_by,
10545 })
10546 }
10547 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10549 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10550 let name = self.parse_identifier()?;
10551 let query = self.parse_projection_select()?;
10552 Ok(AlterTableOperation::AddProjection {
10553 if_not_exists,
10554 name,
10555 select: query,
10556 })
10557 }
10558
10559 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10563 self.expect_keyword_is(Keyword::ALTER)?;
10564 self.expect_keyword_is(Keyword::SORTKEY)?;
10565 self.expect_token(&Token::LParen)?;
10566 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10567 self.expect_token(&Token::RParen)?;
10568 Ok(AlterTableOperation::AlterSortKey { columns })
10569 }
10570
10571 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10573 let operation = if self.parse_keyword(Keyword::ADD) {
10574 if let Some(constraint) = self.parse_optional_table_constraint()? {
10575 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10576 AlterTableOperation::AddConstraint {
10577 constraint,
10578 not_valid,
10579 }
10580 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10581 && self.parse_keyword(Keyword::PROJECTION)
10582 {
10583 return self.parse_alter_table_add_projection();
10584 } else {
10585 let if_not_exists =
10586 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10587 let mut new_partitions = vec![];
10588 loop {
10589 if self.parse_keyword(Keyword::PARTITION) {
10590 new_partitions.push(self.parse_partition()?);
10591 } else {
10592 break;
10593 }
10594 }
10595 if !new_partitions.is_empty() {
10596 AlterTableOperation::AddPartitions {
10597 if_not_exists,
10598 new_partitions,
10599 }
10600 } else {
10601 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10602
10603 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10604 {
10605 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10606 || if_not_exists
10607 } else {
10608 false
10609 };
10610
10611 let column_def = self.parse_column_def()?;
10612
10613 let column_position = self.parse_column_position()?;
10614
10615 AlterTableOperation::AddColumn {
10616 column_keyword,
10617 if_not_exists,
10618 column_def,
10619 column_position,
10620 }
10621 }
10622 }
10623 } else if self.parse_keyword(Keyword::RENAME) {
10624 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10625 let old_name = self.parse_identifier()?;
10626 self.expect_keyword_is(Keyword::TO)?;
10627 let new_name = self.parse_identifier()?;
10628 AlterTableOperation::RenameConstraint { old_name, new_name }
10629 } else if self.parse_keyword(Keyword::TO) {
10630 let table_name = self.parse_object_name(false)?;
10631 AlterTableOperation::RenameTable {
10632 table_name: RenameTableNameKind::To(table_name),
10633 }
10634 } else if self.parse_keyword(Keyword::AS) {
10635 let table_name = self.parse_object_name(false)?;
10636 AlterTableOperation::RenameTable {
10637 table_name: RenameTableNameKind::As(table_name),
10638 }
10639 } else {
10640 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10642 self.expect_keyword_is(Keyword::TO)?;
10643 let new_column_name = self.parse_identifier()?;
10644 AlterTableOperation::RenameColumn {
10645 old_column_name,
10646 new_column_name,
10647 }
10648 }
10649 } else if self.parse_keyword(Keyword::DISABLE) {
10650 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10651 AlterTableOperation::DisableRowLevelSecurity {}
10652 } else if self.parse_keyword(Keyword::RULE) {
10653 let name = self.parse_identifier()?;
10654 AlterTableOperation::DisableRule { name }
10655 } else if self.parse_keyword(Keyword::TRIGGER) {
10656 let name = self.parse_identifier()?;
10657 AlterTableOperation::DisableTrigger { name }
10658 } else {
10659 return self.expected_ref(
10660 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10661 self.peek_token_ref(),
10662 );
10663 }
10664 } else if self.parse_keyword(Keyword::ENABLE) {
10665 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10666 let name = self.parse_identifier()?;
10667 AlterTableOperation::EnableAlwaysRule { name }
10668 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10669 let name = self.parse_identifier()?;
10670 AlterTableOperation::EnableAlwaysTrigger { name }
10671 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10672 AlterTableOperation::EnableRowLevelSecurity {}
10673 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10674 let name = self.parse_identifier()?;
10675 AlterTableOperation::EnableReplicaRule { name }
10676 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10677 let name = self.parse_identifier()?;
10678 AlterTableOperation::EnableReplicaTrigger { name }
10679 } else if self.parse_keyword(Keyword::RULE) {
10680 let name = self.parse_identifier()?;
10681 AlterTableOperation::EnableRule { name }
10682 } else if self.parse_keyword(Keyword::TRIGGER) {
10683 let name = self.parse_identifier()?;
10684 AlterTableOperation::EnableTrigger { name }
10685 } else {
10686 return self.expected_ref(
10687 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10688 self.peek_token_ref(),
10689 );
10690 }
10691 } else if self.parse_keywords(&[
10692 Keyword::FORCE,
10693 Keyword::ROW,
10694 Keyword::LEVEL,
10695 Keyword::SECURITY,
10696 ]) {
10697 AlterTableOperation::ForceRowLevelSecurity
10698 } else if self.parse_keywords(&[
10699 Keyword::NO,
10700 Keyword::FORCE,
10701 Keyword::ROW,
10702 Keyword::LEVEL,
10703 Keyword::SECURITY,
10704 ]) {
10705 AlterTableOperation::NoForceRowLevelSecurity
10706 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10707 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10708 {
10709 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10710 let name = self.parse_identifier()?;
10711 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10712 Some(self.parse_identifier()?)
10713 } else {
10714 None
10715 };
10716 AlterTableOperation::ClearProjection {
10717 if_exists,
10718 name,
10719 partition,
10720 }
10721 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10722 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10723 {
10724 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10725 let name = self.parse_identifier()?;
10726 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10727 Some(self.parse_identifier()?)
10728 } else {
10729 None
10730 };
10731 AlterTableOperation::MaterializeProjection {
10732 if_exists,
10733 name,
10734 partition,
10735 }
10736 } else if self.parse_keyword(Keyword::DROP) {
10737 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10738 self.expect_token(&Token::LParen)?;
10739 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10740 self.expect_token(&Token::RParen)?;
10741 AlterTableOperation::DropPartitions {
10742 partitions,
10743 if_exists: true,
10744 }
10745 } else if self.parse_keyword(Keyword::PARTITION) {
10746 self.expect_token(&Token::LParen)?;
10747 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10748 self.expect_token(&Token::RParen)?;
10749 AlterTableOperation::DropPartitions {
10750 partitions,
10751 if_exists: false,
10752 }
10753 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10754 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10755 let name = self.parse_identifier()?;
10756 let drop_behavior = self.parse_optional_drop_behavior();
10757 AlterTableOperation::DropConstraint {
10758 if_exists,
10759 name,
10760 drop_behavior,
10761 }
10762 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10763 let drop_behavior = self.parse_optional_drop_behavior();
10764 AlterTableOperation::DropPrimaryKey { drop_behavior }
10765 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10766 let name = self.parse_identifier()?;
10767 let drop_behavior = self.parse_optional_drop_behavior();
10768 AlterTableOperation::DropForeignKey {
10769 name,
10770 drop_behavior,
10771 }
10772 } else if self.parse_keyword(Keyword::INDEX) {
10773 let name = self.parse_identifier()?;
10774 AlterTableOperation::DropIndex { name }
10775 } else if self.parse_keyword(Keyword::PROJECTION)
10776 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10777 {
10778 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10779 let name = self.parse_identifier()?;
10780 AlterTableOperation::DropProjection { if_exists, name }
10781 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10782 AlterTableOperation::DropClusteringKey
10783 } else {
10784 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10786 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10787 self.parse_comma_separated(Parser::parse_identifier)?
10788 } else {
10789 vec![self.parse_identifier()?]
10790 };
10791 let drop_behavior = self.parse_optional_drop_behavior();
10792 AlterTableOperation::DropColumn {
10793 has_column_keyword,
10794 column_names,
10795 if_exists,
10796 drop_behavior,
10797 }
10798 }
10799 } else if self.parse_keyword(Keyword::PARTITION) {
10800 self.expect_token(&Token::LParen)?;
10801 let before = self.parse_comma_separated(Parser::parse_expr)?;
10802 self.expect_token(&Token::RParen)?;
10803 self.expect_keyword_is(Keyword::RENAME)?;
10804 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10805 self.expect_token(&Token::LParen)?;
10806 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10807 self.expect_token(&Token::RParen)?;
10808 AlterTableOperation::RenamePartitions {
10809 old_partitions: before,
10810 new_partitions: renames,
10811 }
10812 } else if self.parse_keyword(Keyword::CHANGE) {
10813 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10815 let new_name = self.parse_identifier()?;
10816 let data_type = self.parse_data_type()?;
10817 let mut options = vec![];
10818 while let Some(option) = self.parse_optional_column_option()? {
10819 options.push(option);
10820 }
10821
10822 let column_position = self.parse_column_position()?;
10823
10824 AlterTableOperation::ChangeColumn {
10825 old_name,
10826 new_name,
10827 data_type,
10828 options,
10829 column_position,
10830 }
10831 } else if self.parse_keyword(Keyword::MODIFY) {
10832 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10834 let data_type = self.parse_data_type()?;
10835 let mut options = vec![];
10836 while let Some(option) = self.parse_optional_column_option()? {
10837 options.push(option);
10838 }
10839
10840 let column_position = self.parse_column_position()?;
10841
10842 AlterTableOperation::ModifyColumn {
10843 col_name,
10844 data_type,
10845 options,
10846 column_position,
10847 }
10848 } else if self.parse_keyword(Keyword::ALTER) {
10849 if self.peek_keyword(Keyword::SORTKEY) {
10850 self.prev_token();
10851 return self.parse_alter_sort_key();
10852 }
10853
10854 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10856 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10857
10858 let op: AlterColumnOperation = if self.parse_keywords(&[
10859 Keyword::SET,
10860 Keyword::NOT,
10861 Keyword::NULL,
10862 ]) {
10863 AlterColumnOperation::SetNotNull {}
10864 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10865 AlterColumnOperation::DropNotNull {}
10866 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10867 AlterColumnOperation::SetDefault {
10868 value: self.parse_expr()?,
10869 }
10870 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10871 AlterColumnOperation::DropDefault {}
10872 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10873 self.parse_set_data_type(true)?
10874 } else if self.parse_keyword(Keyword::TYPE) {
10875 self.parse_set_data_type(false)?
10876 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10877 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10878 Some(GeneratedAs::Always)
10879 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10880 Some(GeneratedAs::ByDefault)
10881 } else {
10882 None
10883 };
10884
10885 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10886
10887 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10888
10889 if self.peek_token_ref().token == Token::LParen {
10890 self.expect_token(&Token::LParen)?;
10891 sequence_options = Some(self.parse_create_sequence_options()?);
10892 self.expect_token(&Token::RParen)?;
10893 }
10894
10895 AlterColumnOperation::AddGenerated {
10896 generated_as,
10897 sequence_options,
10898 }
10899 } else {
10900 let message = if is_postgresql {
10901 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10902 } else {
10903 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10904 };
10905
10906 return self.expected_ref(message, self.peek_token_ref());
10907 };
10908 AlterTableOperation::AlterColumn { column_name, op }
10909 } else if self.parse_keyword(Keyword::SWAP) {
10910 self.expect_keyword_is(Keyword::WITH)?;
10911 let table_name = self.parse_object_name(false)?;
10912 AlterTableOperation::SwapWith { table_name }
10913 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10914 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10915 {
10916 let new_owner = self.parse_owner()?;
10917 AlterTableOperation::OwnerTo { new_owner }
10918 } else if dialect_of!(self is PostgreSqlDialect)
10919 && self.parse_keywords(&[Keyword::ATTACH, Keyword::PARTITION])
10920 {
10921 let partition_name = self.parse_object_name(false)?;
10922 let partition_bound = self.parse_partition_for_values()?;
10923 AlterTableOperation::AttachPartitionOf {
10924 partition_name,
10925 partition_bound,
10926 }
10927 } else if dialect_of!(self is PostgreSqlDialect)
10928 && self.parse_keywords(&[Keyword::DETACH, Keyword::PARTITION])
10929 {
10930 let partition_name = self.parse_object_name(false)?;
10931 let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
10932 let finalize = self.parse_keyword(Keyword::FINALIZE);
10933 AlterTableOperation::DetachPartitionOf {
10934 partition_name,
10935 concurrently,
10936 finalize,
10937 }
10938 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10939 && self.parse_keyword(Keyword::ATTACH)
10940 {
10941 AlterTableOperation::AttachPartition {
10942 partition: self.parse_part_or_partition()?,
10943 }
10944 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10945 && self.parse_keyword(Keyword::DETACH)
10946 {
10947 AlterTableOperation::DetachPartition {
10948 partition: self.parse_part_or_partition()?,
10949 }
10950 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10951 && self.parse_keyword(Keyword::FREEZE)
10952 {
10953 let partition = self.parse_part_or_partition()?;
10954 let with_name = if self.parse_keyword(Keyword::WITH) {
10955 self.expect_keyword_is(Keyword::NAME)?;
10956 Some(self.parse_identifier()?)
10957 } else {
10958 None
10959 };
10960 AlterTableOperation::FreezePartition {
10961 partition,
10962 with_name,
10963 }
10964 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10965 && self.parse_keyword(Keyword::UNFREEZE)
10966 {
10967 let partition = self.parse_part_or_partition()?;
10968 let with_name = if self.parse_keyword(Keyword::WITH) {
10969 self.expect_keyword_is(Keyword::NAME)?;
10970 Some(self.parse_identifier()?)
10971 } else {
10972 None
10973 };
10974 AlterTableOperation::UnfreezePartition {
10975 partition,
10976 with_name,
10977 }
10978 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10979 self.expect_token(&Token::LParen)?;
10980 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
10981 self.expect_token(&Token::RParen)?;
10982 AlterTableOperation::ClusterBy { exprs }
10983 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
10984 AlterTableOperation::SuspendRecluster
10985 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
10986 AlterTableOperation::ResumeRecluster
10987 } else if self.parse_keyword(Keyword::LOCK) {
10988 let equals = self.consume_token(&Token::Eq);
10989 let lock = match self.parse_one_of_keywords(&[
10990 Keyword::DEFAULT,
10991 Keyword::EXCLUSIVE,
10992 Keyword::NONE,
10993 Keyword::SHARED,
10994 ]) {
10995 Some(Keyword::DEFAULT) => AlterTableLock::Default,
10996 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
10997 Some(Keyword::NONE) => AlterTableLock::None,
10998 Some(Keyword::SHARED) => AlterTableLock::Shared,
10999 _ => self.expected_ref(
11000 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
11001 self.peek_token_ref(),
11002 )?,
11003 };
11004 AlterTableOperation::Lock { equals, lock }
11005 } else if self.parse_keyword(Keyword::ALGORITHM) {
11006 let equals = self.consume_token(&Token::Eq);
11007 let algorithm = match self.parse_one_of_keywords(&[
11008 Keyword::DEFAULT,
11009 Keyword::INSTANT,
11010 Keyword::INPLACE,
11011 Keyword::COPY,
11012 ]) {
11013 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
11014 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
11015 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
11016 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
11017 _ => self.expected_ref(
11018 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
11019 self.peek_token_ref(),
11020 )?,
11021 };
11022 AlterTableOperation::Algorithm { equals, algorithm }
11023 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
11024 let equals = self.consume_token(&Token::Eq);
11025 let value = self.parse_number_value()?;
11026 AlterTableOperation::AutoIncrement { equals, value }
11027 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
11028 let identity = if self.parse_keyword(Keyword::NOTHING) {
11029 ReplicaIdentity::Nothing
11030 } else if self.parse_keyword(Keyword::FULL) {
11031 ReplicaIdentity::Full
11032 } else if self.parse_keyword(Keyword::DEFAULT) {
11033 ReplicaIdentity::Default
11034 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
11035 ReplicaIdentity::Index(self.parse_identifier()?)
11036 } else {
11037 return self.expected_ref(
11038 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
11039 self.peek_token_ref(),
11040 );
11041 };
11042
11043 AlterTableOperation::ReplicaIdentity { identity }
11044 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
11045 let name = self.parse_identifier()?;
11046 AlterTableOperation::ValidateConstraint { name }
11047 } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
11048 let tablespace_name = self.parse_identifier()?;
11049 AlterTableOperation::SetTablespace { tablespace_name }
11050 } else {
11051 let mut options =
11052 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
11053 if !options.is_empty() {
11054 AlterTableOperation::SetTblProperties {
11055 table_properties: options,
11056 }
11057 } else {
11058 options = self.parse_options(Keyword::SET)?;
11059 if !options.is_empty() {
11060 AlterTableOperation::SetOptionsParens { options }
11061 } else {
11062 return self.expected_ref(
11063 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
11064 self.peek_token_ref(),
11065 );
11066 }
11067 }
11068 };
11069 Ok(operation)
11070 }
11071
11072 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
11073 let data_type = self.parse_data_type()?;
11074 let using = if self.dialect.supports_alter_column_type_using()
11075 && self.parse_keyword(Keyword::USING)
11076 {
11077 Some(self.parse_expr()?)
11078 } else {
11079 None
11080 };
11081 Ok(AlterColumnOperation::SetDataType {
11082 data_type,
11083 using,
11084 had_set,
11085 })
11086 }
11087
11088 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
11089 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
11090 match keyword {
11091 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
11092 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
11093 unexpected_keyword => Err(ParserError::ParserError(
11095 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
11096 )),
11097 }
11098 }
11099
    /// Parses an `ALTER <object_type> ...` statement, dispatching to the
    /// object-specific sub-parser based on the keyword that follows `ALTER`.
    ///
    /// The caller has already consumed the `ALTER` keyword.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
            Keyword::DOMAIN,
            Keyword::TRIGGER,
            Keyword::EXTENSION,
            Keyword::PROCEDURE,
            Keyword::DEFAULT,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // Rewind the two tokens just consumed — NOTE(review): this assumes
                // `parse_alter_schema` expects to re-parse from the `ALTER` keyword
                // itself; confirm against its definition.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            // `ALTER ICEBERG TABLE ...` — the mandatory TABLE keyword follows,
            // then table parsing proceeds with the iceberg flag set.
            Keyword::ICEBERG => {
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            // `ALTER DEFAULT PRIVILEGES ...`
            Keyword::DEFAULT => self.parse_alter_default_privileges().map(Into::into),
            Keyword::INDEX => {
                let index_name = self.parse_object_name(false)?;
                // Only `RENAME TO <name>` and `SET TABLESPACE <name>` are
                // supported after `ALTER INDEX <name>`.
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
                    let tablespace_name = self.parse_identifier()?;
                    AlterIndexOperation::SetTablespace { tablespace_name }
                } else {
                    return self.expected_ref(
                        "RENAME or SET TABLESPACE after ALTER INDEX",
                        self.peek_token_ref(),
                    );
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            // FUNCTION / AGGREGATE / PROCEDURE share one sub-parser, which is
            // told which flavor it is handling.
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::PROCEDURE => self.parse_alter_function(AlterFunctionKind::Procedure),
            Keyword::OPERATOR => {
                // Distinguish `ALTER OPERATOR FAMILY`, `ALTER OPERATOR CLASS`,
                // and plain `ALTER OPERATOR`.
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            Keyword::DOMAIN => self.parse_alter_domain(),
            Keyword::TRIGGER => self.parse_alter_trigger(),
            Keyword::EXTENSION => self.parse_alter_extension(),
            // Unreachable in practice: `expect_one_of_keywords` only returns
            // keywords from the list above. Kept as a defensive internal error.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR, DOMAIN, TRIGGER, EXTENSION, PROCEDURE, DEFAULT}}, got {unexpected_keyword:?}"),
            )),
        }
    }
11187
11188 fn parse_alter_aggregate_signature(
11189 &mut self,
11190 ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
11191 let name = self.parse_object_name(false)?;
11192 self.expect_token(&Token::LParen)?;
11193
11194 if self.consume_token(&Token::Mul) {
11195 self.expect_token(&Token::RParen)?;
11196 return Ok((
11197 FunctionDesc {
11198 name,
11199 args: Some(vec![]),
11200 },
11201 true,
11202 None,
11203 ));
11204 }
11205
11206 let args =
11207 if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
11208 vec![]
11209 } else {
11210 self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
11211 };
11212
11213 let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11214 Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
11215 } else {
11216 None
11217 };
11218
11219 self.expect_token(&Token::RParen)?;
11220 Ok((
11221 FunctionDesc {
11222 name,
11223 args: Some(args),
11224 },
11225 false,
11226 aggregate_order_by,
11227 ))
11228 }
11229
    /// Attempts to parse a single `ALTER FUNCTION` / `ALTER PROCEDURE` action
    /// (behavior, security, parallel safety, cost, `SET`/`RESET` of a
    /// configuration parameter, ...).
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start any known
    /// action, leaving the token stream untouched in that case. The branch
    /// order matters: multi-keyword forms are tried before their shorter
    /// prefixes (e.g. `EXTERNAL SECURITY` before bare `SECURITY`).
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            // `STRICT` is shorthand for `RETURNS NULL ON NULL INPUT`, but is
            // kept as a distinct variant to preserve the original spelling.
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // Once `NOT` is consumed, only `NOT LEAKPROOF` is valid; any other
            // token after `NOT` is a parse error here.
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // `[EXTERNAL] SECURITY { DEFINER | INVOKER }` — the `external` flag
            // records whether the optional EXTERNAL keyword was present.
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // `SET <param> { FROM CURRENT | { TO | = } { DEFAULT | <values> } }`
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // Either `=` or `TO` must separate the parameter from its value.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // `RESET { ALL | <config_name> }`
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // Not an action; the caller decides whether that is an error.
            None
        };

        Ok(action)
    }
11339
11340 fn parse_alter_function_actions(
11341 &mut self,
11342 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11343 let mut actions = vec![];
11344 while let Some(action) = self.parse_alter_function_action()? {
11345 actions.push(action);
11346 }
11347 if actions.is_empty() {
11348 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11349 }
11350 let restrict = self.parse_keyword(Keyword::RESTRICT);
11351 Ok((actions, restrict))
11352 }
11353
    /// Parses the body of `ALTER { FUNCTION | AGGREGATE | PROCEDURE } ...`,
    /// after the leading `ALTER <kind>` keywords have been consumed.
    ///
    /// For functions and procedures the target is a plain function descriptor;
    /// aggregates use their own signature form (which may be `(*)` and may
    /// carry `ORDER BY` arguments).
    pub fn parse_alter_function(
        &mut self,
        kind: AlterFunctionKind,
    ) -> Result<Statement, ParserError> {
        let (function, aggregate_star, aggregate_order_by) = match kind {
            AlterFunctionKind::Function | AlterFunctionKind::Procedure => {
                (self.parse_function_desc()?, false, None)
            }
            AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
        };

        // Operations shared by all kinds are tried first; the remaining
        // branches are gated on kind being Function/Procedure, so an aggregate
        // with an unrecognized operation falls through to the final error.
        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterFunctionOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterFunctionOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterFunctionOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) && self.parse_keyword(Keyword::NO)
        {
            // `NO DEPENDS ON EXTENSION <name>`
            if !self.parse_keyword(Keyword::DEPENDS) {
                return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
            }
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: true,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) && self.parse_keyword(Keyword::DEPENDS)
        {
            // `DEPENDS ON EXTENSION <name>`
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: false,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(
            kind,
            AlterFunctionKind::Function | AlterFunctionKind::Procedure
        ) {
            // Anything else for functions/procedures must be a list of actions
            // (IMMUTABLE, SECURITY, SET/RESET, ...).
            let (actions, restrict) = self.parse_alter_function_actions()?;
            AlterFunctionOperation::Actions { actions, restrict }
        } else {
            // Only reachable for aggregates, hence the aggregate-specific message.
            return self.expected_ref(
                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterFunction(AlterFunction {
            kind,
            function,
            aggregate_order_by,
            aggregate_star,
            operation,
        }))
    }
11419
11420 pub fn parse_alter_domain(&mut self) -> Result<Statement, ParserError> {
11422 let name = self.parse_object_name(false)?;
11423
11424 let operation = if self.parse_keyword(Keyword::ADD) {
11425 if let Some(constraint) = self.parse_optional_table_constraint()? {
11426 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
11427 AlterDomainOperation::AddConstraint {
11428 constraint,
11429 not_valid,
11430 }
11431 } else {
11432 return self.expected_ref("constraint after ADD", self.peek_token_ref());
11433 }
11434 } else if self.parse_keywords(&[Keyword::DROP, Keyword::CONSTRAINT]) {
11435 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11436 let name = self.parse_identifier()?;
11437 let drop_behavior = self.parse_optional_drop_behavior();
11438 AlterDomainOperation::DropConstraint {
11439 if_exists,
11440 name,
11441 drop_behavior,
11442 }
11443 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
11444 AlterDomainOperation::DropDefault
11445 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::CONSTRAINT]) {
11446 let old_name = self.parse_identifier()?;
11447 self.expect_keyword_is(Keyword::TO)?;
11448 let new_name = self.parse_identifier()?;
11449 AlterDomainOperation::RenameConstraint { old_name, new_name }
11450 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11451 let new_name = self.parse_identifier()?;
11452 AlterDomainOperation::RenameTo { new_name }
11453 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11454 AlterDomainOperation::OwnerTo(self.parse_owner()?)
11455 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11456 AlterDomainOperation::SetSchema {
11457 schema_name: self.parse_object_name(false)?,
11458 }
11459 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
11460 AlterDomainOperation::SetDefault {
11461 default: self.parse_expr()?,
11462 }
11463 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
11464 let name = self.parse_identifier()?;
11465 AlterDomainOperation::ValidateConstraint { name }
11466 } else {
11467 return self.expected_ref(
11468 "ADD, DROP, RENAME, OWNER TO, SET, VALIDATE after ALTER DOMAIN",
11469 self.peek_token_ref(),
11470 );
11471 };
11472
11473 Ok(AlterDomain { name, operation }.into())
11474 }
11475
11476 pub fn parse_alter_trigger(&mut self) -> Result<Statement, ParserError> {
11478 let name = self.parse_identifier()?;
11479 self.expect_keyword_is(Keyword::ON)?;
11480 let table_name = self.parse_object_name(false)?;
11481
11482 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11483 let new_name = self.parse_identifier()?;
11484 AlterTriggerOperation::RenameTo { new_name }
11485 } else {
11486 return self.expected_ref(
11487 "RENAME TO after ALTER TRIGGER ... ON ...",
11488 self.peek_token_ref(),
11489 );
11490 };
11491
11492 Ok(AlterTrigger {
11493 name,
11494 table_name,
11495 operation,
11496 }
11497 .into())
11498 }
11499
11500 pub fn parse_alter_extension(&mut self) -> Result<Statement, ParserError> {
11502 let name = self.parse_identifier()?;
11503
11504 let operation = if self.parse_keyword(Keyword::UPDATE) {
11505 let version = if self.parse_keyword(Keyword::TO) {
11506 Some(self.parse_identifier()?)
11507 } else {
11508 None
11509 };
11510 AlterExtensionOperation::UpdateTo { version }
11511 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11512 AlterExtensionOperation::SetSchema {
11513 schema_name: self.parse_object_name(false)?,
11514 }
11515 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11516 AlterExtensionOperation::OwnerTo(self.parse_owner()?)
11517 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11518 let new_name = self.parse_identifier()?;
11519 AlterExtensionOperation::RenameTo { new_name }
11520 } else {
11521 return self.expected_ref(
11522 "UPDATE, SET SCHEMA, OWNER TO, or RENAME TO after ALTER EXTENSION",
11523 self.peek_token_ref(),
11524 );
11525 };
11526
11527 Ok(AlterExtension { name, operation }.into())
11528 }
11529
11530 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11532 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11533 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11535 let on_cluster = self.parse_optional_on_cluster()?;
11536 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11537
11538 let mut location = None;
11539 if self.parse_keyword(Keyword::LOCATION) {
11540 location = Some(HiveSetLocation {
11541 has_set: false,
11542 location: self.parse_identifier()?,
11543 });
11544 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11545 location = Some(HiveSetLocation {
11546 has_set: true,
11547 location: self.parse_identifier()?,
11548 });
11549 }
11550
11551 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11552 self.peek_token_ref().clone()
11553 } else {
11554 self.get_current_token().clone()
11555 };
11556
11557 Ok(AlterTable {
11558 name: table_name,
11559 if_exists,
11560 only,
11561 operations,
11562 location,
11563 on_cluster,
11564 table_type: if iceberg {
11565 Some(AlterTableType::Iceberg)
11566 } else {
11567 None
11568 },
11569 end_token: AttachedToken(end_token),
11570 }
11571 .into())
11572 }
11573
11574 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11576 let name = self.parse_object_name(false)?;
11577 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11578
11579 let with_options = self.parse_options(Keyword::WITH)?;
11580
11581 self.expect_keyword_is(Keyword::AS)?;
11582 let query = self.parse_query()?;
11583
11584 Ok(Statement::AlterView {
11585 name,
11586 columns,
11587 query,
11588 with_options,
11589 })
11590 }
11591
    /// Parses the remainder of an `ALTER TYPE` statement; the `ALTER TYPE`
    /// keywords have already been consumed.
    ///
    /// Supported operations: RENAME TO, RENAME VALUE, RENAME ATTRIBUTE,
    /// ADD VALUE, ADD ATTRIBUTE, DROP ATTRIBUTE, ALTER ATTRIBUTE,
    /// OWNER TO, and SET SCHEMA. Each `parse_keywords` probe below
    /// backtracks fully on failure, so the order of the chain only matters
    /// for prefixes that overlap (e.g. the three RENAME forms).
    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            // RENAME TO <new_name>
            let new_name = self.parse_identifier()?;
            AlterTypeOperation::Rename(AlterTypeRename { new_name })
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
            // RENAME VALUE <existing> TO <new> (enum types)
            let existing_enum_value = self.parse_identifier()?;
            self.expect_keyword(Keyword::TO)?;
            let new_enum_value = self.parse_identifier()?;
            AlterTypeOperation::RenameValue(AlterTypeRenameValue {
                from: existing_enum_value,
                to: new_enum_value,
            })
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::ATTRIBUTE]) {
            // RENAME ATTRIBUTE <old> TO <new> [<drop behavior>]
            let old_name = self.parse_identifier()?;
            self.expect_keyword(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::RenameAttribute {
                old_name,
                new_name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
            // ADD VALUE [IF NOT EXISTS] <value> [BEFORE <v> | AFTER <v>]
            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
            let new_enum_value = self.parse_identifier()?;
            let position = if self.parse_keyword(Keyword::BEFORE) {
                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
            } else if self.parse_keyword(Keyword::AFTER) {
                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
            } else {
                None
            };
            AlterTypeOperation::AddValue(AlterTypeAddValue {
                if_not_exists,
                value: new_enum_value,
                position,
            })
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::ATTRIBUTE]) {
            // ADD ATTRIBUTE <name> <type> [COLLATE <coll>] [<drop behavior>]
            let attr_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::AddAttribute {
                name: attr_name,
                data_type,
                collation,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::ATTRIBUTE]) {
            // DROP ATTRIBUTE [IF EXISTS] <name> [<drop behavior>]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let attr_name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::DropAttribute {
                if_exists,
                name: attr_name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::ALTER, Keyword::ATTRIBUTE]) {
            // ALTER ATTRIBUTE <name> [SET DATA] TYPE <type> [COLLATE <coll>]
            let attr_name = self.parse_identifier()?;
            // The `SET DATA` prefix before `TYPE` is optional noise.
            let _ = self.parse_keywords(&[Keyword::SET, Keyword::DATA]);
            self.expect_keyword(Keyword::TYPE)?;
            let data_type = self.parse_data_type()?;
            let collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::AlterAttribute {
                name: attr_name,
                data_type,
                collation,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // OWNER TO <owner>
            let new_owner = self.parse_owner()?;
            AlterTypeOperation::OwnerTo { new_owner }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            // SET SCHEMA <schema>
            let new_schema = self.parse_object_name(false)?;
            AlterTypeOperation::SetSchema { new_schema }
        } else {
            return self.expected_ref(
                "{RENAME TO | RENAME VALUE | RENAME ATTRIBUTE | ADD VALUE | \
                ADD ATTRIBUTE | DROP ATTRIBUTE | ALTER ATTRIBUTE | OWNER TO | SET SCHEMA}",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterType(AlterType { name, operation }))
    }
11692
11693 pub fn parse_alter_default_privileges(
11700 &mut self,
11701 ) -> Result<AlterDefaultPrivileges, ParserError> {
11702 self.expect_keyword(Keyword::PRIVILEGES)?;
11703
11704 let for_roles = if self.parse_keyword(Keyword::FOR) {
11705 self.expect_one_of_keywords(&[Keyword::ROLE, Keyword::USER])?;
11707 self.parse_comma_separated(Parser::parse_identifier)?
11708 } else {
11709 Vec::new()
11710 };
11711
11712 let in_schemas = if self.parse_keywords(&[Keyword::IN, Keyword::SCHEMA]) {
11713 self.parse_comma_separated(Parser::parse_identifier)?
11714 } else {
11715 Vec::new()
11716 };
11717
11718 let action = self.parse_alter_default_privileges_action()?;
11719
11720 Ok(AlterDefaultPrivileges {
11721 for_roles,
11722 in_schemas,
11723 action,
11724 })
11725 }
11726
11727 fn parse_alter_default_privileges_action(
11728 &mut self,
11729 ) -> Result<AlterDefaultPrivilegesAction, ParserError> {
11730 let kw = self.expect_one_of_keywords(&[Keyword::GRANT, Keyword::REVOKE])?;
11731 match kw {
11732 Keyword::GRANT => {
11733 let privileges = self.parse_alter_default_privileges_privileges()?;
11734 self.expect_keyword(Keyword::ON)?;
11735 let object_type = self.parse_alter_default_privileges_object_type()?;
11736 self.expect_keyword(Keyword::TO)?;
11737 let grantees = self.parse_grantees()?;
11738 let with_grant_option =
11739 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
11740 Ok(AlterDefaultPrivilegesAction::Grant {
11741 privileges,
11742 object_type,
11743 grantees,
11744 with_grant_option,
11745 })
11746 }
11747 Keyword::REVOKE => {
11748 let grant_option_for =
11749 self.parse_keywords(&[Keyword::GRANT, Keyword::OPTION, Keyword::FOR]);
11750 let privileges = self.parse_alter_default_privileges_privileges()?;
11751 self.expect_keyword(Keyword::ON)?;
11752 let object_type = self.parse_alter_default_privileges_object_type()?;
11753 self.expect_keyword(Keyword::FROM)?;
11754 let grantees = self.parse_grantees()?;
11755 let cascade = self.parse_cascade_option();
11756 Ok(AlterDefaultPrivilegesAction::Revoke {
11757 grant_option_for,
11758 privileges,
11759 object_type,
11760 grantees,
11761 cascade,
11762 })
11763 }
11764 unexpected_keyword => Err(ParserError::ParserError(format!(
11765 "Internal parser error: expected GRANT or REVOKE, got {unexpected_keyword:?}"
11766 ))),
11767 }
11768 }
11769
11770 fn parse_alter_default_privileges_privileges(&mut self) -> Result<Privileges, ParserError> {
11771 if self.parse_keyword(Keyword::ALL) {
11772 Ok(Privileges::All {
11773 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
11774 })
11775 } else {
11776 Ok(Privileges::Actions(self.parse_actions_list()?))
11777 }
11778 }
11779
11780 fn parse_alter_default_privileges_object_type(
11781 &mut self,
11782 ) -> Result<AlterDefaultPrivilegesObjectType, ParserError> {
11783 let kw = self.expect_one_of_keywords(&[
11784 Keyword::TABLES,
11785 Keyword::SEQUENCES,
11786 Keyword::FUNCTIONS,
11787 Keyword::ROUTINES,
11788 Keyword::TYPES,
11789 Keyword::SCHEMAS,
11790 ])?;
11791 match kw {
11792 Keyword::TABLES => Ok(AlterDefaultPrivilegesObjectType::Tables),
11793 Keyword::SEQUENCES => Ok(AlterDefaultPrivilegesObjectType::Sequences),
11794 Keyword::FUNCTIONS => Ok(AlterDefaultPrivilegesObjectType::Functions),
11795 Keyword::ROUTINES => Ok(AlterDefaultPrivilegesObjectType::Routines),
11796 Keyword::TYPES => Ok(AlterDefaultPrivilegesObjectType::Types),
11797 Keyword::SCHEMAS => Ok(AlterDefaultPrivilegesObjectType::Schemas),
11798 unexpected_keyword => Err(ParserError::ParserError(format!(
11799 "Internal parser error: expected one of {{TABLES, SEQUENCES, FUNCTIONS, ROUTINES, TYPES, SCHEMAS}}, got {unexpected_keyword:?}"
11800 ))),
11801 }
11802 }
11803
11804 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11808 let name = self.parse_object_name(false)?;
11809 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11810 AlterCollationOperation::RenameTo {
11811 new_name: self.parse_identifier()?,
11812 }
11813 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11814 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11815 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11816 AlterCollationOperation::SetSchema {
11817 schema_name: self.parse_object_name(false)?,
11818 }
11819 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11820 AlterCollationOperation::RefreshVersion
11821 } else {
11822 return self.expected_ref(
11823 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11824 self.peek_token_ref(),
11825 );
11826 };
11827
11828 Ok(AlterCollation { name, operation })
11829 }
11830
11831 pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
11835 let name = self.parse_operator_name()?;
11836
11837 self.expect_token(&Token::LParen)?;
11839
11840 let left_type = if self.parse_keyword(Keyword::NONE) {
11841 None
11842 } else {
11843 Some(self.parse_data_type()?)
11844 };
11845
11846 self.expect_token(&Token::Comma)?;
11847 let right_type = self.parse_data_type()?;
11848 self.expect_token(&Token::RParen)?;
11849
11850 let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11852 let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
11853 Owner::CurrentRole
11854 } else if self.parse_keyword(Keyword::CURRENT_USER) {
11855 Owner::CurrentUser
11856 } else if self.parse_keyword(Keyword::SESSION_USER) {
11857 Owner::SessionUser
11858 } else {
11859 Owner::Ident(self.parse_identifier()?)
11860 };
11861 AlterOperatorOperation::OwnerTo(owner)
11862 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11863 let schema_name = self.parse_object_name(false)?;
11864 AlterOperatorOperation::SetSchema { schema_name }
11865 } else if self.parse_keyword(Keyword::SET) {
11866 self.expect_token(&Token::LParen)?;
11867
11868 let mut options = Vec::new();
11869 loop {
11870 let keyword = self.expect_one_of_keywords(&[
11871 Keyword::RESTRICT,
11872 Keyword::JOIN,
11873 Keyword::COMMUTATOR,
11874 Keyword::NEGATOR,
11875 Keyword::HASHES,
11876 Keyword::MERGES,
11877 ])?;
11878
11879 match keyword {
11880 Keyword::RESTRICT => {
11881 self.expect_token(&Token::Eq)?;
11882 let proc_name = if self.parse_keyword(Keyword::NONE) {
11883 None
11884 } else {
11885 Some(self.parse_object_name(false)?)
11886 };
11887 options.push(OperatorOption::Restrict(proc_name));
11888 }
11889 Keyword::JOIN => {
11890 self.expect_token(&Token::Eq)?;
11891 let proc_name = if self.parse_keyword(Keyword::NONE) {
11892 None
11893 } else {
11894 Some(self.parse_object_name(false)?)
11895 };
11896 options.push(OperatorOption::Join(proc_name));
11897 }
11898 Keyword::COMMUTATOR => {
11899 self.expect_token(&Token::Eq)?;
11900 let op_name = self.parse_operator_name()?;
11901 options.push(OperatorOption::Commutator(op_name));
11902 }
11903 Keyword::NEGATOR => {
11904 self.expect_token(&Token::Eq)?;
11905 let op_name = self.parse_operator_name()?;
11906 options.push(OperatorOption::Negator(op_name));
11907 }
11908 Keyword::HASHES => {
11909 options.push(OperatorOption::Hashes);
11910 }
11911 Keyword::MERGES => {
11912 options.push(OperatorOption::Merges);
11913 }
11914 unexpected_keyword => return Err(ParserError::ParserError(
11915 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
11916 )),
11917 }
11918
11919 if !self.consume_token(&Token::Comma) {
11920 break;
11921 }
11922 }
11923
11924 self.expect_token(&Token::RParen)?;
11925 AlterOperatorOperation::Set { options }
11926 } else {
11927 return self.expected_ref(
11928 "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
11929 self.peek_token_ref(),
11930 );
11931 };
11932
11933 Ok(AlterOperator {
11934 name,
11935 left_type,
11936 right_type,
11937 operation,
11938 })
11939 }
11940
11941 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11943 let strategy_number = self.parse_literal_uint()?;
11944 let operator_name = self.parse_operator_name()?;
11945
11946 self.expect_token(&Token::LParen)?;
11948 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11949 self.expect_token(&Token::RParen)?;
11950
11951 let purpose = if self.parse_keyword(Keyword::FOR) {
11953 if self.parse_keyword(Keyword::SEARCH) {
11954 Some(OperatorPurpose::ForSearch)
11955 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11956 let sort_family = self.parse_object_name(false)?;
11957 Some(OperatorPurpose::ForOrderBy { sort_family })
11958 } else {
11959 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11960 }
11961 } else {
11962 None
11963 };
11964
11965 Ok(OperatorFamilyItem::Operator {
11966 strategy_number,
11967 operator_name,
11968 op_types,
11969 purpose,
11970 })
11971 }
11972
11973 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11975 let support_number = self.parse_literal_uint()?;
11976
11977 let op_types =
11979 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
11980 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11981 self.expect_token(&Token::RParen)?;
11982 Some(types)
11983 } else if self.consume_token(&Token::LParen) {
11984 self.expect_token(&Token::RParen)?;
11985 Some(vec![])
11986 } else {
11987 None
11988 };
11989
11990 let function_name = self.parse_object_name(false)?;
11991
11992 let argument_types = if self.consume_token(&Token::LParen) {
11994 if self.peek_token_ref().token == Token::RParen {
11995 self.expect_token(&Token::RParen)?;
11996 vec![]
11997 } else {
11998 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11999 self.expect_token(&Token::RParen)?;
12000 types
12001 }
12002 } else {
12003 vec![]
12004 };
12005
12006 Ok(OperatorFamilyItem::Function {
12007 support_number,
12008 op_types,
12009 function_name,
12010 argument_types,
12011 })
12012 }
12013
12014 fn parse_operator_family_drop_operator(
12016 &mut self,
12017 ) -> Result<OperatorFamilyDropItem, ParserError> {
12018 let strategy_number = self.parse_literal_uint()?;
12019
12020 self.expect_token(&Token::LParen)?;
12022 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
12023 self.expect_token(&Token::RParen)?;
12024
12025 Ok(OperatorFamilyDropItem::Operator {
12026 strategy_number,
12027 op_types,
12028 })
12029 }
12030
12031 fn parse_operator_family_drop_function(
12033 &mut self,
12034 ) -> Result<OperatorFamilyDropItem, ParserError> {
12035 let support_number = self.parse_literal_uint()?;
12036
12037 self.expect_token(&Token::LParen)?;
12039 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
12040 self.expect_token(&Token::RParen)?;
12041
12042 Ok(OperatorFamilyDropItem::Function {
12043 support_number,
12044 op_types,
12045 })
12046 }
12047
12048 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
12050 if self.parse_keyword(Keyword::OPERATOR) {
12051 self.parse_operator_family_add_operator()
12052 } else if self.parse_keyword(Keyword::FUNCTION) {
12053 self.parse_operator_family_add_function()
12054 } else {
12055 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
12056 }
12057 }
12058
12059 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
12061 if self.parse_keyword(Keyword::OPERATOR) {
12062 self.parse_operator_family_drop_operator()
12063 } else if self.parse_keyword(Keyword::FUNCTION) {
12064 self.parse_operator_family_drop_function()
12065 } else {
12066 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
12067 }
12068 }
12069
12070 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
12073 let name = self.parse_object_name(false)?;
12074 self.expect_keyword(Keyword::USING)?;
12075 let using = self.parse_identifier()?;
12076
12077 let operation = if self.parse_keyword(Keyword::ADD) {
12078 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
12079 AlterOperatorFamilyOperation::Add { items }
12080 } else if self.parse_keyword(Keyword::DROP) {
12081 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
12082 AlterOperatorFamilyOperation::Drop { items }
12083 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12084 let new_name = self.parse_object_name(false)?;
12085 AlterOperatorFamilyOperation::RenameTo { new_name }
12086 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12087 let owner = self.parse_owner()?;
12088 AlterOperatorFamilyOperation::OwnerTo(owner)
12089 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
12090 let schema_name = self.parse_object_name(false)?;
12091 AlterOperatorFamilyOperation::SetSchema { schema_name }
12092 } else {
12093 return self.expected_ref(
12094 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
12095 self.peek_token_ref(),
12096 );
12097 };
12098
12099 Ok(AlterOperatorFamily {
12100 name,
12101 using,
12102 operation,
12103 })
12104 }
12105
12106 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
12110 let name = self.parse_object_name(false)?;
12111 self.expect_keyword(Keyword::USING)?;
12112 let using = self.parse_identifier()?;
12113
12114 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12115 let new_name = self.parse_object_name(false)?;
12116 AlterOperatorClassOperation::RenameTo { new_name }
12117 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12118 let owner = self.parse_owner()?;
12119 AlterOperatorClassOperation::OwnerTo(owner)
12120 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
12121 let schema_name = self.parse_object_name(false)?;
12122 AlterOperatorClassOperation::SetSchema { schema_name }
12123 } else {
12124 return self.expected_ref(
12125 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
12126 self.peek_token_ref(),
12127 );
12128 };
12129
12130 Ok(AlterOperatorClass {
12131 name,
12132 using,
12133 operation,
12134 })
12135 }
12136
    /// Parses an `ALTER SCHEMA` statement. Unlike most `parse_alter_*` helpers
    /// here, this one expects the leading `ALTER SCHEMA` keywords to still be
    /// unconsumed.
    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
            // Step back one token so `parse_options` can re-consume OPTIONS
            // together with its parenthesized key/value list.
            self.prev_token();
            let options = self.parse_options(Keyword::OPTIONS)?;
            AlterSchemaOperation::SetOptionsParens { options }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
            let collate = self.parse_expr()?;
            AlterSchemaOperation::SetDefaultCollate { collate }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            // An OPTIONS(...) list after the replica name is optional.
            let options = if self.peek_keyword(Keyword::OPTIONS) {
                Some(self.parse_options(Keyword::OPTIONS)?)
            } else {
                None
            };
            AlterSchemaOperation::AddReplica { replica, options }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            AlterSchemaOperation::DropReplica { replica }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterSchemaOperation::Rename { name: new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterSchemaOperation::OwnerTo { owner }
        } else {
            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
        };
        // The AST carries a list of operations; exactly one is parsed here.
        Ok(Statement::AlterSchema(AlterSchema {
            name,
            if_exists,
            operations: vec![operation],
        }))
    }
12177
12178 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
12181 let object_name = self.parse_object_name(false)?;
12182 if self.peek_token_ref().token == Token::LParen {
12183 match self.parse_function(object_name)? {
12184 Expr::Function(f) => Ok(Statement::Call(f)),
12185 other => parser_err!(
12186 format!("Expected a simple procedure call but found: {other}"),
12187 self.peek_token_ref().span.start
12188 ),
12189 }
12190 } else {
12191 Ok(Statement::Call(Function {
12192 name: object_name,
12193 uses_odbc_syntax: false,
12194 parameters: FunctionArguments::None,
12195 args: FunctionArguments::None,
12196 over: None,
12197 filter: None,
12198 null_treatment: None,
12199 within_group: vec![],
12200 }))
12201 }
12202 }
12203
    /// Parses the remainder of a `COPY` statement:
    /// `COPY { <table> [( <columns> )] | ( <query> ) } { FROM | TO } <target>
    /// [WITH] [( <options> )] [<legacy options>...]`, optionally followed by
    /// inline tab-separated data for `COPY ... FROM STDIN`.
    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
        let source;
        if self.consume_token(&Token::LParen) {
            // `COPY ( <query> ) ...`
            source = CopySource::Query(self.parse_query()?);
            self.expect_token(&Token::RParen)?;
        } else {
            // `COPY <table> [( <columns> )] ...`
            let table_name = self.parse_object_name(false)?;
            let columns = self.parse_parenthesized_column_list(Optional, false)?;
            source = CopySource::Table {
                table_name,
                columns,
            };
        }
        // Direction: FROM = copy in (`to == false`), TO = copy out.
        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
            Some(Keyword::FROM) => false,
            Some(Keyword::TO) => true,
            _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
        };
        if !to {
            // A query source is only valid when copying out (COPY ... TO).
            if let CopySource::Query(_) = source {
                return Err(ParserError::ParserError(
                    "COPY ... FROM does not support query as a source".to_string(),
                ));
            }
        }
        let target = if self.parse_keyword(Keyword::STDIN) {
            CopyTarget::Stdin
        } else if self.parse_keyword(Keyword::STDOUT) {
            CopyTarget::Stdout
        } else if self.parse_keyword(Keyword::PROGRAM) {
            CopyTarget::Program {
                command: self.parse_literal_string()?,
            }
        } else {
            CopyTarget::File {
                filename: self.parse_literal_string()?,
            }
        };
        // An optional WITH keyword may precede the option list; it carries no
        // meaning of its own.
        let _ = self.parse_keyword(Keyword::WITH);
        let mut options = vec![];
        if self.consume_token(&Token::LParen) {
            options = self.parse_comma_separated(Parser::parse_copy_option)?;
            self.expect_token(&Token::RParen)?;
        }
        // Bare (non-parenthesized) legacy options may follow; keep consuming
        // until one fails to parse.
        let mut legacy_options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            legacy_options.push(opt);
        }
        // For `COPY ... FROM STDIN`, any remaining input after the semicolon
        // is read as inline tab-separated values.
        let values =
            if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
                self.expect_token(&Token::SemiColon)?;
                self.parse_tsv()
            } else {
                vec![]
            };
        Ok(Statement::Copy {
            source,
            to,
            target,
            options,
            legacy_options,
            values,
        })
    }
12271
12272 fn parse_open(&mut self) -> Result<Statement, ParserError> {
12274 self.expect_keyword(Keyword::OPEN)?;
12275 Ok(Statement::Open(OpenStatement {
12276 cursor_name: self.parse_identifier()?,
12277 }))
12278 }
12279
12280 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
12282 let cursor = if self.parse_keyword(Keyword::ALL) {
12283 CloseCursor::All
12284 } else {
12285 let name = self.parse_identifier()?;
12286
12287 CloseCursor::Specific { name }
12288 };
12289
12290 Ok(Statement::Close { cursor })
12291 }
12292
    /// Parses a single option from the parenthesized `COPY ... [WITH] ( ... )`
    /// option list.
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
            // FREEZE takes an optional boolean; a bare FREEZE (or `FREEZE
            // TRUE`) means enabled — only an explicit FALSE disables it.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            // HEADER has the same optional-boolean behavior as FREEZE.
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            // The FORCE_* options take a mandatory parenthesized column list.
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected_ref("option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12334
12335 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
12336 if self.parse_keyword(Keyword::FORMAT) {
12338 let _ = self.parse_keyword(Keyword::AS);
12339 }
12340
12341 let ret = match self.parse_one_of_keywords(&[
12342 Keyword::ACCEPTANYDATE,
12343 Keyword::ACCEPTINVCHARS,
12344 Keyword::ADDQUOTES,
12345 Keyword::ALLOWOVERWRITE,
12346 Keyword::BINARY,
12347 Keyword::BLANKSASNULL,
12348 Keyword::BZIP2,
12349 Keyword::CLEANPATH,
12350 Keyword::COMPUPDATE,
12351 Keyword::CREDENTIALS,
12352 Keyword::CSV,
12353 Keyword::DATEFORMAT,
12354 Keyword::DELIMITER,
12355 Keyword::EMPTYASNULL,
12356 Keyword::ENCRYPTED,
12357 Keyword::ESCAPE,
12358 Keyword::EXTENSION,
12359 Keyword::FIXEDWIDTH,
12360 Keyword::GZIP,
12361 Keyword::HEADER,
12362 Keyword::IAM_ROLE,
12363 Keyword::IGNOREHEADER,
12364 Keyword::JSON,
12365 Keyword::MANIFEST,
12366 Keyword::MAXFILESIZE,
12367 Keyword::NULL,
12368 Keyword::PARALLEL,
12369 Keyword::PARQUET,
12370 Keyword::PARTITION,
12371 Keyword::REGION,
12372 Keyword::REMOVEQUOTES,
12373 Keyword::ROWGROUPSIZE,
12374 Keyword::STATUPDATE,
12375 Keyword::TIMEFORMAT,
12376 Keyword::TRUNCATECOLUMNS,
12377 Keyword::ZSTD,
12378 ]) {
12379 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
12380 Some(Keyword::ACCEPTINVCHARS) => {
12381 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12383 Some(self.parse_literal_string()?)
12384 } else {
12385 None
12386 };
12387 CopyLegacyOption::AcceptInvChars(ch)
12388 }
12389 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
12390 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
12391 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
12392 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
12393 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
12394 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
12395 Some(Keyword::COMPUPDATE) => {
12396 let preset = self.parse_keyword(Keyword::PRESET);
12397 let enabled = match self.parse_one_of_keywords(&[
12398 Keyword::TRUE,
12399 Keyword::FALSE,
12400 Keyword::ON,
12401 Keyword::OFF,
12402 ]) {
12403 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12404 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12405 _ => None,
12406 };
12407 CopyLegacyOption::CompUpdate { preset, enabled }
12408 }
12409 Some(Keyword::CREDENTIALS) => {
12410 CopyLegacyOption::Credentials(self.parse_literal_string()?)
12411 }
12412 Some(Keyword::CSV) => CopyLegacyOption::Csv({
12413 let mut opts = vec![];
12414 while let Some(opt) =
12415 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
12416 {
12417 opts.push(opt);
12418 }
12419 opts
12420 }),
12421 Some(Keyword::DATEFORMAT) => {
12422 let _ = self.parse_keyword(Keyword::AS);
12423 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12424 Some(self.parse_literal_string()?)
12425 } else {
12426 None
12427 };
12428 CopyLegacyOption::DateFormat(fmt)
12429 }
12430 Some(Keyword::DELIMITER) => {
12431 let _ = self.parse_keyword(Keyword::AS);
12432 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
12433 }
12434 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
12435 Some(Keyword::ENCRYPTED) => {
12436 let auto = self.parse_keyword(Keyword::AUTO);
12437 CopyLegacyOption::Encrypted { auto }
12438 }
12439 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
12440 Some(Keyword::EXTENSION) => {
12441 let ext = self.parse_literal_string()?;
12442 CopyLegacyOption::Extension(ext)
12443 }
12444 Some(Keyword::FIXEDWIDTH) => {
12445 let spec = self.parse_literal_string()?;
12446 CopyLegacyOption::FixedWidth(spec)
12447 }
12448 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
12449 Some(Keyword::HEADER) => CopyLegacyOption::Header,
12450 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
12451 Some(Keyword::IGNOREHEADER) => {
12452 let _ = self.parse_keyword(Keyword::AS);
12453 let num_rows = self.parse_literal_uint()?;
12454 CopyLegacyOption::IgnoreHeader(num_rows)
12455 }
12456 Some(Keyword::JSON) => {
12457 let _ = self.parse_keyword(Keyword::AS);
12458 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12459 Some(self.parse_literal_string()?)
12460 } else {
12461 None
12462 };
12463 CopyLegacyOption::Json(fmt)
12464 }
12465 Some(Keyword::MANIFEST) => {
12466 let verbose = self.parse_keyword(Keyword::VERBOSE);
12467 CopyLegacyOption::Manifest { verbose }
12468 }
12469 Some(Keyword::MAXFILESIZE) => {
12470 let _ = self.parse_keyword(Keyword::AS);
12471 let size = self.parse_number_value()?;
12472 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12473 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12474 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12475 _ => None,
12476 };
12477 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
12478 }
12479 Some(Keyword::NULL) => {
12480 let _ = self.parse_keyword(Keyword::AS);
12481 CopyLegacyOption::Null(self.parse_literal_string()?)
12482 }
12483 Some(Keyword::PARALLEL) => {
12484 let enabled = match self.parse_one_of_keywords(&[
12485 Keyword::TRUE,
12486 Keyword::FALSE,
12487 Keyword::ON,
12488 Keyword::OFF,
12489 ]) {
12490 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12491 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12492 _ => None,
12493 };
12494 CopyLegacyOption::Parallel(enabled)
12495 }
12496 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
12497 Some(Keyword::PARTITION) => {
12498 self.expect_keyword(Keyword::BY)?;
12499 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
12500 let include = self.parse_keyword(Keyword::INCLUDE);
12501 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
12502 }
12503 Some(Keyword::REGION) => {
12504 let _ = self.parse_keyword(Keyword::AS);
12505 let region = self.parse_literal_string()?;
12506 CopyLegacyOption::Region(region)
12507 }
12508 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
12509 Some(Keyword::ROWGROUPSIZE) => {
12510 let _ = self.parse_keyword(Keyword::AS);
12511 let file_size = self.parse_file_size()?;
12512 CopyLegacyOption::RowGroupSize(file_size)
12513 }
12514 Some(Keyword::STATUPDATE) => {
12515 let enabled = match self.parse_one_of_keywords(&[
12516 Keyword::TRUE,
12517 Keyword::FALSE,
12518 Keyword::ON,
12519 Keyword::OFF,
12520 ]) {
12521 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12522 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12523 _ => None,
12524 };
12525 CopyLegacyOption::StatUpdate(enabled)
12526 }
12527 Some(Keyword::TIMEFORMAT) => {
12528 let _ = self.parse_keyword(Keyword::AS);
12529 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12530 Some(self.parse_literal_string()?)
12531 } else {
12532 None
12533 };
12534 CopyLegacyOption::TimeFormat(fmt)
12535 }
12536 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
12537 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
12538 _ => self.expected_ref("option", self.peek_token_ref())?,
12539 };
12540 Ok(ret)
12541 }
12542
12543 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12544 let size = self.parse_number_value()?;
12545 let unit = self.maybe_parse_file_size_unit();
12546 Ok(FileSize { size, unit })
12547 }
12548
12549 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12550 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12551 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12552 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12553 _ => None,
12554 }
12555 }
12556
12557 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12558 if self.parse_keyword(Keyword::DEFAULT) {
12559 Ok(IamRoleKind::Default)
12560 } else {
12561 let arn = self.parse_literal_string()?;
12562 Ok(IamRoleKind::Arn(arn))
12563 }
12564 }
12565
12566 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
12567 let ret = match self.parse_one_of_keywords(&[
12568 Keyword::HEADER,
12569 Keyword::QUOTE,
12570 Keyword::ESCAPE,
12571 Keyword::FORCE,
12572 ]) {
12573 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
12574 Some(Keyword::QUOTE) => {
12575 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
12577 }
12578 Some(Keyword::ESCAPE) => {
12579 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
12581 }
12582 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
12583 CopyLegacyCsvOption::ForceNotNull(
12584 self.parse_comma_separated(|p| p.parse_identifier())?,
12585 )
12586 }
12587 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
12588 CopyLegacyCsvOption::ForceQuote(
12589 self.parse_comma_separated(|p| p.parse_identifier())?,
12590 )
12591 }
12592 _ => self.expected_ref("csv option", self.peek_token_ref())?,
12593 };
12594 Ok(ret)
12595 }
12596
12597 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12598 let s = self.parse_literal_string()?;
12599 if s.len() != 1 {
12600 let loc = self
12601 .tokens
12602 .get(self.index - 1)
12603 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12604 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12605 }
12606 Ok(s.chars().next().unwrap())
12607 }
12608
    /// Parse a tab-separated-values block; delegates to
    /// [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
12614
12615 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
12617 let mut values = vec![];
12618 let mut content = String::new();
12619 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
12620 match t {
12621 Token::Whitespace(Whitespace::Tab) => {
12622 values.push(Some(core::mem::take(&mut content)));
12623 }
12624 Token::Whitespace(Whitespace::Newline) => {
12625 values.push(Some(core::mem::take(&mut content)));
12626 }
12627 Token::Backslash => {
12628 if self.consume_token(&Token::Period) {
12629 return values;
12630 }
12631 if let Token::Word(w) = self.next_token().token {
12632 if w.value == "N" {
12633 values.push(None);
12634 }
12635 }
12636 }
12637 _ => {
12638 content.push_str(&t.to_string());
12639 }
12640 }
12641 }
12642 values
12643 }
12644
    /// Parse a literal value — number, string (in any of the supported
    /// quoting styles), boolean, `NULL`, or a placeholder (`:name`,
    /// `@name`, `?`, ...) — returning it together with its source span.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper attaching the consumed token's span to the parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are only values in dialects with boolean literals.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted word that is not a keyword is a quoted string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    // NOTE(review): message "A value?" is inconsistent with
                    // the "a value"/"a concrete value" wording used below.
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Quoted strings may absorb adjacent literals in dialects that
            // support string-literal concatenation.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the name must follow the sigil
            // immediately — the next token is read without skipping
            // whitespace, and anything but a word or plain number errors.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The placeholder's span covers both the sigil and the name.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12762
    /// If the active dialect supports string-literal concatenation, consumes
    /// any following string literals and appends their contents to `str`,
    /// returning the combined string. Otherwise returns `str` unchanged.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            // Consume every directly-following string literal token.
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // In this mode, literals only concatenate when at least one
            // newline separates them, so walk raw (non-skipping) tokens and
            // track whether a newline was seen since the last literal.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    // Other whitespace is skipped without satisfying the
                    // newline requirement.
                    Token::Whitespace(_) => {
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        // Require a fresh newline before the next literal.
                        after_newline = false;
                    }
                    _ => break,
                }
            }
        }

        str
    }
12801
12802 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12804 let value_wrapper = self.parse_value()?;
12805 match &value_wrapper.value {
12806 Value::Number(_, _) => Ok(value_wrapper),
12807 Value::Placeholder(_) => Ok(value_wrapper),
12808 _ => {
12809 self.prev_token();
12810 self.expected_ref("literal number", self.peek_token_ref())
12811 }
12812 }
12813 }
12814
12815 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12818 let next_token = self.next_token();
12819 match next_token.token {
12820 Token::Plus => Ok(Expr::UnaryOp {
12821 op: UnaryOperator::Plus,
12822 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12823 }),
12824 Token::Minus => Ok(Expr::UnaryOp {
12825 op: UnaryOperator::Minus,
12826 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12827 }),
12828 _ => {
12829 self.prev_token();
12830 Ok(Expr::Value(self.parse_number_value()?))
12831 }
12832 }
12833 }
12834
12835 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12836 let next_token = self.next_token();
12837 let span = next_token.span;
12838 match next_token.token {
12839 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12840 Value::SingleQuotedString(s.to_string()).with_span(span),
12841 )),
12842 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12843 Value::DoubleQuotedString(s.to_string()).with_span(span),
12844 )),
12845 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12846 Value::HexStringLiteral(s.to_string()).with_span(span),
12847 )),
12848 unexpected => self.expected(
12849 "a string value",
12850 TokenWithSpan {
12851 token: unexpected,
12852 span,
12853 },
12854 ),
12855 }
12856 }
12857
12858 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12860 let next_token = self.next_token();
12861 match next_token.token {
12862 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12863 _ => self.expected("literal int", next_token),
12864 }
12865 }
12866
    /// Parse a `CREATE FUNCTION ... AS 'definition'` body string, with an
    /// optional second string after a comma (`AS 'obj_file', 'link_symbol'`).
    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
        // Parses one body string: a dollar-quoted string in Postgres-like
        // dialects, otherwise any literal string.
        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
            let peek_token = parser.peek_token();
            let span = peek_token.span;
            match peek_token.token {
                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
                {
                    parser.next_token();
                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
                }
                _ => Ok(Expr::Value(
                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
                )),
            }
        };

        Ok(CreateFunctionBody::AsBeforeOptions {
            body: parse_string_expr(self)?,
            // A trailing `, 'link_symbol'` is optional.
            link_symbol: if self.consume_token(&Token::Comma) {
                Some(parse_string_expr(self)?)
            } else {
                None
            },
        })
    }
12894
    /// Parse a literal string: an unquoted non-keyword word, or any of the
    /// string-literal token kinds accepted here (some gated by dialect).
    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            // A bare word that is not a reserved keyword counts as a
            // literal string in this context.
            Token::Word(Word {
                value,
                keyword: Keyword::NoKeyword,
                ..
            }) => Ok(value),
            Token::SingleQuotedString(s) => Ok(s),
            Token::DoubleQuotedString(s) => Ok(s),
            // E'...' escaped strings: Postgres-like dialects only.
            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s)
            }
            Token::UnicodeStringLiteral(s) => Ok(s),
            // $$...$$ dollar-quoted strings: Postgres-like dialects only.
            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s.value)
            }
            _ => self.expected("literal string", next_token),
        }
    }
12916
12917 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12919 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12920 Some(Keyword::TRUE) => Ok(true),
12921 Some(Keyword::FALSE) => Ok(false),
12922 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12923 }
12924 }
12925
    /// Parse the tail of an `<expr> IS [NOT] [form] NORMALIZED` predicate;
    /// the caller has already consumed `IS`.
    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        let neg = self.parse_keyword(Keyword::NOT);
        // The normalization form is optional; `maybe_parse` backtracks when
        // none of the form keywords is present.
        let normalized_form = self.maybe_parse(|parser| {
            match parser.parse_one_of_keywords(&[
                Keyword::NFC,
                Keyword::NFD,
                Keyword::NFKC,
                Keyword::NFKD,
            ]) {
                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
                _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
            }
        })?;
        // The `NORMALIZED` keyword itself is mandatory.
        if self.parse_keyword(Keyword::NORMALIZED) {
            return Ok(Expr::IsNormalized {
                expr: Box::new(expr),
                form: normalized_form,
                negated: neg,
            });
        }
        self.expected_ref("unicode normalization form", self.peek_token_ref())
    }
12952
12953 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12955 self.expect_token(&Token::LParen)?;
12956 let values = self.parse_comma_separated(|parser| {
12957 let name = parser.parse_literal_string()?;
12958 let e = if parser.consume_token(&Token::Eq) {
12959 let value = parser.parse_number()?;
12960 EnumMember::NamedValue(name, value)
12961 } else {
12962 EnumMember::Name(name)
12963 };
12964 Ok(e)
12965 })?;
12966 self.expect_token(&Token::RParen)?;
12967
12968 Ok(values)
12969 }
12970
12971 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
12973 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
12974 if trailing_bracket.0 {
12975 return parser_err!(
12976 format!("unmatched > after parsing data type {ty}"),
12977 self.peek_token_ref()
12978 );
12979 }
12980
12981 Ok(ty)
12982 }
12983
    /// Core of `parse_data_type`: parses one data type keyword (plus any
    /// precision/scale/length arguments and dialect-specific suffixes) and
    /// reports whether a trailing `>` from a nested angle-bracket type was
    /// already consumed, so callers can balance brackets correctly.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        let next_token_index = self.get_current_index();

        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                // Boolean types.
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                // Floating-point types, with optional UNSIGNED suffix.
                Keyword::FLOAT => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::FloatUnsigned(precision))
                    } else {
                        Ok(DataType::Float(precision))
                    }
                }
                Keyword::REAL => {
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::RealUnsigned)
                    } else {
                        Ok(DataType::Real)
                    }
                }
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    if self.parse_keyword(Keyword::PRECISION) {
                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoublePrecisionUnsigned)
                        } else {
                            Ok(DataType::DoublePrecision)
                        }
                    } else {
                        let precision = self.parse_exact_number_optional_precision_scale()?;

                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoubleUnsigned(precision))
                        } else {
                            Ok(DataType::Double(precision))
                        }
                    }
                }
                // Integer types: optional display width, then an optional
                // UNSIGNED (or, where the dialect allows, noise SIGNED)
                // suffix.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                // Unsigned/huge integer aliases (e.g. DuckDB, ClickHouse).
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // Character string types.
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                // Binary types.
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                // Bit types.
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                // Date/time types.
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // DateTime64 has its own argument syntax; rewind so the
                    // dedicated parser sees the keyword again.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => {
                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
                }
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => {
                    if self.dialect.supports_interval_options() {
                        let fields = self.maybe_parse_optional_interval_fields()?;
                        let precision = self.parse_optional_precision()?;
                        Ok(DataType::Interval { fields, precision })
                    } else {
                        Ok(DataType::Interval {
                            fields: None,
                            precision: None,
                        })
                    }
                }
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                // ClickHouse FixedString requires a mandatory length.
                Keyword::FIXEDSTRING => {
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // Exact numeric types with optional precision/scale.
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecimalUnsigned(precision))
                    } else {
                        Ok(DataType::Decimal(precision))
                    }
                }
                Keyword::DEC => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecUnsigned(precision))
                    } else {
                        Ok(DataType::Dec(precision))
                    }
                }
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                // Enum/set types.
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                // Composite types; angle-bracket forms may report a
                // trailing `>` through `trailing_bracket`.
                Keyword::ARRAY => {
                    if self.dialect.supports_array_typedef_without_element_type() {
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
                {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                // Miscellaneous types.
                Keyword::TRIGGER => Ok(DataType::Trigger),
                Keyword::SETOF => {
                    let inner = self.parse_data_type()?;
                    Ok(DataType::SetOf(Box::new(inner)))
                }
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                Keyword::TABLE => {
                    // `TABLE` with a following `(` carries a column list
                    // (e.g. `RETURNS TABLE (...)`); bare `TABLE` does not.
                    if self.peek_token_ref().token == Token::LParen {
                        let columns = self.parse_returns_table_columns()?;
                        Ok(DataType::Table(Some(columns)))
                    } else {
                        Ok(DataType::Table(None))
                    }
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsVector)
                }
                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsQuery)
                }
                _ => {
                    // Not a known type keyword: parse as a (possibly
                    // qualified) custom type name with optional modifiers.
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Postgres-style `TYPE[]` / `TYPE[n]` array suffixes, possibly
        // repeated for multidimensional arrays.
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                // Optional array size; bare `[]` leaves it as None.
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
13412
    /// Parses a single column definition inside a `RETURNS TABLE (...)` clause.
    ///
    /// Currently a thin delegate to [`Parser::parse_column_def`]; kept as a
    /// separate entry point so the `RETURNS TABLE` grammar can diverge later.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
13416
13417 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13418 self.expect_token(&Token::LParen)?;
13419 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13420 self.expect_token(&Token::RParen)?;
13421 Ok(columns)
13422 }
13423
13424 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13426 self.expect_token(&Token::LParen)?;
13427 let mut values = Vec::new();
13428 loop {
13429 let next_token = self.next_token();
13430 match next_token.token {
13431 Token::SingleQuotedString(value) => values.push(value),
13432 _ => self.expected("a string", next_token)?,
13433 }
13434 let next_token = self.next_token();
13435 match next_token.token {
13436 Token::Comma => (),
13437 Token::RParen => break,
13438 _ => self.expected(", or }", next_token)?,
13439 }
13440 }
13441 Ok(values)
13442 }
13443
13444 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13446 let ident = self.parse_identifier()?;
13447 self.expect_keyword_is(Keyword::AS)?;
13448 let alias = self.parse_identifier()?;
13449 Ok(IdentWithAlias { ident, alias })
13450 }
13451
13452 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13454 let ident = self.parse_identifier()?;
13455 let _after_as = self.parse_keyword(Keyword::AS);
13456 let alias = self.parse_identifier()?;
13457 Ok(IdentWithAlias { ident, alias })
13458 }
13459
13460 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13462 self.parse_comma_separated(|parser| {
13463 parser.expect_token(&Token::LParen)?;
13464 let query = parser.parse_query()?;
13465 parser.expect_token(&Token::RParen)?;
13466 Ok(*query)
13467 })
13468 }
13469
13470 fn parse_distinct_required_set_quantifier(
13472 &mut self,
13473 operator_name: &str,
13474 ) -> Result<SetQuantifier, ParserError> {
13475 let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
13476 match quantifier {
13477 SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
13478 _ => Err(ParserError::ParserError(format!(
13479 "{operator_name} pipe operator requires DISTINCT modifier",
13480 ))),
13481 }
13482 }
13483
13484 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13486 if self.parse_keyword(Keyword::AS) {
13487 Ok(Some(self.parse_identifier()?))
13488 } else {
13489 self.maybe_parse(|parser| parser.parse_identifier())
13491 }
13492 }
13493
13494 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13496 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13497 parser.dialect.is_select_item_alias(explicit, kw, parser)
13498 }
13499 self.parse_optional_alias_inner(None, validator)
13500 }
13501
13502 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
13506 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13507 parser.dialect.is_table_factor_alias(explicit, kw, parser)
13508 }
13509 let explicit = self.peek_keyword(Keyword::AS);
13510 match self.parse_optional_alias_inner(None, validator)? {
13511 Some(name) => {
13512 let columns = self.parse_table_alias_column_defs()?;
13513 Ok(Some(TableAlias {
13514 explicit,
13515 name,
13516 columns,
13517 }))
13518 }
13519 None => Ok(None),
13520 }
13521 }
13522
13523 fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
13524 let mut hints = vec![];
13525 while let Some(hint_type) =
13526 self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
13527 {
13528 let hint_type = match hint_type {
13529 Keyword::USE => TableIndexHintType::Use,
13530 Keyword::IGNORE => TableIndexHintType::Ignore,
13531 Keyword::FORCE => TableIndexHintType::Force,
13532 _ => {
13533 return self.expected_ref(
13534 "expected to match USE/IGNORE/FORCE keyword",
13535 self.peek_token_ref(),
13536 )
13537 }
13538 };
13539 let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
13540 Some(Keyword::INDEX) => TableIndexType::Index,
13541 Some(Keyword::KEY) => TableIndexType::Key,
13542 _ => {
13543 return self
13544 .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
13545 }
13546 };
13547 let for_clause = if self.parse_keyword(Keyword::FOR) {
13548 let clause = if self.parse_keyword(Keyword::JOIN) {
13549 TableIndexHintForClause::Join
13550 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13551 TableIndexHintForClause::OrderBy
13552 } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
13553 TableIndexHintForClause::GroupBy
13554 } else {
13555 return self.expected_ref(
13556 "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
13557 self.peek_token_ref(),
13558 );
13559 };
13560 Some(clause)
13561 } else {
13562 None
13563 };
13564
13565 self.expect_token(&Token::LParen)?;
13566 let index_names = if self.peek_token_ref().token != Token::RParen {
13567 self.parse_comma_separated(Parser::parse_identifier)?
13568 } else {
13569 vec![]
13570 };
13571 self.expect_token(&Token::RParen)?;
13572 hints.push(TableIndexHints {
13573 hint_type,
13574 index_type,
13575 for_clause,
13576 index_names,
13577 });
13578 }
13579 Ok(hints)
13580 }
13581
13582 pub fn parse_optional_alias(
13586 &mut self,
13587 reserved_kwds: &[Keyword],
13588 ) -> Result<Option<Ident>, ParserError> {
13589 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13590 false
13591 }
13592 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13593 }
13594
13595 fn parse_optional_alias_inner<F>(
13602 &mut self,
13603 reserved_kwds: Option<&[Keyword]>,
13604 validator: F,
13605 ) -> Result<Option<Ident>, ParserError>
13606 where
13607 F: Fn(bool, &Keyword, &mut Parser) -> bool,
13608 {
13609 let after_as = self.parse_keyword(Keyword::AS);
13610
13611 let next_token = self.next_token();
13612 match next_token.token {
13613 Token::Word(w)
13616 if reserved_kwds.is_some()
13617 && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
13618 {
13619 Ok(Some(w.into_ident(next_token.span)))
13620 }
13621 Token::Word(w) if validator(after_as, &w.keyword, self) => {
13625 Ok(Some(w.into_ident(next_token.span)))
13626 }
13627 Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
13629 Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
13630 _ => {
13631 if after_as {
13632 return self.expected("an identifier after AS", next_token);
13633 }
13634 self.prev_token();
13635 Ok(None) }
13637 }
13638 }
13639
13640 pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
13642 if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
13643 let expressions = if self.parse_keyword(Keyword::ALL) {
13644 None
13645 } else {
13646 Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
13647 };
13648
13649 let mut modifiers = vec![];
13650 if self.dialect.supports_group_by_with_modifier() {
13651 loop {
13652 if !self.parse_keyword(Keyword::WITH) {
13653 break;
13654 }
13655 let keyword = self.expect_one_of_keywords(&[
13656 Keyword::ROLLUP,
13657 Keyword::CUBE,
13658 Keyword::TOTALS,
13659 ])?;
13660 modifiers.push(match keyword {
13661 Keyword::ROLLUP => GroupByWithModifier::Rollup,
13662 Keyword::CUBE => GroupByWithModifier::Cube,
13663 Keyword::TOTALS => GroupByWithModifier::Totals,
13664 _ => {
13665 return parser_err!(
13666 "BUG: expected to match GroupBy modifier keyword",
13667 self.peek_token_ref().span.start
13668 )
13669 }
13670 });
13671 }
13672 }
13673 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
13674 self.expect_token(&Token::LParen)?;
13675 let result = self.parse_comma_separated(|p| {
13676 if p.peek_token_ref().token == Token::LParen {
13677 p.parse_tuple(true, true)
13678 } else {
13679 Ok(vec![p.parse_expr()?])
13680 }
13681 })?;
13682 self.expect_token(&Token::RParen)?;
13683 modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
13684 result,
13685 )));
13686 };
13687 let group_by = match expressions {
13688 None => GroupByExpr::All(modifiers),
13689 Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
13690 };
13691 Ok(Some(group_by))
13692 } else {
13693 Ok(None)
13694 }
13695 }
13696
13697 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
13699 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13700 let order_by =
13701 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
13702 let order_by_options = self.parse_order_by_options()?;
13703 OrderBy {
13704 kind: OrderByKind::All(order_by_options),
13705 interpolate: None,
13706 }
13707 } else {
13708 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13709 let interpolate = if self.dialect.supports_interpolate() {
13710 self.parse_interpolations()?
13711 } else {
13712 None
13713 };
13714 OrderBy {
13715 kind: OrderByKind::Expressions(exprs),
13716 interpolate,
13717 }
13718 };
13719 Ok(Some(order_by))
13720 } else {
13721 Ok(None)
13722 }
13723 }
13724
13725 fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
13726 let mut offset = if self.parse_keyword(Keyword::OFFSET) {
13727 Some(self.parse_offset()?)
13728 } else {
13729 None
13730 };
13731
13732 let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
13733 let expr = self.parse_limit()?;
13734
13735 if self.dialect.supports_limit_comma()
13736 && offset.is_none()
13737 && expr.is_some() && self.consume_token(&Token::Comma)
13739 {
13740 let offset = expr.ok_or_else(|| {
13741 ParserError::ParserError(
13742 "Missing offset for LIMIT <offset>, <limit>".to_string(),
13743 )
13744 })?;
13745 return Ok(Some(LimitClause::OffsetCommaLimit {
13746 offset,
13747 limit: self.parse_expr()?,
13748 }));
13749 }
13750
13751 let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
13752 Some(self.parse_comma_separated(Parser::parse_expr)?)
13753 } else {
13754 None
13755 };
13756
13757 (Some(expr), limit_by)
13758 } else {
13759 (None, None)
13760 };
13761
13762 if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
13763 offset = Some(self.parse_offset()?);
13764 }
13765
13766 if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
13767 Ok(Some(LimitClause::LimitOffset {
13768 limit: limit.unwrap_or_default(),
13769 offset,
13770 limit_by: limit_by.unwrap_or_default(),
13771 }))
13772 } else {
13773 Ok(None)
13774 }
13775 }
13776
13777 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13780 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13781 let fn_name = self.parse_object_name(false)?;
13782 self.parse_function_call(fn_name)
13783 .map(TableObject::TableFunction)
13784 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13785 self.parse_parenthesized(|p| p.parse_query())
13786 .map(TableObject::TableQuery)
13787 } else {
13788 self.parse_object_name(false).map(TableObject::TableName)
13789 }
13790 }
13791
13792 pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
13799 self.parse_object_name_inner(in_table_clause, false)
13800 }
13801
13802 fn parse_object_name_inner(
13812 &mut self,
13813 in_table_clause: bool,
13814 allow_wildcards: bool,
13815 ) -> Result<ObjectName, ParserError> {
13816 let mut parts = vec![];
13817 if dialect_of!(self is BigQueryDialect) && in_table_clause {
13818 loop {
13819 let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
13820 parts.push(ObjectNamePart::Identifier(ident));
13821 if !self.consume_token(&Token::Period) && !end_with_period {
13822 break;
13823 }
13824 }
13825 } else {
13826 loop {
13827 if allow_wildcards && self.peek_token_ref().token == Token::Mul {
13828 let span = self.next_token().span;
13829 parts.push(ObjectNamePart::Identifier(Ident {
13830 value: Token::Mul.to_string(),
13831 quote_style: None,
13832 span,
13833 }));
13834 } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
13835 let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
13836 parts.push(ObjectNamePart::Identifier(ident));
13837 if !self.consume_token(&Token::Period) && !end_with_period {
13838 break;
13839 }
13840 } else if self.dialect.supports_object_name_double_dot_notation()
13841 && parts.len() == 1
13842 && matches!(self.peek_token_ref().token, Token::Period)
13843 {
13844 parts.push(ObjectNamePart::Identifier(Ident::new("")));
13846 } else {
13847 let ident = self.parse_identifier()?;
13848 let part = if self
13849 .dialect
13850 .is_identifier_generating_function_name(&ident, &parts)
13851 {
13852 self.expect_token(&Token::LParen)?;
13853 let args: Vec<FunctionArg> =
13854 self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
13855 self.expect_token(&Token::RParen)?;
13856 ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
13857 } else {
13858 ObjectNamePart::Identifier(ident)
13859 };
13860 parts.push(part);
13861 }
13862
13863 if !self.consume_token(&Token::Period) {
13864 break;
13865 }
13866 }
13867 }
13868
13869 if dialect_of!(self is BigQueryDialect)
13872 && parts.iter().any(|part| {
13873 part.as_ident()
13874 .is_some_and(|ident| ident.value.contains('.'))
13875 })
13876 {
13877 parts = parts
13878 .into_iter()
13879 .flat_map(|part| match part.as_ident() {
13880 Some(ident) => ident
13881 .value
13882 .split('.')
13883 .map(|value| {
13884 ObjectNamePart::Identifier(Ident {
13885 value: value.into(),
13886 quote_style: ident.quote_style,
13887 span: ident.span,
13888 })
13889 })
13890 .collect::<Vec<_>>(),
13891 None => vec![part],
13892 })
13893 .collect()
13894 }
13895
13896 Ok(ObjectName(parts))
13897 }
13898
13899 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
13901 let mut idents = vec![];
13902 loop {
13903 let token = self.peek_token_ref();
13904 match &token.token {
13905 Token::Word(w) => {
13906 idents.push(w.to_ident(token.span));
13907 }
13908 Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
13909 break
13910 }
13911 _ => {}
13912 }
13913 self.advance_token();
13914 }
13915 Ok(idents)
13916 }
13917
13918 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
13958 let mut idents = vec![];
13959
13960 let next_token = self.next_token();
13962 match next_token.token {
13963 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13964 Token::EOF => {
13965 return Err(ParserError::ParserError(
13966 "Empty input when parsing identifier".to_string(),
13967 ))?
13968 }
13969 token => {
13970 return Err(ParserError::ParserError(format!(
13971 "Unexpected token in identifier: {token}"
13972 )))?
13973 }
13974 };
13975
13976 loop {
13978 match self.next_token().token {
13979 Token::Period => {
13981 let next_token = self.next_token();
13982 match next_token.token {
13983 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13984 Token::EOF => {
13985 return Err(ParserError::ParserError(
13986 "Trailing period in identifier".to_string(),
13987 ))?
13988 }
13989 token => {
13990 return Err(ParserError::ParserError(format!(
13991 "Unexpected token following period in identifier: {token}"
13992 )))?
13993 }
13994 }
13995 }
13996 Token::EOF => break,
13997 token => {
13998 return Err(ParserError::ParserError(format!(
13999 "Unexpected token in identifier: {token}"
14000 )))?;
14001 }
14002 }
14003 }
14004
14005 Ok(idents)
14006 }
14007
14008 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
14010 let next_token = self.next_token();
14011 match next_token.token {
14012 Token::Word(w) => Ok(w.into_ident(next_token.span)),
14013 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
14014 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
14015 _ => self.expected("identifier", next_token),
14016 }
14017 }
14018
14019 fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
14030 match self.peek_token().token {
14031 Token::Word(w) => {
14032 let quote_style_is_none = w.quote_style.is_none();
14033 let mut requires_whitespace = false;
14034 let mut ident = w.into_ident(self.next_token().span);
14035 if quote_style_is_none {
14036 while matches!(self.peek_token_no_skip().token, Token::Minus) {
14037 self.next_token();
14038 ident.value.push('-');
14039
14040 let token = self
14041 .next_token_no_skip()
14042 .cloned()
14043 .unwrap_or(TokenWithSpan::wrap(Token::EOF));
14044 requires_whitespace = match token.token {
14045 Token::Word(next_word) if next_word.quote_style.is_none() => {
14046 ident.value.push_str(&next_word.value);
14047 false
14048 }
14049 Token::Number(s, false) => {
14050 if s.ends_with('.') {
14057 let Some(s) = s.split('.').next().filter(|s| {
14058 !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
14059 }) else {
14060 return self.expected(
14061 "continuation of hyphenated identifier",
14062 TokenWithSpan::new(Token::Number(s, false), token.span),
14063 );
14064 };
14065 ident.value.push_str(s);
14066 return Ok((ident, true));
14067 } else {
14068 ident.value.push_str(&s);
14069 }
14070 !matches!(self.peek_token_ref().token, Token::Period)
14073 }
14074 _ => {
14075 return self
14076 .expected("continuation of hyphenated identifier", token);
14077 }
14078 }
14079 }
14080
14081 if requires_whitespace {
14084 let token = self.next_token();
14085 if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
14086 return self
14087 .expected("whitespace following hyphenated identifier", token);
14088 }
14089 }
14090 }
14091 Ok((ident, false))
14092 }
14093 _ => Ok((self.parse_identifier()?, false)),
14094 }
14095 }
14096
14097 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
14099 if self.consume_token(&Token::LParen) {
14100 if self.peek_token_ref().token == Token::RParen {
14101 self.next_token();
14102 Ok(vec![])
14103 } else {
14104 let cols = self.parse_comma_separated_with_trailing_commas(
14105 Parser::parse_view_column,
14106 self.dialect.supports_column_definition_trailing_commas(),
14107 Self::is_reserved_for_column_alias,
14108 )?;
14109 self.expect_token(&Token::RParen)?;
14110 Ok(cols)
14111 }
14112 } else {
14113 Ok(vec![])
14114 }
14115 }
14116
14117 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
14119 let name = self.parse_identifier()?;
14120 let options = self.parse_view_column_options()?;
14121 let data_type = if dialect_of!(self is ClickHouseDialect) {
14122 Some(self.parse_data_type()?)
14123 } else {
14124 None
14125 };
14126 Ok(ViewColumnDef {
14127 name,
14128 data_type,
14129 options,
14130 })
14131 }
14132
14133 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
14134 let mut options = Vec::new();
14135 loop {
14136 let option = self.parse_optional_column_option()?;
14137 if let Some(option) = option {
14138 options.push(option);
14139 } else {
14140 break;
14141 }
14142 }
14143 if options.is_empty() {
14144 Ok(None)
14145 } else if self.dialect.supports_space_separated_column_options() {
14146 Ok(Some(ColumnOptions::SpaceSeparated(options)))
14147 } else {
14148 Ok(Some(ColumnOptions::CommaSeparated(options)))
14149 }
14150 }
14151
14152 pub fn parse_parenthesized_column_list(
14155 &mut self,
14156 optional: IsOptional,
14157 allow_empty: bool,
14158 ) -> Result<Vec<Ident>, ParserError> {
14159 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
14160 }
14161
14162 pub fn parse_parenthesized_compound_identifier_list(
14164 &mut self,
14165 optional: IsOptional,
14166 allow_empty: bool,
14167 ) -> Result<Vec<Expr>, ParserError> {
14168 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
14169 Ok(Expr::CompoundIdentifier(
14170 p.parse_period_separated(|p| p.parse_identifier())?,
14171 ))
14172 })
14173 }
14174
14175 fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
14178 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
14179 p.parse_create_index_expr()
14180 })
14181 }
14182
14183 pub fn parse_parenthesized_qualified_column_list(
14186 &mut self,
14187 optional: IsOptional,
14188 allow_empty: bool,
14189 ) -> Result<Vec<ObjectName>, ParserError> {
14190 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
14191 p.parse_object_name(true)
14192 })
14193 }
14194
14195 fn parse_parenthesized_column_list_inner<F, T>(
14198 &mut self,
14199 optional: IsOptional,
14200 allow_empty: bool,
14201 mut f: F,
14202 ) -> Result<Vec<T>, ParserError>
14203 where
14204 F: FnMut(&mut Parser) -> Result<T, ParserError>,
14205 {
14206 if self.consume_token(&Token::LParen) {
14207 if allow_empty && self.peek_token_ref().token == Token::RParen {
14208 self.next_token();
14209 Ok(vec![])
14210 } else {
14211 let cols = self.parse_comma_separated(|p| f(p))?;
14212 self.expect_token(&Token::RParen)?;
14213 Ok(cols)
14214 }
14215 } else if optional == Optional {
14216 Ok(vec![])
14217 } else {
14218 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
14219 }
14220 }
14221
14222 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
14224 if self.consume_token(&Token::LParen) {
14225 let cols = self.parse_comma_separated(|p| {
14226 let name = p.parse_identifier()?;
14227 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
14228 Ok(TableAliasColumnDef { name, data_type })
14229 })?;
14230 self.expect_token(&Token::RParen)?;
14231 Ok(cols)
14232 } else {
14233 Ok(vec![])
14234 }
14235 }
14236
14237 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
14239 self.expect_token(&Token::LParen)?;
14240 let n = self.parse_literal_uint()?;
14241 self.expect_token(&Token::RParen)?;
14242 Ok(n)
14243 }
14244
14245 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
14247 if self.consume_token(&Token::LParen) {
14248 let n = self.parse_literal_uint()?;
14249 self.expect_token(&Token::RParen)?;
14250 Ok(Some(n))
14251 } else {
14252 Ok(None)
14253 }
14254 }
14255
14256 fn maybe_parse_optional_interval_fields(
14257 &mut self,
14258 ) -> Result<Option<IntervalFields>, ParserError> {
14259 match self.parse_one_of_keywords(&[
14260 Keyword::YEAR,
14262 Keyword::DAY,
14263 Keyword::HOUR,
14264 Keyword::MINUTE,
14265 Keyword::MONTH,
14267 Keyword::SECOND,
14268 ]) {
14269 Some(Keyword::YEAR) => {
14270 if self.peek_keyword(Keyword::TO) {
14271 self.expect_keyword(Keyword::TO)?;
14272 self.expect_keyword(Keyword::MONTH)?;
14273 Ok(Some(IntervalFields::YearToMonth))
14274 } else {
14275 Ok(Some(IntervalFields::Year))
14276 }
14277 }
14278 Some(Keyword::DAY) => {
14279 if self.peek_keyword(Keyword::TO) {
14280 self.expect_keyword(Keyword::TO)?;
14281 match self.expect_one_of_keywords(&[
14282 Keyword::HOUR,
14283 Keyword::MINUTE,
14284 Keyword::SECOND,
14285 ])? {
14286 Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
14287 Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
14288 Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
14289 _ => {
14290 self.prev_token();
14291 self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
14292 }
14293 }
14294 } else {
14295 Ok(Some(IntervalFields::Day))
14296 }
14297 }
14298 Some(Keyword::HOUR) => {
14299 if self.peek_keyword(Keyword::TO) {
14300 self.expect_keyword(Keyword::TO)?;
14301 match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
14302 Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
14303 Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
14304 _ => {
14305 self.prev_token();
14306 self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
14307 }
14308 }
14309 } else {
14310 Ok(Some(IntervalFields::Hour))
14311 }
14312 }
14313 Some(Keyword::MINUTE) => {
14314 if self.peek_keyword(Keyword::TO) {
14315 self.expect_keyword(Keyword::TO)?;
14316 self.expect_keyword(Keyword::SECOND)?;
14317 Ok(Some(IntervalFields::MinuteToSecond))
14318 } else {
14319 Ok(Some(IntervalFields::Minute))
14320 }
14321 }
14322 Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
14323 Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
14324 Some(_) => {
14325 self.prev_token();
14326 self.expected_ref(
14327 "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
14328 self.peek_token_ref(),
14329 )
14330 }
14331 None => Ok(None),
14332 }
14333 }
14334
14335 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
14343 self.expect_keyword_is(Keyword::DATETIME64)?;
14344 self.expect_token(&Token::LParen)?;
14345 let precision = self.parse_literal_uint()?;
14346 let time_zone = if self.consume_token(&Token::Comma) {
14347 Some(self.parse_literal_string()?)
14348 } else {
14349 None
14350 };
14351 self.expect_token(&Token::RParen)?;
14352 Ok((precision, time_zone))
14353 }
14354
14355 pub fn parse_optional_character_length(
14357 &mut self,
14358 ) -> Result<Option<CharacterLength>, ParserError> {
14359 if self.consume_token(&Token::LParen) {
14360 let character_length = self.parse_character_length()?;
14361 self.expect_token(&Token::RParen)?;
14362 Ok(Some(character_length))
14363 } else {
14364 Ok(None)
14365 }
14366 }
14367
14368 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
14370 if self.consume_token(&Token::LParen) {
14371 let binary_length = self.parse_binary_length()?;
14372 self.expect_token(&Token::RParen)?;
14373 Ok(Some(binary_length))
14374 } else {
14375 Ok(None)
14376 }
14377 }
14378
14379 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
14381 if self.parse_keyword(Keyword::MAX) {
14382 return Ok(CharacterLength::Max);
14383 }
14384 let length = self.parse_literal_uint()?;
14385 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
14386 Some(CharLengthUnits::Characters)
14387 } else if self.parse_keyword(Keyword::OCTETS) {
14388 Some(CharLengthUnits::Octets)
14389 } else {
14390 None
14391 };
14392 Ok(CharacterLength::IntegerLength { length, unit })
14393 }
14394
14395 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
14397 if self.parse_keyword(Keyword::MAX) {
14398 return Ok(BinaryLength::Max);
14399 }
14400 let length = self.parse_literal_uint()?;
14401 Ok(BinaryLength::IntegerLength { length })
14402 }
14403
14404 pub fn parse_optional_precision_scale(
14406 &mut self,
14407 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14408 if self.consume_token(&Token::LParen) {
14409 let n = self.parse_literal_uint()?;
14410 let scale = if self.consume_token(&Token::Comma) {
14411 Some(self.parse_literal_uint()?)
14412 } else {
14413 None
14414 };
14415 self.expect_token(&Token::RParen)?;
14416 Ok((Some(n), scale))
14417 } else {
14418 Ok((None, None))
14419 }
14420 }
14421
14422 pub fn parse_exact_number_optional_precision_scale(
14424 &mut self,
14425 ) -> Result<ExactNumberInfo, ParserError> {
14426 if self.consume_token(&Token::LParen) {
14427 let precision = self.parse_literal_uint()?;
14428 let scale = if self.consume_token(&Token::Comma) {
14429 Some(self.parse_signed_integer()?)
14430 } else {
14431 None
14432 };
14433
14434 self.expect_token(&Token::RParen)?;
14435
14436 match scale {
14437 None => Ok(ExactNumberInfo::Precision(precision)),
14438 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14439 }
14440 } else {
14441 Ok(ExactNumberInfo::None)
14442 }
14443 }
14444
14445 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14447 let is_negative = self.consume_token(&Token::Minus);
14448
14449 if !is_negative {
14450 let _ = self.consume_token(&Token::Plus);
14451 }
14452
14453 let current_token = self.peek_token_ref();
14454 match ¤t_token.token {
14455 Token::Number(s, _) => {
14456 let s = s.clone();
14457 let span_start = current_token.span.start;
14458 self.advance_token();
14459 let value = Self::parse::<i64>(s, span_start)?;
14460 Ok(if is_negative { -value } else { value })
14461 }
14462 _ => self.expected_ref("number", current_token),
14463 }
14464 }
14465
14466 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
14468 if self.consume_token(&Token::LParen) {
14469 let mut modifiers = Vec::new();
14470 loop {
14471 let next_token = self.next_token();
14472 match next_token.token {
14473 Token::Word(w) => modifiers.push(w.to_string()),
14474 Token::Number(n, _) => modifiers.push(n),
14475 Token::SingleQuotedString(s) => modifiers.push(s),
14476
14477 Token::Comma => {
14478 continue;
14479 }
14480 Token::RParen => {
14481 break;
14482 }
14483 _ => self.expected("type modifiers", next_token)?,
14484 }
14485 }
14486
14487 Ok(Some(modifiers))
14488 } else {
14489 Ok(None)
14490 }
14491 }
14492
14493 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14495 where
14496 F: FnOnce(Box<DataType>) -> DataType,
14497 {
14498 self.expect_token(&Token::LParen)?;
14499 let inside_type = self.parse_data_type()?;
14500 self.expect_token(&Token::RParen)?;
14501 Ok(parent_type(inside_type.into()))
14502 }
14503
14504 fn parse_delete_setexpr_boxed(
14508 &mut self,
14509 delete_token: TokenWithSpan,
14510 ) -> Result<Box<SetExpr>, ParserError> {
14511 Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
14512 }
14513
14514 pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
14516 let optimizer_hints = self.maybe_parse_optimizer_hints()?;
14517 let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
14518 if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
14521 (vec![], false)
14522 } else {
14523 let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
14524 self.expect_keyword_is(Keyword::FROM)?;
14525 (tables, true)
14526 }
14527 } else {
14528 (vec![], true)
14529 };
14530
14531 let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
14532
14533 let output = self.maybe_parse_output_clause()?;
14534
14535 let using = if self.parse_keyword(Keyword::USING) {
14536 Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
14537 } else {
14538 None
14539 };
14540 let selection = if self.parse_keyword(Keyword::WHERE) {
14541 Some(self.parse_expr()?)
14542 } else {
14543 None
14544 };
14545 let returning = if self.parse_keyword(Keyword::RETURNING) {
14546 Some(self.parse_comma_separated(Parser::parse_select_item)?)
14547 } else {
14548 None
14549 };
14550 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14551 self.parse_comma_separated(Parser::parse_order_by_expr)?
14552 } else {
14553 vec![]
14554 };
14555 let limit = if self.parse_keyword(Keyword::LIMIT) {
14556 self.parse_limit()?
14557 } else {
14558 None
14559 };
14560
14561 Ok(Statement::Delete(Delete {
14562 delete_token: delete_token.into(),
14563 optimizer_hints,
14564 tables,
14565 from: if with_from_keyword {
14566 FromTable::WithFromKeyword(from)
14567 } else {
14568 FromTable::WithoutKeyword(from)
14569 },
14570 using,
14571 selection,
14572 returning,
14573 output,
14574 order_by,
14575 limit,
14576 }))
14577 }
14578
14579 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
14582 let modifier_keyword =
14583 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
14584
14585 let id = self.parse_literal_uint()?;
14586
14587 let modifier = match modifier_keyword {
14588 Some(Keyword::CONNECTION) => Some(KillType::Connection),
14589 Some(Keyword::QUERY) => Some(KillType::Query),
14590 Some(Keyword::MUTATION) => {
14591 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
14592 Some(KillType::Mutation)
14593 } else {
14594 self.expected_ref(
14595 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
14596 self.peek_token_ref(),
14597 )?
14598 }
14599 }
14600 _ => None,
14601 };
14602
14603 Ok(Statement::Kill { modifier, id })
14604 }
14605
14606 pub fn parse_explain(
14608 &mut self,
14609 describe_alias: DescribeAlias,
14610 ) -> Result<Statement, ParserError> {
14611 let mut analyze = false;
14612 let mut verbose = false;
14613 let mut query_plan = false;
14614 let mut estimate = false;
14615 let mut format = None;
14616 let mut options = None;
14617
14618 if describe_alias == DescribeAlias::Explain
14621 && self.dialect.supports_explain_with_utility_options()
14622 && self.peek_token_ref().token == Token::LParen
14623 {
14624 options = Some(self.parse_utility_options()?)
14625 } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
14626 query_plan = true;
14627 } else if self.parse_keyword(Keyword::ESTIMATE) {
14628 estimate = true;
14629 } else {
14630 analyze = self.parse_keyword(Keyword::ANALYZE);
14631 verbose = self.parse_keyword(Keyword::VERBOSE);
14632 if self.parse_keyword(Keyword::FORMAT) {
14633 format = Some(self.parse_analyze_format_kind()?);
14634 }
14635 }
14636
14637 match self.maybe_parse(|parser| parser.parse_statement())? {
14638 Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
14639 ParserError::ParserError("Explain must be root of the plan".to_string()),
14640 ),
14641 Some(statement) => Ok(Statement::Explain {
14642 describe_alias,
14643 analyze,
14644 verbose,
14645 query_plan,
14646 estimate,
14647 statement: Box::new(statement),
14648 format,
14649 options,
14650 }),
14651 _ => {
14652 let hive_format =
14653 match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
14654 Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
14655 Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
14656 _ => None,
14657 };
14658
14659 let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
14660 self.parse_keyword(Keyword::TABLE)
14662 } else {
14663 false
14664 };
14665
14666 let table_name = self.parse_object_name(false)?;
14667 Ok(Statement::ExplainTable {
14668 describe_alias,
14669 hive_format,
14670 has_table_keyword,
14671 table_name,
14672 })
14673 }
14674 }
14675 }
14676
14677 #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
14682 pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
14683 let _guard = self.recursion_counter.try_decrease()?;
14684 let with = if self.parse_keyword(Keyword::WITH) {
14685 let with_token = self.get_current_token();
14686 Some(With {
14687 with_token: with_token.clone().into(),
14688 recursive: self.parse_keyword(Keyword::RECURSIVE),
14689 cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
14690 })
14691 } else {
14692 None
14693 };
14694 if self.parse_keyword(Keyword::INSERT) {
14695 Ok(Query {
14696 with,
14697 body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
14698 order_by: None,
14699 limit_clause: None,
14700 fetch: None,
14701 locks: vec![],
14702 for_clause: None,
14703 settings: None,
14704 format_clause: None,
14705 pipe_operators: vec![],
14706 }
14707 .into())
14708 } else if self.parse_keyword(Keyword::UPDATE) {
14709 Ok(Query {
14710 with,
14711 body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
14712 order_by: None,
14713 limit_clause: None,
14714 fetch: None,
14715 locks: vec![],
14716 for_clause: None,
14717 settings: None,
14718 format_clause: None,
14719 pipe_operators: vec![],
14720 }
14721 .into())
14722 } else if self.parse_keyword(Keyword::DELETE) {
14723 Ok(Query {
14724 with,
14725 body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
14726 limit_clause: None,
14727 order_by: None,
14728 fetch: None,
14729 locks: vec![],
14730 for_clause: None,
14731 settings: None,
14732 format_clause: None,
14733 pipe_operators: vec![],
14734 }
14735 .into())
14736 } else if self.parse_keyword(Keyword::MERGE) {
14737 Ok(Query {
14738 with,
14739 body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
14740 limit_clause: None,
14741 order_by: None,
14742 fetch: None,
14743 locks: vec![],
14744 for_clause: None,
14745 settings: None,
14746 format_clause: None,
14747 pipe_operators: vec![],
14748 }
14749 .into())
14750 } else {
14751 let body = self.parse_query_body(self.dialect.prec_unknown())?;
14752
14753 let order_by = self.parse_optional_order_by()?;
14754
14755 let limit_clause = self.parse_optional_limit_clause()?;
14756
14757 let settings = self.parse_settings()?;
14758
14759 let fetch = if self.parse_keyword(Keyword::FETCH) {
14760 Some(self.parse_fetch()?)
14761 } else {
14762 None
14763 };
14764
14765 let mut for_clause = None;
14766 let mut locks = Vec::new();
14767 while self.parse_keyword(Keyword::FOR) {
14768 if let Some(parsed_for_clause) = self.parse_for_clause()? {
14769 for_clause = Some(parsed_for_clause);
14770 break;
14771 } else {
14772 locks.push(self.parse_lock()?);
14773 }
14774 }
14775 let format_clause =
14776 if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
14777 if self.parse_keyword(Keyword::NULL) {
14778 Some(FormatClause::Null)
14779 } else {
14780 let ident = self.parse_identifier()?;
14781 Some(FormatClause::Identifier(ident))
14782 }
14783 } else {
14784 None
14785 };
14786
14787 let pipe_operators = if self.dialect.supports_pipe_operator() {
14788 self.parse_pipe_operators()?
14789 } else {
14790 Vec::new()
14791 };
14792
14793 Ok(Query {
14794 with,
14795 body,
14796 order_by,
14797 limit_clause,
14798 fetch,
14799 locks,
14800 for_clause,
14801 settings,
14802 format_clause,
14803 pipe_operators,
14804 }
14805 .into())
14806 }
14807 }
14808
    /// Parses a chain of pipe operators (`|> SELECT …`, `|> WHERE …`, …)
    /// following a query body, returning them in source order.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        // Each segment begins with the `|>` token followed by exactly one of
        // the operator keywords below.
        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                // `|> LIMIT n [OFFSET m]`
                Keyword::LIMIT => {
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                // `|> AGGREGATE [exprs] [GROUP BY exprs]`; the leading
                // expression list is absent when GROUP follows directly.
                Keyword::AGGREGATE => {
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                // INTERSECT/EXCEPT pipes require an explicit DISTINCT or ALL
                // quantifier.
                Keyword::INTERSECT => {
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                // `|> CALL fn(args) [alias]` — the parsed expression must be
                // an actual function call.
                Keyword::CALL => {
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    // The IN list is one of: ANY [ORDER BY …], a subquery, or
                    // an explicit value list.
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Rewind so parse_joins sees the join keyword itself;
                    // a pipe segment must contain exactly one join.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                // Unreachable unless the keyword list above gains an entry
                // without a matching arm.
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
15027
15028 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
15029 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
15030 {
15031 let key_values = self.parse_comma_separated(|p| {
15032 let key = p.parse_identifier()?;
15033 p.expect_token(&Token::Eq)?;
15034 let value = p.parse_expr()?;
15035 Ok(Setting { key, value })
15036 })?;
15037 Some(key_values)
15038 } else {
15039 None
15040 };
15041 Ok(settings)
15042 }
15043
15044 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
15046 if self.parse_keyword(Keyword::XML) {
15047 Ok(Some(self.parse_for_xml()?))
15048 } else if self.parse_keyword(Keyword::JSON) {
15049 Ok(Some(self.parse_for_json()?))
15050 } else if self.parse_keyword(Keyword::BROWSE) {
15051 Ok(Some(ForClause::Browse))
15052 } else {
15053 Ok(None)
15054 }
15055 }
15056
15057 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
15059 let for_xml = if self.parse_keyword(Keyword::RAW) {
15060 let mut element_name = None;
15061 if self.peek_token_ref().token == Token::LParen {
15062 self.expect_token(&Token::LParen)?;
15063 element_name = Some(self.parse_literal_string()?);
15064 self.expect_token(&Token::RParen)?;
15065 }
15066 ForXml::Raw(element_name)
15067 } else if self.parse_keyword(Keyword::AUTO) {
15068 ForXml::Auto
15069 } else if self.parse_keyword(Keyword::EXPLICIT) {
15070 ForXml::Explicit
15071 } else if self.parse_keyword(Keyword::PATH) {
15072 let mut element_name = None;
15073 if self.peek_token_ref().token == Token::LParen {
15074 self.expect_token(&Token::LParen)?;
15075 element_name = Some(self.parse_literal_string()?);
15076 self.expect_token(&Token::RParen)?;
15077 }
15078 ForXml::Path(element_name)
15079 } else {
15080 return Err(ParserError::ParserError(
15081 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
15082 ));
15083 };
15084 let mut elements = false;
15085 let mut binary_base64 = false;
15086 let mut root = None;
15087 let mut r#type = false;
15088 while self.peek_token_ref().token == Token::Comma {
15089 self.next_token();
15090 if self.parse_keyword(Keyword::ELEMENTS) {
15091 elements = true;
15092 } else if self.parse_keyword(Keyword::BINARY) {
15093 self.expect_keyword_is(Keyword::BASE64)?;
15094 binary_base64 = true;
15095 } else if self.parse_keyword(Keyword::ROOT) {
15096 self.expect_token(&Token::LParen)?;
15097 root = Some(self.parse_literal_string()?);
15098 self.expect_token(&Token::RParen)?;
15099 } else if self.parse_keyword(Keyword::TYPE) {
15100 r#type = true;
15101 }
15102 }
15103 Ok(ForClause::Xml {
15104 for_xml,
15105 elements,
15106 binary_base64,
15107 root,
15108 r#type,
15109 })
15110 }
15111
15112 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
15114 let for_json = if self.parse_keyword(Keyword::AUTO) {
15115 ForJson::Auto
15116 } else if self.parse_keyword(Keyword::PATH) {
15117 ForJson::Path
15118 } else {
15119 return Err(ParserError::ParserError(
15120 "Expected FOR JSON [AUTO | PATH ]".to_string(),
15121 ));
15122 };
15123 let mut root = None;
15124 let mut include_null_values = false;
15125 let mut without_array_wrapper = false;
15126 while self.peek_token_ref().token == Token::Comma {
15127 self.next_token();
15128 if self.parse_keyword(Keyword::ROOT) {
15129 self.expect_token(&Token::LParen)?;
15130 root = Some(self.parse_literal_string()?);
15131 self.expect_token(&Token::RParen)?;
15132 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
15133 include_null_values = true;
15134 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
15135 without_array_wrapper = true;
15136 }
15137 }
15138 Ok(ForClause::Json {
15139 for_json,
15140 root,
15141 include_null_values,
15142 without_array_wrapper,
15143 })
15144 }
15145
    /// Parses a single common table expression (`name [(cols)] AS (query)`)
    /// after WITH, including dialect extensions: CTEs without the AS keyword
    /// and a trailing `FROM <ident>` qualifier.
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // Dialects that allow omitting AS: speculatively try the bare
        // `name (query)` form; on failure, fall through to the standard form.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Optional column alias list before AS; this branch also consumes
        // the AS keyword when present (mandatory unless as_optional).
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // PostgreSQL: [NOT] MATERIALIZED between AS and the query.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        // Trailing `FROM <ident>` is only meaningful on from-first dialects.
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
15220
15221 pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
15230 let expr = if self.peek_keyword(Keyword::SELECT)
15233 || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
15234 {
15235 SetExpr::Select(self.parse_select().map(Box::new)?)
15236 } else if self.consume_token(&Token::LParen) {
15237 let subquery = self.parse_query()?;
15239 self.expect_token(&Token::RParen)?;
15240 SetExpr::Query(subquery)
15241 } else if self.parse_keyword(Keyword::VALUES) {
15242 let is_mysql = dialect_of!(self is MySqlDialect);
15243 SetExpr::Values(self.parse_values(is_mysql, false)?)
15244 } else if self.parse_keyword(Keyword::VALUE) {
15245 let is_mysql = dialect_of!(self is MySqlDialect);
15246 SetExpr::Values(self.parse_values(is_mysql, true)?)
15247 } else if self.parse_keyword(Keyword::TABLE) {
15248 SetExpr::Table(Box::new(self.parse_as_table()?))
15249 } else {
15250 return self.expected_ref(
15251 "SELECT, VALUES, or a subquery in the query body",
15252 self.peek_token_ref(),
15253 );
15254 };
15255
15256 self.parse_remaining_set_exprs(expr, precedence)
15257 }
15258
15259 fn parse_remaining_set_exprs(
15263 &mut self,
15264 mut expr: SetExpr,
15265 precedence: u8,
15266 ) -> Result<Box<SetExpr>, ParserError> {
15267 loop {
15268 let op = self.parse_set_operator(&self.peek_token().token);
15270 let next_precedence = match op {
15271 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
15273 10
15274 }
15275 Some(SetOperator::Intersect) => 20,
15277 None => break,
15279 };
15280 if precedence >= next_precedence {
15281 break;
15282 }
15283 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
15285 expr = SetExpr::SetOperation {
15286 left: Box::new(expr),
15287 op: op.unwrap(),
15288 set_quantifier,
15289 right: self.parse_query_body(next_precedence)?,
15290 };
15291 }
15292
15293 Ok(expr.into())
15294 }
15295
15296 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
15298 match token {
15299 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
15300 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
15301 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
15302 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
15303 _ => None,
15304 }
15305 }
15306
15307 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
15309 match op {
15310 Some(
15311 SetOperator::Except
15312 | SetOperator::Intersect
15313 | SetOperator::Union
15314 | SetOperator::Minus,
15315 ) => {
15316 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
15317 SetQuantifier::DistinctByName
15318 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15319 SetQuantifier::ByName
15320 } else if self.parse_keyword(Keyword::ALL) {
15321 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15322 SetQuantifier::AllByName
15323 } else {
15324 SetQuantifier::All
15325 }
15326 } else if self.parse_keyword(Keyword::DISTINCT) {
15327 SetQuantifier::Distinct
15328 } else {
15329 SetQuantifier::None
15330 }
15331 }
15332 _ => SetQuantifier::None,
15333 }
15334 }
15335
    /// Parses a `SELECT` (or from-first `FROM … [SELECT …]`) statement body,
    /// up to but not including set operators and trailing query clauses.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // From-first dialects: `FROM t [SELECT …]`. A bare `FROM t` with no
        // following SELECT yields a Select with an empty projection.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        let value_table_mode = self.parse_value_table_mode()?;

        // MySQL-style modifiers (HIGH_PRIORITY, SQL_NO_CACHE, …); these may
        // also capture ALL / DISTINCT / DISTINCTROW.
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // TOP may come before or after DISTINCT depending on the dialect;
        // remember which so the statement round-trips.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // A DISTINCT already consumed among the modifiers takes priority.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM t` with no projection at all.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        // T-SQL `SELECT … INTO <target>`.
        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Reuse the table list captured by the from-first branch, if any.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive `LATERAL VIEW [OUTER] <expr> <name> [AS col, …]`, repeatable.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                // Aliases end at the next clause keyword; absent aliases
                // (None) are dropped by the flatten below.
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ]) })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        // ClickHouse PREWHERE (evaluated before WHERE).
        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // Oracle hierarchical query clauses (START WITH / CONNECT BY).
        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        // Hive distribution clauses.
        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; record which came
        // first so the statement can be reproduced faithfully.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15563
15564 fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
15573 let supports_hints = self.dialect.supports_comment_optimizer_hint();
15574 if !supports_hints {
15575 return Ok(vec![]);
15576 }
15577 let mut hints = vec![];
15578 loop {
15579 let t = self.peek_nth_token_no_skip_ref(0);
15580 let Token::Whitespace(ws) = &t.token else {
15581 break;
15582 };
15583 match ws {
15584 Whitespace::SingleLineComment { comment, prefix } => {
15585 if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
15586 hints.push(OptimizerHint {
15587 prefix: hint_prefix,
15588 text,
15589 style: OptimizerHintStyle::SingleLine {
15590 prefix: prefix.clone(),
15591 },
15592 });
15593 }
15594 self.next_token_no_skip();
15595 }
15596 Whitespace::MultiLineComment(comment) => {
15597 if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
15598 hints.push(OptimizerHint {
15599 prefix: hint_prefix,
15600 text,
15601 style: OptimizerHintStyle::MultiLine,
15602 });
15603 }
15604 self.next_token_no_skip();
15605 }
15606 Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
15607 self.next_token_no_skip();
15608 }
15609 }
15610 }
15611 Ok(hints)
15612 }
15613
15614 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15617 let (before_plus, text) = comment.split_once('+')?;
15618 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15619 Some((before_plus.to_string(), text.to_string()))
15620 } else {
15621 None
15622 }
15623 }
15624
    /// Parses MySQL-style SELECT modifiers (HIGH_PRIORITY, STRAIGHT_JOIN,
    /// the SQL_* flags) together with any ALL / DISTINCT / DISTINCTROW
    /// quantifier that appears among them.
    ///
    /// Returns the collected modifiers (`None` if no flag was set) plus the
    /// distinct quantifier, which the caller merges with the regular
    /// ALL/DISTINCT handling.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                // Defer to the shared ALL/DISTINCT parser, rewinding the
                // keyword we just consumed so it can re-parse it.
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                // DISTINCTROW is a MySQL synonym for DISTINCT.
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                // Reached when a quantifier keyword repeats after `distinct`
                // is already set; report the error at the offending token.
                _ => {
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Only surface the struct when at least one flag was actually set.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15686
15687 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
15688 if !dialect_of!(self is BigQueryDialect) {
15689 return Ok(None);
15690 }
15691
15692 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
15693 Some(ValueTableMode::DistinctAsValue)
15694 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
15695 Some(ValueTableMode::DistinctAsStruct)
15696 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
15697 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
15698 {
15699 Some(ValueTableMode::AsValue)
15700 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
15701 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
15702 {
15703 Some(ValueTableMode::AsStruct)
15704 } else if self.parse_keyword(Keyword::AS) {
15705 self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
15706 } else {
15707 None
15708 };
15709
15710 Ok(mode)
15711 }
15712
15713 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15717 where
15718 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15719 {
15720 let current_state = self.state;
15721 self.state = state;
15722 let res = f(self);
15723 self.state = current_state;
15724 res
15725 }
15726
15727 pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
15729 let mut clauses = Vec::with_capacity(2);
15730 loop {
15731 if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
15732 clauses.push(ConnectByKind::StartWith {
15733 start_token: self.token_at(idx).clone().into(),
15734 condition: self.parse_expr()?.into(),
15735 });
15736 } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
15737 {
15738 clauses.push(ConnectByKind::ConnectBy {
15739 connect_token: self.token_at(idx).clone().into(),
15740 nocycle: self.parse_keyword(Keyword::NOCYCLE),
15741 relationships: self.with_state(ParserState::ConnectBy, |parser| {
15742 parser.parse_comma_separated(Parser::parse_expr)
15743 })?,
15744 });
15745 } else {
15746 break;
15747 }
15748 }
15749 Ok(clauses)
15750 }
15751
15752 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15754 let token1 = self.next_token();
15755 let token2 = self.next_token();
15756 let token3 = self.next_token();
15757
15758 let table_name;
15759 let schema_name;
15760 if token2 == Token::Period {
15761 match token1.token {
15762 Token::Word(w) => {
15763 schema_name = w.value;
15764 }
15765 _ => {
15766 return self.expected("Schema name", token1);
15767 }
15768 }
15769 match token3.token {
15770 Token::Word(w) => {
15771 table_name = w.value;
15772 }
15773 _ => {
15774 return self.expected("Table name", token3);
15775 }
15776 }
15777 Ok(Table {
15778 table_name: Some(table_name),
15779 schema_name: Some(schema_name),
15780 })
15781 } else {
15782 match token1.token {
15783 Token::Word(w) => {
15784 table_name = w.value;
15785 }
15786 _ => {
15787 return self.expected("Table name", token1);
15788 }
15789 }
15790 Ok(Table {
15791 table_name: Some(table_name),
15792 schema_name: None,
15793 })
15794 }
15795 }
15796
15797 fn parse_set_role(
15799 &mut self,
15800 modifier: Option<ContextModifier>,
15801 ) -> Result<Statement, ParserError> {
15802 self.expect_keyword_is(Keyword::ROLE)?;
15803
15804 let role_name = if self.parse_keyword(Keyword::NONE) {
15805 None
15806 } else {
15807 Some(self.parse_identifier()?)
15808 };
15809 Ok(Statement::Set(Set::SetRole {
15810 context_modifier: modifier,
15811 role_name,
15812 }))
15813 }
15814
15815 fn parse_set_values(
15816 &mut self,
15817 parenthesized_assignment: bool,
15818 ) -> Result<Vec<Expr>, ParserError> {
15819 let mut values = vec![];
15820
15821 if parenthesized_assignment {
15822 self.expect_token(&Token::LParen)?;
15823 }
15824
15825 loop {
15826 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15827 expr
15828 } else if let Ok(expr) = self.parse_expr() {
15829 expr
15830 } else {
15831 self.expected_ref("variable value", self.peek_token_ref())?
15832 };
15833
15834 values.push(value);
15835 if self.consume_token(&Token::Comma) {
15836 continue;
15837 }
15838
15839 if parenthesized_assignment {
15840 self.expect_token(&Token::RParen)?;
15841 }
15842 return Ok(values);
15843 }
15844 }
15845
15846 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15847 let modifier =
15848 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15849
15850 Self::keyword_to_modifier(modifier)
15851 }
15852
15853 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15855 let scope = self.parse_context_modifier();
15856
15857 let name = if self.dialect.supports_parenthesized_set_variables()
15858 && self.consume_token(&Token::LParen)
15859 {
15860 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15864 } else {
15865 self.parse_object_name(false)?
15866 };
15867
15868 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15869 return self.expected_ref("assignment operator", self.peek_token_ref());
15870 }
15871
15872 let value = self.parse_expr()?;
15873
15874 Ok(SetAssignment { scope, name, value })
15875 }
15876
    /// Parses everything after the `SET` keyword.
    ///
    /// Dispatches, in order, to: SET ROLE, SET TIME ZONE, SET NAMES,
    /// SET CHARACTERISTICS AS TRANSACTION, SET TRANSACTION,
    /// SET SESSION AUTHORIZATION, comma-separated assignment lists,
    /// single/parenthesized assignments, and finally dialect-specific
    /// operator-less session parameters. Branch order is significant.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive `SET HIVEVAR:name = value` form.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // A scope modifier (SESSION/LOCAL/GLOBAL) never combines with HIVEVAR.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET [scope] ROLE …` — tried speculatively; rolls back on failure.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE …` / `SET TIMEZONE …`.
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // Assignment form (`SET TIMEZONE = …`) is modeled as a plain
                // single assignment to the pseudo-variable TIMEZONE.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // Keyword form (`SET TIME ZONE <expr>`).
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // `SET NAMES {DEFAULT | <charset> [COLLATE '<collation>']}`.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET [SESSION] CHARACTERISTICS AS TRANSACTION <modes>`.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION {SNAPSHOT <id> | <modes>}`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET <scope> AUTHORIZATION {DEFAULT | <user>}` — a scope
            // modifier is mandatory here.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        // Dialects with `SET a = 1, b = 2, …`: retry from before the scope
        // keyword so each assignment can carry its own scope.
        if self.dialect.supports_comma_separated_set_assignments() {
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten to SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // `SET (a, b) = (…)` parenthesized form vs. a single variable name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // No assignment operator: some dialects (e.g. MSSQL-style session
        // params) allow `SET <name> ON/OFF` — rewind and reparse.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
16046
16047 pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
16049 if self.parse_keyword(Keyword::STATISTICS) {
16050 let topic = match self.parse_one_of_keywords(&[
16051 Keyword::IO,
16052 Keyword::PROFILE,
16053 Keyword::TIME,
16054 Keyword::XML,
16055 ]) {
16056 Some(Keyword::IO) => SessionParamStatsTopic::IO,
16057 Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
16058 Some(Keyword::TIME) => SessionParamStatsTopic::Time,
16059 Some(Keyword::XML) => SessionParamStatsTopic::Xml,
16060 _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
16061 };
16062 let value = self.parse_session_param_value()?;
16063 Ok(
16064 Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
16065 topic,
16066 value,
16067 }))
16068 .into(),
16069 )
16070 } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
16071 let obj = self.parse_object_name(false)?;
16072 let value = self.parse_session_param_value()?;
16073 Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
16074 SetSessionParamIdentityInsert { obj, value },
16075 ))
16076 .into())
16077 } else if self.parse_keyword(Keyword::OFFSETS) {
16078 let keywords = self.parse_comma_separated(|parser| {
16079 let next_token = parser.next_token();
16080 match &next_token.token {
16081 Token::Word(w) => Ok(w.to_string()),
16082 _ => parser.expected("SQL keyword", next_token),
16083 }
16084 })?;
16085 let value = self.parse_session_param_value()?;
16086 Ok(
16087 Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
16088 keywords,
16089 value,
16090 }))
16091 .into(),
16092 )
16093 } else {
16094 let names = self.parse_comma_separated(|parser| {
16095 let next_token = parser.next_token();
16096 match next_token.token {
16097 Token::Word(w) => Ok(w.to_string()),
16098 _ => parser.expected("Session param name", next_token),
16099 }
16100 })?;
16101 let value = self.parse_expr()?.to_string();
16102 Ok(
16103 Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
16104 names,
16105 value,
16106 }))
16107 .into(),
16108 )
16109 }
16110 }
16111
16112 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
16113 if self.parse_keyword(Keyword::ON) {
16114 Ok(SessionParamValue::On)
16115 } else if self.parse_keyword(Keyword::OFF) {
16116 Ok(SessionParamValue::Off)
16117 } else {
16118 self.expected_ref("ON or OFF", self.peek_token_ref())
16119 }
16120 }
16121
    /// Parses everything following the `SHOW` keyword.
    ///
    /// Optional leading flags (TERSE/EXTENDED/FULL/SESSION/GLOBAL/EXTERNAL)
    /// are consumed first, in that fixed order; the next keyword then selects
    /// the concrete SHOW variant. Branch order below is significant because
    /// several variants share keyword prefixes.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // Only the variants above accept EXTENDED/FULL; any other SHOW
            // form combined with those flags is rejected here.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): the keyword is consumed before the dialect check;
            // on other dialects VARIABLES stays consumed and parsing falls
            // through to the next branch — confirm this is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <identifiers>` is treated as showing a variable.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
16185
16186 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
16187 Ok(Statement::ShowCharset(ShowCharset {
16189 is_shorthand,
16190 filter: self.parse_show_statement_filter()?,
16191 }))
16192 }
16193
16194 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
16195 let history = self.parse_keyword(Keyword::HISTORY);
16196 let show_options = self.parse_show_stmt_options()?;
16197 Ok(Statement::ShowCatalogs {
16198 terse,
16199 history,
16200 show_options,
16201 })
16202 }
16203
16204 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
16205 let history = self.parse_keyword(Keyword::HISTORY);
16206 let show_options = self.parse_show_stmt_options()?;
16207 Ok(Statement::ShowDatabases {
16208 terse,
16209 history,
16210 show_options,
16211 })
16212 }
16213
16214 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
16215 let history = self.parse_keyword(Keyword::HISTORY);
16216 let show_options = self.parse_show_stmt_options()?;
16217 Ok(Statement::ShowSchemas {
16218 terse,
16219 history,
16220 show_options,
16221 })
16222 }
16223
16224 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
16226 let obj_type = match self.expect_one_of_keywords(&[
16227 Keyword::TABLE,
16228 Keyword::TRIGGER,
16229 Keyword::FUNCTION,
16230 Keyword::PROCEDURE,
16231 Keyword::EVENT,
16232 Keyword::VIEW,
16233 ])? {
16234 Keyword::TABLE => Ok(ShowCreateObject::Table),
16235 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
16236 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
16237 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
16238 Keyword::EVENT => Ok(ShowCreateObject::Event),
16239 Keyword::VIEW => Ok(ShowCreateObject::View),
16240 keyword => Err(ParserError::ParserError(format!(
16241 "Unable to map keyword to ShowCreateObject: {keyword:?}"
16242 ))),
16243 }?;
16244
16245 let obj_name = self.parse_object_name(false)?;
16246
16247 Ok(Statement::ShowCreate { obj_type, obj_name })
16248 }
16249
16250 pub fn parse_show_columns(
16252 &mut self,
16253 extended: bool,
16254 full: bool,
16255 ) -> Result<Statement, ParserError> {
16256 let show_options = self.parse_show_stmt_options()?;
16257 Ok(Statement::ShowColumns {
16258 extended,
16259 full,
16260 show_options,
16261 })
16262 }
16263
16264 fn parse_show_tables(
16265 &mut self,
16266 terse: bool,
16267 extended: bool,
16268 full: bool,
16269 external: bool,
16270 ) -> Result<Statement, ParserError> {
16271 let history = !external && self.parse_keyword(Keyword::HISTORY);
16272 let show_options = self.parse_show_stmt_options()?;
16273 Ok(Statement::ShowTables {
16274 terse,
16275 history,
16276 extended,
16277 full,
16278 external,
16279 show_options,
16280 })
16281 }
16282
16283 fn parse_show_views(
16284 &mut self,
16285 terse: bool,
16286 materialized: bool,
16287 ) -> Result<Statement, ParserError> {
16288 let show_options = self.parse_show_stmt_options()?;
16289 Ok(Statement::ShowViews {
16290 materialized,
16291 terse,
16292 show_options,
16293 })
16294 }
16295
16296 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
16298 let filter = self.parse_show_statement_filter()?;
16299 Ok(Statement::ShowFunctions { filter })
16300 }
16301
16302 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
16304 let filter = self.parse_show_statement_filter()?;
16305 Ok(Statement::ShowCollation { filter })
16306 }
16307
16308 pub fn parse_show_statement_filter(
16310 &mut self,
16311 ) -> Result<Option<ShowStatementFilter>, ParserError> {
16312 if self.parse_keyword(Keyword::LIKE) {
16313 Ok(Some(ShowStatementFilter::Like(
16314 self.parse_literal_string()?,
16315 )))
16316 } else if self.parse_keyword(Keyword::ILIKE) {
16317 Ok(Some(ShowStatementFilter::ILike(
16318 self.parse_literal_string()?,
16319 )))
16320 } else if self.parse_keyword(Keyword::WHERE) {
16321 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
16322 } else {
16323 self.maybe_parse(|parser| -> Result<String, ParserError> {
16324 parser.parse_literal_string()
16325 })?
16326 .map_or(Ok(None), |filter| {
16327 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
16328 })
16329 }
16330 }
16331
16332 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
16334 let parsed_keyword = if dialect_of!(self is HiveDialect) {
16336 if self.parse_keyword(Keyword::DEFAULT) {
16338 return Ok(Statement::Use(Use::Default));
16339 }
16340 None } else if dialect_of!(self is DatabricksDialect) {
16342 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
16343 } else if dialect_of!(self is SnowflakeDialect) {
16344 self.parse_one_of_keywords(&[
16345 Keyword::DATABASE,
16346 Keyword::SCHEMA,
16347 Keyword::WAREHOUSE,
16348 Keyword::ROLE,
16349 Keyword::SECONDARY,
16350 ])
16351 } else {
16352 None };
16354
16355 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
16356 self.parse_secondary_roles()?
16357 } else {
16358 let obj_name = self.parse_object_name(false)?;
16359 match parsed_keyword {
16360 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
16361 Some(Keyword::DATABASE) => Use::Database(obj_name),
16362 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
16363 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
16364 Some(Keyword::ROLE) => Use::Role(obj_name),
16365 _ => Use::Object(obj_name),
16366 }
16367 };
16368
16369 Ok(Statement::Use(result))
16370 }
16371
16372 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
16373 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
16374 if self.parse_keyword(Keyword::NONE) {
16375 Ok(Use::SecondaryRoles(SecondaryRoles::None))
16376 } else if self.parse_keyword(Keyword::ALL) {
16377 Ok(Use::SecondaryRoles(SecondaryRoles::All))
16378 } else {
16379 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16380 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
16381 }
16382 }
16383
16384 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
16386 let relation = self.parse_table_factor()?;
16387 let joins = self.parse_joins()?;
16391 Ok(TableWithJoins { relation, joins })
16392 }
16393
    /// Parses zero or more JOIN clauses following a table factor.
    ///
    /// Handles CROSS JOIN / CROSS APPLY, OUTER APPLY, ASOF JOIN, and the
    /// NATURAL / INNER / LEFT / RIGHT / FULL / SEMI / ANTI / STRAIGHT_JOIN
    /// family. Stops (without consuming) at the first token that does not
    /// start a join clause.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix on any join kind.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow an explicit constraint on CROSS JOIN;
                // in that case re-wrap the operator with the parsed constraint.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // OUTER on its own only forms `OUTER APPLY`.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // `ASOF JOIN <relation> MATCH_CONDITION (<expr>) [constraint]`.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) the next keyword to select the
                // join type; tokens are consumed inside the chosen arm.
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a `JoinOperator` variant constructor that
                // is later applied to the parsed join constraint.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume LEFT/RIGHT, then an optional qualifier.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        // Consume ANTI, then require JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        // Consume SEMI, then require JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // Consume FULL, skip optional OUTER, require JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // Bare OUTER without a preceding LEFT/RIGHT/FULL.
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        // Consume STRAIGHT_JOIN (MySQL).
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: stop collecting joins.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // For dialects that are NOT left-associative without parens:
                // if another join keyword follows immediately, fold the rest
                // of the chain into a nested join before reading this join's
                // constraint (effectively right-associating via recursion).
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16562
16563 fn peek_parens_less_nested_join(&self) -> bool {
16564 matches!(
16565 self.peek_token_ref().token,
16566 Token::Word(Word {
16567 keyword: Keyword::JOIN
16568 | Keyword::INNER
16569 | Keyword::LEFT
16570 | Keyword::RIGHT
16571 | Keyword::FULL,
16572 ..
16573 })
16574 )
16575 }
16576
16577 #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
16579 pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16580 let _guard = self.recursion_counter.try_decrease()?;
16581 if self.parse_keyword(Keyword::LATERAL) {
16582 if self.consume_token(&Token::LParen) {
16584 self.parse_derived_table_factor(Lateral)
16585 } else {
16586 let name = self.parse_object_name(false)?;
16587 self.expect_token(&Token::LParen)?;
16588 let args = self.parse_optional_args()?;
16589 let alias = self.maybe_parse_table_alias()?;
16590 Ok(TableFactor::Function {
16591 lateral: true,
16592 name,
16593 args,
16594 alias,
16595 })
16596 }
16597 } else if self.parse_keyword(Keyword::TABLE) {
16598 self.expect_token(&Token::LParen)?;
16600 let expr = self.parse_expr()?;
16601 self.expect_token(&Token::RParen)?;
16602 let alias = self.maybe_parse_table_alias()?;
16603 Ok(TableFactor::TableFunction { expr, alias })
16604 } else if self.consume_token(&Token::LParen) {
16605 if let Some(mut table) =
16627 self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
16628 {
16629 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
16630 {
16631 table = match kw {
16632 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
16633 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
16634 unexpected_keyword => return Err(ParserError::ParserError(
16635 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
16636 )),
16637 }
16638 }
16639 return Ok(table);
16640 }
16641
16642 let mut table_and_joins = self.parse_table_and_joins()?;
16649
16650 #[allow(clippy::if_same_then_else)]
16651 if !table_and_joins.joins.is_empty() {
16652 self.expect_token(&Token::RParen)?;
16653 let alias = self.maybe_parse_table_alias()?;
16654 Ok(TableFactor::NestedJoin {
16655 table_with_joins: Box::new(table_and_joins),
16656 alias,
16657 }) } else if let TableFactor::NestedJoin {
16659 table_with_joins: _,
16660 alias: _,
16661 } = &table_and_joins.relation
16662 {
16663 self.expect_token(&Token::RParen)?;
16666 let alias = self.maybe_parse_table_alias()?;
16667 Ok(TableFactor::NestedJoin {
16668 table_with_joins: Box::new(table_and_joins),
16669 alias,
16670 })
16671 } else if self.dialect.supports_parens_around_table_factor() {
16672 self.expect_token(&Token::RParen)?;
16679
16680 if let Some(outer_alias) = self.maybe_parse_table_alias()? {
16681 match &mut table_and_joins.relation {
16684 TableFactor::Derived { alias, .. }
16685 | TableFactor::Table { alias, .. }
16686 | TableFactor::Function { alias, .. }
16687 | TableFactor::UNNEST { alias, .. }
16688 | TableFactor::JsonTable { alias, .. }
16689 | TableFactor::XmlTable { alias, .. }
16690 | TableFactor::OpenJsonTable { alias, .. }
16691 | TableFactor::TableFunction { alias, .. }
16692 | TableFactor::Pivot { alias, .. }
16693 | TableFactor::Unpivot { alias, .. }
16694 | TableFactor::MatchRecognize { alias, .. }
16695 | TableFactor::SemanticView { alias, .. }
16696 | TableFactor::NestedJoin { alias, .. } => {
16697 if let Some(inner_alias) = alias {
16699 return Err(ParserError::ParserError(format!(
16700 "duplicate alias {inner_alias}"
16701 )));
16702 }
16703 alias.replace(outer_alias);
16707 }
16708 };
16709 }
16710 Ok(table_and_joins.relation)
16712 } else {
16713 self.expected_ref("joined table", self.peek_token_ref())
16716 }
16717 } else if self.dialect.supports_values_as_table_factor()
16718 && matches!(
16719 self.peek_tokens(),
16720 [
16721 Token::Word(Word {
16722 keyword: Keyword::VALUES,
16723 ..
16724 }),
16725 Token::LParen
16726 ]
16727 )
16728 {
16729 self.expect_keyword_is(Keyword::VALUES)?;
16730
16731 let values = SetExpr::Values(self.parse_values(false, false)?);
16735 let alias = self.maybe_parse_table_alias()?;
16736 Ok(TableFactor::Derived {
16737 lateral: false,
16738 subquery: Box::new(Query {
16739 with: None,
16740 body: Box::new(values),
16741 order_by: None,
16742 limit_clause: None,
16743 fetch: None,
16744 locks: vec![],
16745 for_clause: None,
16746 settings: None,
16747 format_clause: None,
16748 pipe_operators: vec![],
16749 }),
16750 alias,
16751 sample: None,
16752 })
16753 } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
16754 && self.parse_keyword(Keyword::UNNEST)
16755 {
16756 self.expect_token(&Token::LParen)?;
16757 let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
16758 self.expect_token(&Token::RParen)?;
16759
16760 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
16761 let alias = match self.maybe_parse_table_alias() {
16762 Ok(Some(alias)) => Some(alias),
16763 Ok(None) => None,
16764 Err(e) => return Err(e),
16765 };
16766
16767 let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
16768 Ok(()) => true,
16769 Err(_) => false,
16770 };
16771
16772 let with_offset_alias = if with_offset {
16773 match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
16774 Ok(Some(alias)) => Some(alias),
16775 Ok(None) => None,
16776 Err(e) => return Err(e),
16777 }
16778 } else {
16779 None
16780 };
16781
16782 Ok(TableFactor::UNNEST {
16783 alias,
16784 array_exprs,
16785 with_offset,
16786 with_offset_alias,
16787 with_ordinality,
16788 })
16789 } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
16790 let json_expr = self.parse_expr()?;
16791 self.expect_token(&Token::Comma)?;
16792 let json_path = self.parse_value()?;
16793 self.expect_keyword_is(Keyword::COLUMNS)?;
16794 self.expect_token(&Token::LParen)?;
16795 let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
16796 self.expect_token(&Token::RParen)?;
16797 self.expect_token(&Token::RParen)?;
16798 let alias = self.maybe_parse_table_alias()?;
16799 Ok(TableFactor::JsonTable {
16800 json_expr,
16801 json_path,
16802 columns,
16803 alias,
16804 })
16805 } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
16806 self.prev_token();
16807 self.parse_open_json_table_factor()
16808 } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
16809 self.prev_token();
16810 self.parse_xml_table_factor()
16811 } else if self.dialect.supports_semantic_view_table_factor()
16812 && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
16813 {
16814 self.parse_semantic_view_table_factor()
16815 } else if self.peek_token_ref().token == Token::AtSign {
16816 self.parse_snowflake_stage_table_factor()
16818 } else {
16819 let name = self.parse_object_name(true)?;
16820
16821 let json_path = match &self.peek_token_ref().token {
16822 Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
16823 _ => None,
16824 };
16825
16826 let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
16827 && self.parse_keyword(Keyword::PARTITION)
16828 {
16829 self.parse_parenthesized_identifiers()?
16830 } else {
16831 vec![]
16832 };
16833
16834 let version = self.maybe_parse_table_version()?;
16836
16837 let args = if self.consume_token(&Token::LParen) {
16839 Some(self.parse_table_function_args()?)
16840 } else {
16841 None
16842 };
16843
16844 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
16845
16846 let mut sample = None;
16847 if self.dialect.supports_table_sample_before_alias() {
16848 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
16849 sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
16850 }
16851 }
16852
16853 let alias = self.maybe_parse_table_alias()?;
16854
16855 let index_hints = if self.dialect.supports_table_hints() {
16857 self.maybe_parse(|p| p.parse_table_index_hints())?
16858 .unwrap_or(vec![])
16859 } else {
16860 vec![]
16861 };
16862
16863 let mut with_hints = vec![];
16865 if self.parse_keyword(Keyword::WITH) {
16866 if self.consume_token(&Token::LParen) {
16867 with_hints = self.parse_comma_separated(Parser::parse_expr)?;
16868 self.expect_token(&Token::RParen)?;
16869 } else {
16870 self.prev_token();
16872 }
16873 };
16874
16875 if !self.dialect.supports_table_sample_before_alias() {
16876 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
16877 sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
16878 }
16879 }
16880
16881 let mut table = TableFactor::Table {
16882 name,
16883 alias,
16884 args,
16885 with_hints,
16886 version,
16887 partitions,
16888 with_ordinality,
16889 json_path,
16890 sample,
16891 index_hints,
16892 };
16893
16894 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
16895 table = match kw {
16896 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
16897 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
16898 unexpected_keyword => return Err(ParserError::ParserError(
16899 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
16900 )),
16901 }
16902 }
16903
16904 if self.dialect.supports_match_recognize()
16905 && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
16906 {
16907 table = self.parse_match_recognize(table)?;
16908 }
16909
16910 Ok(table)
16911 }
16912 }
16913
16914 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16919 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16921
16922 let args = if self.consume_token(&Token::LParen) {
16924 Some(self.parse_table_function_args()?)
16925 } else {
16926 None
16927 };
16928
16929 let alias = self.maybe_parse_table_alias()?;
16930
16931 Ok(TableFactor::Table {
16932 name,
16933 alias,
16934 args,
16935 with_hints: vec![],
16936 version: None,
16937 partitions: vec![],
16938 with_ordinality: false,
16939 json_path: None,
16940 sample: None,
16941 index_hints: vec![],
16942 })
16943 }
16944
16945 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16946 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16947 TableSampleModifier::TableSample
16948 } else if self.parse_keyword(Keyword::SAMPLE) {
16949 TableSampleModifier::Sample
16950 } else {
16951 return Ok(None);
16952 };
16953 self.parse_table_sample(modifier).map(Some)
16954 }
16955
    /// Parses the body of a `TABLESAMPLE`/`SAMPLE` clause; the introducer
    /// keyword has already been consumed (see `maybe_parse_table_sample`).
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method name.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // Remember whether the spec was parenthesized so the closing `)`
        // can be required after it below.
        let parenthesized = self.consume_token(&Token::LParen);

        // Either a `(BUCKET x OUT OF y [ON expr])` form (e.g. Hive), or a
        // plain quantity with an optional unit.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // Quantity: a normal expression, or a bare word kept as a
            // placeholder (e.g. byte-length forms like `100M`).
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            // Optional unit following the quantity.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional `REPEATABLE(<seed>)` / `SEED(<seed>)` suffix.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional `OFFSET <expr>` suffix.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
17050
17051 fn parse_table_sample_seed(
17052 &mut self,
17053 modifier: TableSampleSeedModifier,
17054 ) -> Result<TableSampleSeed, ParserError> {
17055 self.expect_token(&Token::LParen)?;
17056 let value = self.parse_number_value()?;
17057 self.expect_token(&Token::RParen)?;
17058 Ok(TableSampleSeed { modifier, value })
17059 }
17060
17061 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17064 self.expect_token(&Token::LParen)?;
17065 let json_expr = self.parse_expr()?;
17066 let json_path = if self.consume_token(&Token::Comma) {
17067 Some(self.parse_value()?)
17068 } else {
17069 None
17070 };
17071 self.expect_token(&Token::RParen)?;
17072 let columns = if self.parse_keyword(Keyword::WITH) {
17073 self.expect_token(&Token::LParen)?;
17074 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
17075 self.expect_token(&Token::RParen)?;
17076 columns
17077 } else {
17078 Vec::new()
17079 };
17080 let alias = self.maybe_parse_table_alias()?;
17081 Ok(TableFactor::OpenJsonTable {
17082 json_expr,
17083 json_path,
17084 columns,
17085 alias,
17086 })
17087 }
17088
17089 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17090 self.expect_token(&Token::LParen)?;
17091 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
17092 self.expect_token(&Token::LParen)?;
17093 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
17094 self.expect_token(&Token::RParen)?;
17095 self.expect_token(&Token::Comma)?;
17096 namespaces
17097 } else {
17098 vec![]
17099 };
17100 let row_expression = self.parse_expr()?;
17101 let passing = self.parse_xml_passing_clause()?;
17102 self.expect_keyword_is(Keyword::COLUMNS)?;
17103 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
17104 self.expect_token(&Token::RParen)?;
17105 let alias = self.maybe_parse_table_alias()?;
17106 Ok(TableFactor::XmlTable {
17107 namespaces,
17108 row_expression,
17109 passing,
17110 columns,
17111 alias,
17112 })
17113 }
17114
17115 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
17116 let uri = self.parse_expr()?;
17117 self.expect_keyword_is(Keyword::AS)?;
17118 let name = self.parse_identifier()?;
17119 Ok(XmlNamespaceDefinition { uri, name })
17120 }
17121
17122 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
17123 let name = self.parse_identifier()?;
17124
17125 let option = if self.parse_keyword(Keyword::FOR) {
17126 self.expect_keyword(Keyword::ORDINALITY)?;
17127 XmlTableColumnOption::ForOrdinality
17128 } else {
17129 let r#type = self.parse_data_type()?;
17130 let mut path = None;
17131 let mut default = None;
17132
17133 if self.parse_keyword(Keyword::PATH) {
17134 path = Some(self.parse_expr()?);
17135 }
17136
17137 if self.parse_keyword(Keyword::DEFAULT) {
17138 default = Some(self.parse_expr()?);
17139 }
17140
17141 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
17142 if !not_null {
17143 let _ = self.parse_keyword(Keyword::NULL);
17145 }
17146
17147 XmlTableColumnOption::NamedInfo {
17148 r#type,
17149 path,
17150 default,
17151 nullable: !not_null,
17152 }
17153 };
17154 Ok(XmlTableColumn { name, option })
17155 }
17156
17157 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
17158 let mut arguments = vec![];
17159 if self.parse_keyword(Keyword::PASSING) {
17160 loop {
17161 let by_value =
17162 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
17163 let expr = self.parse_expr()?;
17164 let alias = if self.parse_keyword(Keyword::AS) {
17165 Some(self.parse_identifier()?)
17166 } else {
17167 None
17168 };
17169 arguments.push(XmlPassingArgument {
17170 expr,
17171 alias,
17172 by_value,
17173 });
17174 if !self.consume_token(&Token::Comma) {
17175 break;
17176 }
17177 }
17178 }
17179 Ok(XmlPassingClause { arguments })
17180 }
17181
17182 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17184 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
17185 self.expect_token(&Token::LParen)?;
17186
17187 let name = self.parse_object_name(true)?;
17188
17189 let mut dimensions = Vec::new();
17191 let mut metrics = Vec::new();
17192 let mut facts = Vec::new();
17193 let mut where_clause = None;
17194
17195 while self.peek_token_ref().token != Token::RParen {
17196 if self.parse_keyword(Keyword::DIMENSIONS) {
17197 if !dimensions.is_empty() {
17198 return Err(ParserError::ParserError(
17199 "DIMENSIONS clause can only be specified once".to_string(),
17200 ));
17201 }
17202 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17203 } else if self.parse_keyword(Keyword::METRICS) {
17204 if !metrics.is_empty() {
17205 return Err(ParserError::ParserError(
17206 "METRICS clause can only be specified once".to_string(),
17207 ));
17208 }
17209 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17210 } else if self.parse_keyword(Keyword::FACTS) {
17211 if !facts.is_empty() {
17212 return Err(ParserError::ParserError(
17213 "FACTS clause can only be specified once".to_string(),
17214 ));
17215 }
17216 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17217 } else if self.parse_keyword(Keyword::WHERE) {
17218 if where_clause.is_some() {
17219 return Err(ParserError::ParserError(
17220 "WHERE clause can only be specified once".to_string(),
17221 ));
17222 }
17223 where_clause = Some(self.parse_expr()?);
17224 } else {
17225 let tok = self.peek_token_ref();
17226 return parser_err!(
17227 format!(
17228 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
17229 tok.token
17230 ),
17231 tok.span.start
17232 )?;
17233 }
17234 }
17235
17236 self.expect_token(&Token::RParen)?;
17237
17238 let alias = self.maybe_parse_table_alias()?;
17239
17240 Ok(TableFactor::SemanticView {
17241 name,
17242 dimensions,
17243 metrics,
17244 facts,
17245 where_clause,
17246 alias,
17247 })
17248 }
17249
    /// Parses the body of a `MATCH_RECOGNIZE` clause applied to `table`;
    /// the `MATCH_RECOGNIZE` keyword has already been consumed by the caller.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional `PARTITION BY <exprs>`.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional `ORDER BY <order exprs>`.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional `MEASURES <expr> [AS] <alias>, ...` — the alias is
        // required even though the `AS` keyword itself is optional.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional `ONE ROW PER MATCH` or `ALL ROWS PER MATCH` with an
        // optional empty-match mode.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional `AFTER MATCH SKIP ...`; once `AFTER MATCH SKIP` is seen,
        // one of the four skip options is mandatory.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory `PATTERN ( <pattern> )`.
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory `DEFINE <symbol> AS <expr>, ...`.
        self.expect_keyword_is(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
17354
17355 fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17356 match self.next_token().token {
17357 Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
17358 Token::Placeholder(s) if s == "$" => {
17359 Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
17360 }
17361 Token::LBrace => {
17362 self.expect_token(&Token::Minus)?;
17363 let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
17364 self.expect_token(&Token::Minus)?;
17365 self.expect_token(&Token::RBrace)?;
17366 Ok(MatchRecognizePattern::Exclude(symbol))
17367 }
17368 Token::Word(Word {
17369 value,
17370 quote_style: None,
17371 ..
17372 }) if value == "PERMUTE" => {
17373 self.expect_token(&Token::LParen)?;
17374 let symbols = self.parse_comma_separated(|p| {
17375 p.parse_identifier().map(MatchRecognizeSymbol::Named)
17376 })?;
17377 self.expect_token(&Token::RParen)?;
17378 Ok(MatchRecognizePattern::Permute(symbols))
17379 }
17380 Token::LParen => {
17381 let pattern = self.parse_pattern()?;
17382 self.expect_token(&Token::RParen)?;
17383 Ok(MatchRecognizePattern::Group(Box::new(pattern)))
17384 }
17385 _ => {
17386 self.prev_token();
17387 self.parse_identifier()
17388 .map(MatchRecognizeSymbol::Named)
17389 .map(MatchRecognizePattern::Symbol)
17390 }
17391 }
17392 }
17393
    /// Parses a base pattern followed by any number of repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, `{n,m}`.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        // Each iteration wraps `pattern` in one more `Repetition` layer.
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` arrives as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — lower bound with optional upper bound.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: push the token back and stop looping.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17448
17449 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17450 let mut patterns = vec![self.parse_repetition_pattern()?];
17451 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17452 patterns.push(self.parse_repetition_pattern()?);
17453 }
17454 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17455 Ok([pattern]) => Ok(pattern),
17456 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17457 }
17458 }
17459
17460 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17461 let pattern = self.parse_concat_pattern()?;
17462 if self.consume_token(&Token::Pipe) {
17463 match self.parse_pattern()? {
17464 MatchRecognizePattern::Alternation(mut patterns) => {
17466 patterns.insert(0, pattern);
17467 Ok(MatchRecognizePattern::Alternation(patterns))
17468 }
17469 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17470 }
17471 } else {
17472 Ok(pattern)
17473 }
17474 }
17475
17476 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
17478 if self.dialect.supports_table_versioning() {
17479 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
17480 {
17481 let expr = self.parse_expr()?;
17482 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
17483 } else if self.peek_keyword(Keyword::CHANGES) {
17484 return self.parse_table_version_changes().map(Some);
17485 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
17486 let func_name = self.parse_object_name(true)?;
17487 let func = self.parse_function(func_name)?;
17488 return Ok(Some(TableVersion::Function(func)));
17489 } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
17490 let expr = self.parse_expr()?;
17491 return Ok(Some(TableVersion::TimestampAsOf(expr)));
17492 } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
17493 let expr = Expr::Value(self.parse_number_value()?);
17494 return Ok(Some(TableVersion::VersionAsOf(expr)));
17495 }
17496 }
17497 Ok(None)
17498 }
17499
17500 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
17511 let changes_name = self.parse_object_name(true)?;
17512 let changes = self.parse_function(changes_name)?;
17513 let at_name = self.parse_object_name(true)?;
17514 let at = self.parse_function(at_name)?;
17515 let end = if self.peek_keyword(Keyword::END) {
17516 let end_name = self.parse_object_name(true)?;
17517 Some(self.parse_function(end_name)?)
17518 } else {
17519 None
17520 };
17521 Ok(TableVersion::Changes { changes, at, end })
17522 }
17523
17524 pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
17527 if self.parse_keyword(Keyword::NESTED) {
17528 let _has_path_keyword = self.parse_keyword(Keyword::PATH);
17529 let path = self.parse_value()?;
17530 self.expect_keyword_is(Keyword::COLUMNS)?;
17531 let columns = self.parse_parenthesized(|p| {
17532 p.parse_comma_separated(Self::parse_json_table_column_def)
17533 })?;
17534 return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
17535 path,
17536 columns,
17537 }));
17538 }
17539 let name = self.parse_identifier()?;
17540 if self.parse_keyword(Keyword::FOR) {
17541 self.expect_keyword_is(Keyword::ORDINALITY)?;
17542 return Ok(JsonTableColumn::ForOrdinality(name));
17543 }
17544 let r#type = self.parse_data_type()?;
17545 let exists = self.parse_keyword(Keyword::EXISTS);
17546 self.expect_keyword_is(Keyword::PATH)?;
17547 let path = self.parse_value()?;
17548 let mut on_empty = None;
17549 let mut on_error = None;
17550 while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
17551 if self.parse_keyword(Keyword::EMPTY) {
17552 on_empty = Some(error_handling);
17553 } else {
17554 self.expect_keyword_is(Keyword::ERROR)?;
17555 on_error = Some(error_handling);
17556 }
17557 }
17558 Ok(JsonTableColumn::Named(JsonTableNamedColumn {
17559 name,
17560 r#type,
17561 path,
17562 exists,
17563 on_empty,
17564 on_error,
17565 }))
17566 }
17567
17568 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17576 let name = self.parse_identifier()?;
17577 let r#type = self.parse_data_type()?;
17578 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17579 self.next_token();
17580 Some(path)
17581 } else {
17582 None
17583 };
17584 let as_json = self.parse_keyword(Keyword::AS);
17585 if as_json {
17586 self.expect_keyword_is(Keyword::JSON)?;
17587 }
17588 Ok(OpenJsonTableColumn {
17589 name,
17590 r#type,
17591 path,
17592 as_json,
17593 })
17594 }
17595
17596 fn parse_json_table_column_error_handling(
17597 &mut self,
17598 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17599 let res = if self.parse_keyword(Keyword::NULL) {
17600 JsonTableColumnErrorHandling::Null
17601 } else if self.parse_keyword(Keyword::ERROR) {
17602 JsonTableColumnErrorHandling::Error
17603 } else if self.parse_keyword(Keyword::DEFAULT) {
17604 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17605 } else {
17606 return Ok(None);
17607 };
17608 self.expect_keyword_is(Keyword::ON)?;
17609 Ok(Some(res))
17610 }
17611
17612 pub fn parse_derived_table_factor(
17614 &mut self,
17615 lateral: IsLateral,
17616 ) -> Result<TableFactor, ParserError> {
17617 let subquery = self.parse_query()?;
17618 self.expect_token(&Token::RParen)?;
17619 let alias = self.maybe_parse_table_alias()?;
17620
17621 let sample = self
17623 .maybe_parse_table_sample()?
17624 .map(TableSampleKind::AfterTableAlias);
17625
17626 Ok(TableFactor::Derived {
17627 lateral: match lateral {
17628 Lateral => true,
17629 NotLateral => false,
17630 },
17631 subquery,
17632 alias,
17633 sample,
17634 })
17635 }
17636
17637 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17660 let expr = self.parse_expr()?;
17661 let alias = if self.parse_keyword(Keyword::AS) {
17662 Some(self.parse_identifier()?)
17663 } else {
17664 None
17665 };
17666
17667 Ok(ExprWithAlias { expr, alias })
17668 }
17669
17670 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17674 let expr = self.parse_expr()?;
17675 let alias = self.parse_identifier_optional_alias()?;
17676 Ok(ExprWithAlias { expr, alias })
17677 }
17678
17679 fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
17681 let function_name = match self.next_token().token {
17682 Token::Word(w) => Ok(w.value),
17683 _ => self.expected_ref("a function identifier", self.peek_token_ref()),
17684 }?;
17685 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
17686 let alias = {
17687 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
17688 kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
17690 }
17691 self.parse_optional_alias_inner(None, validator)?
17692 };
17693 Ok(ExprWithAlias { expr, alias })
17694 }
17695
    /// Parses the body of a `PIVOT (...)` clause applied to `table`:
    /// aggregate calls, the `FOR <column(s)> IN (<values>)` spec, and an
    /// optional `DEFAULT ON NULL (<expr>)` and table alias.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions =
            self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // Either a parenthesized column list or a single column expression;
        // parsed below `BETWEEN` precedence so the `IN` keyword is not
        // swallowed as part of the expression.
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        // The pivot value source: `ANY [ORDER BY ...]`, a subquery, or an
        // explicit value list.
        self.expect_token(&Token::LParen)?;
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(
                self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
            )
        };
        self.expect_token(&Token::RParen)?;

        // Optional `DEFAULT ON NULL ( <expr> )`.
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
17752
17753 pub fn parse_unpivot_table_factor(
17755 &mut self,
17756 table: TableFactor,
17757 ) -> Result<TableFactor, ParserError> {
17758 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17759 self.expect_keyword_is(Keyword::NULLS)?;
17760 Some(NullInclusion::IncludeNulls)
17761 } else if self.parse_keyword(Keyword::EXCLUDE) {
17762 self.expect_keyword_is(Keyword::NULLS)?;
17763 Some(NullInclusion::ExcludeNulls)
17764 } else {
17765 None
17766 };
17767 self.expect_token(&Token::LParen)?;
17768 let value = self.parse_expr()?;
17769 self.expect_keyword_is(Keyword::FOR)?;
17770 let name = self.parse_identifier()?;
17771 self.expect_keyword_is(Keyword::IN)?;
17772 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17773 p.parse_expr_with_alias()
17774 })?;
17775 self.expect_token(&Token::RParen)?;
17776 let alias = self.maybe_parse_table_alias()?;
17777 Ok(TableFactor::Unpivot {
17778 table: Box::new(table),
17779 value,
17780 null_inclusion,
17781 name,
17782 columns,
17783 alias,
17784 })
17785 }
17786
17787 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17789 if natural {
17790 Ok(JoinConstraint::Natural)
17791 } else if self.parse_keyword(Keyword::ON) {
17792 let constraint = self.parse_expr()?;
17793 Ok(JoinConstraint::On(constraint))
17794 } else if self.parse_keyword(Keyword::USING) {
17795 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17796 Ok(JoinConstraint::Using(columns))
17797 } else {
17798 Ok(JoinConstraint::None)
17799 }
17801 }
17802
17803 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17805 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17806
17807 self.expect_keyword_is(Keyword::TO)?;
17808 let grantees = self.parse_grantees()?;
17809
17810 let with_grant_option =
17811 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17812
17813 let current_grants =
17814 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17815 Some(CurrentGrantsKind::CopyCurrentGrants)
17816 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17817 Some(CurrentGrantsKind::RevokeCurrentGrants)
17818 } else {
17819 None
17820 };
17821
17822 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17823 Some(self.parse_identifier()?)
17824 } else {
17825 None
17826 };
17827
17828 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17829 Some(self.parse_identifier()?)
17830 } else {
17831 None
17832 };
17833
17834 Ok(Grant {
17835 privileges,
17836 objects,
17837 grantees,
17838 with_grant_option,
17839 as_grantor,
17840 granted_by,
17841 current_grants,
17842 })
17843 }
17844
    /// Parses the comma-separated grantee list of a GRANT/REVOKE statement.
    ///
    /// Each grantee may be prefixed by a type keyword (`ROLE`, `USER`,
    /// `SHARE`, `GROUP`, `PUBLIC`, `DATABASE ROLE`, `APPLICATION [ROLE]`);
    /// when none is given, the type of the previous grantee carries over.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No type keyword: keep the type of the previous grantee.
                grantee_type.clone() };

            // If the dialect treats this type keyword as reserved, it is not
            // a grantee-type marker here: rewind and reuse the word as the
            // grantee name instead.
            // NOTE(review): `prev_token` rewinds a single token; for the
            // two-keyword matches (`DATABASE ROLE`, `APPLICATION ROLE`) this
            // would only rewind the second word — confirm the dialects'
            // reserved grantee types are all single-keyword variants.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // `PUBLIC` has no name component.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // A `namespace:name` form is folded into one identifier.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17912
17913 pub fn parse_grant_deny_revoke_privileges_objects(
17915 &mut self,
17916 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17917 let privileges = if self.parse_keyword(Keyword::ALL) {
17918 Privileges::All {
17919 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17920 }
17921 } else {
17922 let actions = self.parse_actions_list()?;
17923 Privileges::Actions(actions)
17924 };
17925
17926 let objects = if self.parse_keyword(Keyword::ON) {
17927 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17928 Some(GrantObjects::AllTablesInSchema {
17929 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17930 })
17931 } else if self.parse_keywords(&[
17932 Keyword::ALL,
17933 Keyword::EXTERNAL,
17934 Keyword::TABLES,
17935 Keyword::IN,
17936 Keyword::SCHEMA,
17937 ]) {
17938 Some(GrantObjects::AllExternalTablesInSchema {
17939 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17940 })
17941 } else if self.parse_keywords(&[
17942 Keyword::ALL,
17943 Keyword::VIEWS,
17944 Keyword::IN,
17945 Keyword::SCHEMA,
17946 ]) {
17947 Some(GrantObjects::AllViewsInSchema {
17948 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17949 })
17950 } else if self.parse_keywords(&[
17951 Keyword::ALL,
17952 Keyword::MATERIALIZED,
17953 Keyword::VIEWS,
17954 Keyword::IN,
17955 Keyword::SCHEMA,
17956 ]) {
17957 Some(GrantObjects::AllMaterializedViewsInSchema {
17958 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17959 })
17960 } else if self.parse_keywords(&[
17961 Keyword::ALL,
17962 Keyword::FUNCTIONS,
17963 Keyword::IN,
17964 Keyword::SCHEMA,
17965 ]) {
17966 Some(GrantObjects::AllFunctionsInSchema {
17967 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17968 })
17969 } else if self.parse_keywords(&[
17970 Keyword::FUTURE,
17971 Keyword::SCHEMAS,
17972 Keyword::IN,
17973 Keyword::DATABASE,
17974 ]) {
17975 Some(GrantObjects::FutureSchemasInDatabase {
17976 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17977 })
17978 } else if self.parse_keywords(&[
17979 Keyword::FUTURE,
17980 Keyword::TABLES,
17981 Keyword::IN,
17982 Keyword::SCHEMA,
17983 ]) {
17984 Some(GrantObjects::FutureTablesInSchema {
17985 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17986 })
17987 } else if self.parse_keywords(&[
17988 Keyword::FUTURE,
17989 Keyword::EXTERNAL,
17990 Keyword::TABLES,
17991 Keyword::IN,
17992 Keyword::SCHEMA,
17993 ]) {
17994 Some(GrantObjects::FutureExternalTablesInSchema {
17995 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17996 })
17997 } else if self.parse_keywords(&[
17998 Keyword::FUTURE,
17999 Keyword::VIEWS,
18000 Keyword::IN,
18001 Keyword::SCHEMA,
18002 ]) {
18003 Some(GrantObjects::FutureViewsInSchema {
18004 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18005 })
18006 } else if self.parse_keywords(&[
18007 Keyword::FUTURE,
18008 Keyword::MATERIALIZED,
18009 Keyword::VIEWS,
18010 Keyword::IN,
18011 Keyword::SCHEMA,
18012 ]) {
18013 Some(GrantObjects::FutureMaterializedViewsInSchema {
18014 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18015 })
18016 } else if self.parse_keywords(&[
18017 Keyword::ALL,
18018 Keyword::SEQUENCES,
18019 Keyword::IN,
18020 Keyword::SCHEMA,
18021 ]) {
18022 Some(GrantObjects::AllSequencesInSchema {
18023 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18024 })
18025 } else if self.parse_keywords(&[
18026 Keyword::FUTURE,
18027 Keyword::SEQUENCES,
18028 Keyword::IN,
18029 Keyword::SCHEMA,
18030 ]) {
18031 Some(GrantObjects::FutureSequencesInSchema {
18032 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18033 })
18034 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
18035 Some(GrantObjects::ResourceMonitors(
18036 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18037 ))
18038 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
18039 Some(GrantObjects::ComputePools(
18040 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18041 ))
18042 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
18043 Some(GrantObjects::FailoverGroup(
18044 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18045 ))
18046 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
18047 Some(GrantObjects::ReplicationGroup(
18048 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18049 ))
18050 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
18051 Some(GrantObjects::ExternalVolumes(
18052 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18053 ))
18054 } else {
18055 let object_type = self.parse_one_of_keywords(&[
18056 Keyword::SEQUENCE,
18057 Keyword::DATABASE,
18058 Keyword::SCHEMA,
18059 Keyword::TABLE,
18060 Keyword::VIEW,
18061 Keyword::WAREHOUSE,
18062 Keyword::INTEGRATION,
18063 Keyword::VIEW,
18064 Keyword::WAREHOUSE,
18065 Keyword::INTEGRATION,
18066 Keyword::USER,
18067 Keyword::CONNECTION,
18068 Keyword::PROCEDURE,
18069 Keyword::FUNCTION,
18070 Keyword::TYPE,
18071 Keyword::DOMAIN,
18072 ]);
18073 let objects =
18074 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
18075 match object_type {
18076 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
18077 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
18078 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
18079 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
18080 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
18081 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
18082 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
18083 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
18084 Some(Keyword::TYPE) => Some(GrantObjects::Types(objects?)),
18085 Some(Keyword::DOMAIN) => Some(GrantObjects::Domains(objects?)),
18086 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
18087 if let Some(name) = objects?.first() {
18088 self.parse_grant_procedure_or_function(name, &kw)?
18089 } else {
18090 self.expected_ref("procedure or function name", self.peek_token_ref())?
18091 }
18092 }
18093 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
18094 Some(unexpected_keyword) => return Err(ParserError::ParserError(
18095 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
18096 )),
18097 }
18098 }
18099 } else {
18100 None
18101 };
18102
18103 Ok((privileges, objects))
18104 }
18105
18106 fn parse_grant_procedure_or_function(
18107 &mut self,
18108 name: &ObjectName,
18109 kw: &Option<Keyword>,
18110 ) -> Result<Option<GrantObjects>, ParserError> {
18111 let arg_types = if self.consume_token(&Token::LParen) {
18112 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
18113 self.expect_token(&Token::RParen)?;
18114 list
18115 } else {
18116 vec![]
18117 };
18118 match kw {
18119 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
18120 name: name.clone(),
18121 arg_types,
18122 })),
18123 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
18124 name: name.clone(),
18125 arg_types,
18126 })),
18127 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
18128 }
18129 }
18130
    /// Parse a single privilege (action) in a `GRANT`/`DENY`/`REVOKE`
    /// privilege list, e.g. `SELECT (col1, col2)` or `CREATE TABLE`.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Parses an optional parenthesized column list; an absent or empty
        // list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges are tried before single-word privileges that
        // share a prefix (e.g. `READ SESSION` before `READ`, `MANAGE ...`
        // before `MANAGE`), so branch order is significant.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-word privileges, some of which take a trailing object type
        // or column list.
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            // The object type after CREATE is optional.
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            // Unlike CREATE, the MANAGE object type is mandatory.
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
18251
18252 fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
18253 if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
18255 Some(ActionCreateObjectType::ApplicationPackage)
18256 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
18257 Some(ActionCreateObjectType::ComputePool)
18258 } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
18259 Some(ActionCreateObjectType::DataExchangeListing)
18260 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
18261 Some(ActionCreateObjectType::ExternalVolume)
18262 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
18263 Some(ActionCreateObjectType::FailoverGroup)
18264 } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
18265 Some(ActionCreateObjectType::NetworkPolicy)
18266 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
18267 Some(ActionCreateObjectType::OrganiationListing)
18268 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
18269 Some(ActionCreateObjectType::ReplicationGroup)
18270 }
18271 else if self.parse_keyword(Keyword::ACCOUNT) {
18273 Some(ActionCreateObjectType::Account)
18274 } else if self.parse_keyword(Keyword::APPLICATION) {
18275 Some(ActionCreateObjectType::Application)
18276 } else if self.parse_keyword(Keyword::DATABASE) {
18277 Some(ActionCreateObjectType::Database)
18278 } else if self.parse_keyword(Keyword::INTEGRATION) {
18279 Some(ActionCreateObjectType::Integration)
18280 } else if self.parse_keyword(Keyword::ROLE) {
18281 Some(ActionCreateObjectType::Role)
18282 } else if self.parse_keyword(Keyword::SCHEMA) {
18283 Some(ActionCreateObjectType::Schema)
18284 } else if self.parse_keyword(Keyword::SHARE) {
18285 Some(ActionCreateObjectType::Share)
18286 } else if self.parse_keyword(Keyword::USER) {
18287 Some(ActionCreateObjectType::User)
18288 } else if self.parse_keyword(Keyword::WAREHOUSE) {
18289 Some(ActionCreateObjectType::Warehouse)
18290 } else {
18291 None
18292 }
18293 }
18294
18295 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
18296 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
18297 Ok(ActionApplyType::AggregationPolicy)
18298 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
18299 Ok(ActionApplyType::AuthenticationPolicy)
18300 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
18301 Ok(ActionApplyType::JoinPolicy)
18302 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
18303 Ok(ActionApplyType::MaskingPolicy)
18304 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
18305 Ok(ActionApplyType::PackagesPolicy)
18306 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
18307 Ok(ActionApplyType::PasswordPolicy)
18308 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
18309 Ok(ActionApplyType::ProjectionPolicy)
18310 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
18311 Ok(ActionApplyType::RowAccessPolicy)
18312 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
18313 Ok(ActionApplyType::SessionPolicy)
18314 } else if self.parse_keyword(Keyword::TAG) {
18315 Ok(ActionApplyType::Tag)
18316 } else {
18317 self.expected_ref("GRANT APPLY type", self.peek_token_ref())
18318 }
18319 }
18320
18321 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
18322 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
18323 Some(ActionExecuteObjectType::DataMetricFunction)
18324 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
18325 Some(ActionExecuteObjectType::ManagedAlert)
18326 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
18327 Some(ActionExecuteObjectType::ManagedTask)
18328 } else if self.parse_keyword(Keyword::ALERT) {
18329 Some(ActionExecuteObjectType::Alert)
18330 } else if self.parse_keyword(Keyword::TASK) {
18331 Some(ActionExecuteObjectType::Task)
18332 } else {
18333 None
18334 }
18335 }
18336
18337 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
18338 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
18339 Ok(ActionManageType::AccountSupportCases)
18340 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
18341 Ok(ActionManageType::EventSharing)
18342 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
18343 Ok(ActionManageType::ListingAutoFulfillment)
18344 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
18345 Ok(ActionManageType::OrganizationSupportCases)
18346 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
18347 Ok(ActionManageType::UserSupportCases)
18348 } else if self.parse_keyword(Keyword::GRANTS) {
18349 Ok(ActionManageType::Grants)
18350 } else if self.parse_keyword(Keyword::WAREHOUSES) {
18351 Ok(ActionManageType::Warehouses)
18352 } else {
18353 self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
18354 }
18355 }
18356
18357 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
18358 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
18359 Some(ActionModifyType::LogLevel)
18360 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
18361 Some(ActionModifyType::TraceLevel)
18362 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
18363 Some(ActionModifyType::SessionLogLevel)
18364 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
18365 Some(ActionModifyType::SessionTraceLevel)
18366 } else {
18367 None
18368 }
18369 }
18370
18371 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
18372 if self.parse_keyword(Keyword::EXECUTION) {
18373 Some(ActionMonitorType::Execution)
18374 } else if self.parse_keyword(Keyword::SECURITY) {
18375 Some(ActionMonitorType::Security)
18376 } else if self.parse_keyword(Keyword::USAGE) {
18377 Some(ActionMonitorType::Usage)
18378 } else {
18379 None
18380 }
18381 }
18382
18383 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
18385 let mut name = self.parse_object_name(false)?;
18386 if self.dialect.supports_user_host_grantee()
18387 && name.0.len() == 1
18388 && name.0[0].as_ident().is_some()
18389 && self.consume_token(&Token::AtSign)
18390 {
18391 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
18392 let host = self.parse_identifier()?;
18393 Ok(GranteeName::UserHost { user, host })
18394 } else {
18395 Ok(GranteeName::ObjectName(name))
18396 }
18397 }
18398
18399 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
18401 self.expect_keyword(Keyword::DENY)?;
18402
18403 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18404 let objects = match objects {
18405 Some(o) => o,
18406 None => {
18407 return parser_err!(
18408 "DENY statements must specify an object",
18409 self.peek_token_ref().span.start
18410 )
18411 }
18412 };
18413
18414 self.expect_keyword_is(Keyword::TO)?;
18415 let grantees = self.parse_grantees()?;
18416 let cascade = self.parse_cascade_option();
18417 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18418 Some(self.parse_identifier()?)
18419 } else {
18420 None
18421 };
18422
18423 Ok(Statement::Deny(DenyStatement {
18424 privileges,
18425 objects,
18426 grantees,
18427 cascade,
18428 granted_by,
18429 }))
18430 }
18431
18432 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18434 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18435
18436 self.expect_keyword_is(Keyword::FROM)?;
18437 let grantees = self.parse_grantees()?;
18438
18439 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18440 Some(self.parse_identifier()?)
18441 } else {
18442 None
18443 };
18444
18445 let cascade = self.parse_cascade_option();
18446
18447 Ok(Revoke {
18448 privileges,
18449 objects,
18450 grantees,
18451 granted_by,
18452 cascade,
18453 })
18454 }
18455
18456 pub fn parse_replace(
18458 &mut self,
18459 replace_token: TokenWithSpan,
18460 ) -> Result<Statement, ParserError> {
18461 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18462 return parser_err!(
18463 "Unsupported statement REPLACE",
18464 self.peek_token_ref().span.start
18465 );
18466 }
18467
18468 let mut insert = self.parse_insert(replace_token)?;
18469 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18470 *replace_into = true;
18471 }
18472
18473 Ok(insert)
18474 }
18475
18476 fn parse_insert_setexpr_boxed(
18480 &mut self,
18481 insert_token: TokenWithSpan,
18482 ) -> Result<Box<SetExpr>, ParserError> {
18483 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18484 }
18485
    /// Parse an `INSERT` statement (or an `INSERT ... DIRECTORY` variant).
    ///
    /// `insert_token` is the already-consumed `INSERT` (or `REPLACE`) token;
    /// its span is recorded on the resulting AST node. Clauses are consumed
    /// strictly in source order, so the sequence of parse calls below matters.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `INSERT OR REPLACE/ROLLBACK/...` conflict clause.
        let or = self.parse_conflict_clause();
        // Priority modifiers are only recognized for MySQL-like dialects.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        // `INSERT IGNORE` (MySQL-like dialects only).
        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Always false here; `parse_replace` sets it on the returned node.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // `INSERT OVERWRITE [LOCAL] DIRECTORY '<path>' ...` writes a query
        // result to a directory instead of a table.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional `TABLE` keyword before the table name.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional table alias; skipped when the next tokens begin a
            // sub-query or a `DEFAULT`/`VALUES` clause, which would otherwise
            // be mistaken for an alias.
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT ... DEFAULT VALUES`: no column list and no source.
                (vec![], None, vec![], None, None, vec![])
            } else {
                // A parenthesized column list is only attempted when the next
                // tokens do not start a `(SELECT ...)` sub-query.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after `PARTITION`.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    // Trailing FORMAT/SETTINGS clause: no inline source query.
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    // `INSERT ... SET col = val, ...` form.
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // SETTINGS and FORMAT clauses for dialects that support them
            // (`supports_insert_format`).
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL-style `AS row_alias (col_aliases)` for referring to the
            // inserted row in later clauses.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // Either `ON CONFLICT ...` or `ON DUPLICATE KEY UPDATE ...`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Optional conflict target: a named constraint or a
                    // parenthesized column list.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    // `DO NOTHING` or `DO UPDATE SET ... [WHERE ...]`.
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    // After `ON` without `CONFLICT`, only
                    // `DUPLICATE KEY UPDATE <assignments>` is valid.
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                // Multi-table INSERT clauses are not produced by this method.
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18712
18713 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18717 let ident = self.parse_identifier()?;
18718 let values = self
18719 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18720 .unwrap_or_default();
18721
18722 Ok(InputFormatClause { ident, values })
18723 }
18724
    /// Returns `true` if the next two tokens are `(` followed by `SELECT`,
    /// i.e. the start of a parenthesized sub-query. Consumes no tokens.
    fn peek_subquery_start(&mut self) -> bool {
        matches!(
            self.peek_tokens_ref(),
            [
                TokenWithSpan {
                    token: Token::LParen,
                    ..
                },
                TokenWithSpan {
                    token: Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    ..
                },
            ]
        )
    }
18745
    /// Returns `true` if the next two tokens are `(` followed by `SELECT` or
    /// `WITH`, i.e. the start of a parenthesized sub-query or CTE.
    /// Consumes no tokens.
    fn peek_subquery_or_cte_start(&mut self) -> bool {
        matches!(
            self.peek_tokens_ref(),
            [
                TokenWithSpan {
                    token: Token::LParen,
                    ..
                },
                TokenWithSpan {
                    token: Token::Word(Word {
                        keyword: Keyword::SELECT | Keyword::WITH,
                        ..
                    }),
                    ..
                },
            ]
        )
    }
18767
18768 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18769 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18770 Some(SqliteOnConflict::Replace)
18771 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18772 Some(SqliteOnConflict::Rollback)
18773 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18774 Some(SqliteOnConflict::Abort)
18775 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18776 Some(SqliteOnConflict::Fail)
18777 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18778 Some(SqliteOnConflict::Ignore)
18779 } else if self.parse_keyword(Keyword::REPLACE) {
18780 Some(SqliteOnConflict::Replace)
18781 } else {
18782 None
18783 }
18784 }
18785
18786 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18788 if self.parse_keyword(Keyword::PARTITION) {
18789 self.expect_token(&Token::LParen)?;
18790 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18791 self.expect_token(&Token::RParen)?;
18792 Ok(partition_cols)
18793 } else {
18794 Ok(None)
18795 }
18796 }
18797
18798 pub fn parse_load_data_table_format(
18800 &mut self,
18801 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18802 if self.parse_keyword(Keyword::INPUTFORMAT) {
18803 let input_format = self.parse_expr()?;
18804 self.expect_keyword_is(Keyword::SERDE)?;
18805 let serde = self.parse_expr()?;
18806 Ok(Some(HiveLoadDataFormat {
18807 input_format,
18808 serde,
18809 }))
18810 } else {
18811 Ok(None)
18812 }
18813 }
18814
18815 fn parse_update_setexpr_boxed(
18819 &mut self,
18820 update_token: TokenWithSpan,
18821 ) -> Result<Box<SetExpr>, ParserError> {
18822 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18823 }
18824
    /// Parse an `UPDATE` statement. `update_token` is the already-consumed
    /// `UPDATE` token; its span is recorded on the resulting AST node.
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `UPDATE OR REPLACE/...` conflict clause.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // Some dialects place the FROM clause before SET.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        let output = self.maybe_parse_output_clause()?;

        // A FROM clause after SET is only parsed when none appeared before it.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // `ORDER BY` on UPDATE is gated on dialect support; `LIMIT` is always
        // attempted here.
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18886
18887 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18889 let target = self.parse_assignment_target()?;
18890 self.expect_token(&Token::Eq)?;
18891 let value = self.parse_expr()?;
18892 Ok(Assignment { target, value })
18893 }
18894
18895 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18897 if self.consume_token(&Token::LParen) {
18898 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18899 self.expect_token(&Token::RParen)?;
18900 Ok(AssignmentTarget::Tuple(columns))
18901 } else {
18902 let column = self.parse_object_name(false)?;
18903 Ok(AssignmentTarget::ColumnName(column))
18904 }
18905 }
18906
    /// Parse a single function-call argument, which may be named
    /// (`name => expr`, `name := expr`, ...) or unnamed (possibly a
    /// wildcard such as `*` or `t.*`).
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // Speculatively try the named-argument form first; `maybe_parse`
        // rewinds the token stream when the closure fails.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // Dialects that allow an arbitrary expression as the name.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // Otherwise the name must be a plain identifier.
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Fall back to an unnamed argument.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            // A bare `*` may carry EXCLUDE/EXCEPT/REPLACE/RENAME/ILIKE
            // options in dialects that support them.
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                // Only use the options wrapper when at least one option was
                // actually present; otherwise keep the plain wildcard form.
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18956
    /// Parse the operator separating a named function argument from its
    /// value; which operators are accepted depends on the dialect.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        // `name VALUE expr` keyword form.
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not an argument operator: rewind so the caller (usually
                // inside a `maybe_parse`) can back out cleanly.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
18985
18986 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
18988 if self.consume_token(&Token::RParen) {
18989 Ok(vec![])
18990 } else {
18991 let args = self.parse_comma_separated(Parser::parse_function_args)?;
18992 self.expect_token(&Token::RParen)?;
18993 Ok(args)
18994 }
18995 }
18996
    /// Parse the argument list of a table function, which may end with a
    /// `SETTINGS` clause. The opening `(` has already been consumed; this
    /// consumes through the closing `)`.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Collect comma-separated arguments until either a SETTINGS clause
        // or the end of the list is reached.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
19017
    /// Parse the contents of a function-call argument list, including any
    /// argument clauses (ALL/DISTINCT, null treatment, ORDER BY, LIMIT,
    /// HAVING, SEPARATOR, ON OVERFLOW, and JSON NULL/RETURNING clauses).
    /// The opening `(` has already been consumed; this consumes through
    /// the closing `)`.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON clauses may appear before the arguments (empty-arg forms).
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list `()`, possibly with the clauses parsed above.
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // e.g. `FIRST_VALUE(x IGNORE NULLS)` where supported.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // `HAVING MIN|MAX <expr>` aggregate clause (BigQuery/generic only).
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // `expect_one_of_keywords` only returns the listed keywords,
                // so this arm is a defensive internal error.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // `SEPARATOR '<str>'` (MySQL/generic only).
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        // LISTAGG-style `ON OVERFLOW` handling.
        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON clauses may also follow the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
19111
19112 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
19113 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
19114 Some(JsonNullClause::AbsentOnNull)
19115 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
19116 Some(JsonNullClause::NullOnNull)
19117 } else {
19118 None
19119 }
19120 }
19121
19122 fn maybe_parse_json_returning_clause(
19123 &mut self,
19124 ) -> Result<Option<JsonReturningClause>, ParserError> {
19125 if self.parse_keyword(Keyword::RETURNING) {
19126 let data_type = self.parse_data_type()?;
19127 Ok(Some(JsonReturningClause { data_type }))
19128 } else {
19129 Ok(None)
19130 }
19131 }
19132
19133 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
19134 let loc = self.peek_token_ref().span.start;
19135 match (
19136 self.parse_keyword(Keyword::ALL),
19137 self.parse_keyword(Keyword::DISTINCT),
19138 ) {
19139 (true, false) => Ok(Some(DuplicateTreatment::All)),
19140 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
19141 (false, false) => Ok(None),
19142 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
19143 }
19144 }
19145
    /// Parse a single projection item of a SELECT list: a wildcard
    /// (`*`, `t.*`), an expression, or an expression with alias(es).
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Some dialects reserve keywords that may prefix a select item;
        // such a prefix is folded into the expression further below.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted bare `from` here almost certainly means the
            // projection is missing; report it rather than mis-parse.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // `alias = expr` assignment-style aliasing (T-SQL style),
            // only when the left side is a plain identifier.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The match guard above proved `left` is an identifier.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — qualified wildcard over an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (alias1, alias2, ...)` multi-column alias.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
19220
    /// Parse the dialect-specific options that may follow a wildcard in a
    /// SELECT list: `ILIKE`, `EXCLUDE`, `EXCEPT`, `REPLACE`, `RENAME`, and
    /// an optional alias. Each option is only probed when the dialect
    /// supports it, so foreign keywords are left for later clauses.
    ///
    /// `wildcard_token` is the already-consumed `*` token, kept for spans.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // EXCLUDE is only probed when no ILIKE was parsed.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if self.dialect.supports_select_wildcard_replace() {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if self.dialect.supports_select_wildcard_rename() {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
            self.maybe_parse_select_item_alias()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
            opt_alias,
        })
    }
19271
19272 pub fn parse_optional_select_item_ilike(
19276 &mut self,
19277 ) -> Result<Option<IlikeSelectItem>, ParserError> {
19278 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
19279 let next_token = self.next_token();
19280 let pattern = match next_token.token {
19281 Token::SingleQuotedString(s) => s,
19282 _ => return self.expected("ilike pattern", next_token),
19283 };
19284 Some(IlikeSelectItem { pattern })
19285 } else {
19286 None
19287 };
19288 Ok(opt_ilike)
19289 }
19290
19291 pub fn parse_optional_select_item_exclude(
19295 &mut self,
19296 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
19297 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
19298 if self.consume_token(&Token::LParen) {
19299 let columns =
19300 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
19301 self.expect_token(&Token::RParen)?;
19302 Some(ExcludeSelectItem::Multiple(columns))
19303 } else {
19304 let column = self.parse_object_name(false)?;
19305 Some(ExcludeSelectItem::Single(column))
19306 }
19307 } else {
19308 None
19309 };
19310
19311 Ok(opt_exclude)
19312 }
19313
19314 pub fn parse_optional_select_item_except(
19318 &mut self,
19319 ) -> Result<Option<ExceptSelectItem>, ParserError> {
19320 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
19321 if self.peek_token_ref().token == Token::LParen {
19322 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
19323 match &idents[..] {
19324 [] => {
19325 return self.expected_ref(
19326 "at least one column should be parsed by the expect clause",
19327 self.peek_token_ref(),
19328 )?;
19329 }
19330 [first, idents @ ..] => Some(ExceptSelectItem {
19331 first_element: first.clone(),
19332 additional_elements: idents.to_vec(),
19333 }),
19334 }
19335 } else {
19336 let ident = self.parse_identifier()?;
19338 Some(ExceptSelectItem {
19339 first_element: ident,
19340 additional_elements: vec![],
19341 })
19342 }
19343 } else {
19344 None
19345 };
19346
19347 Ok(opt_except)
19348 }
19349
19350 pub fn parse_optional_select_item_rename(
19352 &mut self,
19353 ) -> Result<Option<RenameSelectItem>, ParserError> {
19354 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
19355 if self.consume_token(&Token::LParen) {
19356 let idents =
19357 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
19358 self.expect_token(&Token::RParen)?;
19359 Some(RenameSelectItem::Multiple(idents))
19360 } else {
19361 let ident = self.parse_identifier_with_alias()?;
19362 Some(RenameSelectItem::Single(ident))
19363 }
19364 } else {
19365 None
19366 };
19367
19368 Ok(opt_rename)
19369 }
19370
19371 pub fn parse_optional_select_item_replace(
19373 &mut self,
19374 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
19375 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
19376 if self.consume_token(&Token::LParen) {
19377 let items = self.parse_comma_separated(|parser| {
19378 Ok(Box::new(parser.parse_replace_elements()?))
19379 })?;
19380 self.expect_token(&Token::RParen)?;
19381 Some(ReplaceSelectItem { items })
19382 } else {
19383 let tok = self.next_token();
19384 return self.expected("( after REPLACE but", tok);
19385 }
19386 } else {
19387 None
19388 };
19389
19390 Ok(opt_replace)
19391 }
19392 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
19394 let expr = self.parse_expr()?;
19395 let as_keyword = self.parse_keyword(Keyword::AS);
19396 let ident = self.parse_identifier()?;
19397 Ok(ReplaceSelectElement {
19398 expr,
19399 column_name: ident,
19400 as_keyword,
19401 })
19402 }
19403
19404 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19407 if self.parse_keyword(Keyword::ASC) {
19408 Some(true)
19409 } else if self.parse_keyword(Keyword::DESC) {
19410 Some(false)
19411 } else {
19412 None
19413 }
19414 }
19415
19416 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19418 self.parse_order_by_expr_inner(false)
19419 .map(|(order_by, _)| order_by)
19420 }
19421
19422 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19424 self.parse_order_by_expr_inner(true)
19425 .map(|(column, operator_class)| IndexColumn {
19426 column,
19427 operator_class,
19428 })
19429 }
19430
    /// Shared implementation of ORDER BY expression parsing.
    ///
    /// When `with_operator_class` is true (CREATE INDEX columns) an
    /// optional operator class may follow the expression; otherwise the
    /// second element of the returned pair is always `None`.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // An ASC/DESC/NULLS/WITH keyword next means there is no
            // operator class; otherwise speculatively try to read one
            // (`maybe_parse` rewinds on failure).
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // `WITH FILL [FROM ..] [TO ..] [STEP ..]`, when supported.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19471
19472 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19473 let asc = self.parse_asc_desc();
19474
19475 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19476 Some(true)
19477 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19478 Some(false)
19479 } else {
19480 None
19481 };
19482
19483 Ok(OrderByOptions { asc, nulls_first })
19484 }
19485
19486 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19490 let from = if self.parse_keyword(Keyword::FROM) {
19491 Some(self.parse_expr()?)
19492 } else {
19493 None
19494 };
19495
19496 let to = if self.parse_keyword(Keyword::TO) {
19497 Some(self.parse_expr()?)
19498 } else {
19499 None
19500 };
19501
19502 let step = if self.parse_keyword(Keyword::STEP) {
19503 Some(self.parse_expr()?)
19504 } else {
19505 None
19506 };
19507
19508 Ok(WithFill { from, to, step })
19509 }
19510
19511 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19514 if !self.parse_keyword(Keyword::INTERPOLATE) {
19515 return Ok(None);
19516 }
19517
19518 if self.consume_token(&Token::LParen) {
19519 let interpolations =
19520 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19521 self.expect_token(&Token::RParen)?;
19522 return Ok(Some(Interpolate {
19524 exprs: Some(interpolations),
19525 }));
19526 }
19527
19528 Ok(Some(Interpolate { exprs: None }))
19530 }
19531
19532 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19534 let column = self.parse_identifier()?;
19535 let expr = if self.parse_keyword(Keyword::AS) {
19536 Some(self.parse_expr()?)
19537 } else {
19538 None
19539 };
19540 Ok(InterpolateExpr { column, expr })
19541 }
19542
19543 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
19546 let quantity = if self.consume_token(&Token::LParen) {
19547 let quantity = self.parse_expr()?;
19548 self.expect_token(&Token::RParen)?;
19549 Some(TopQuantity::Expr(quantity))
19550 } else {
19551 let next_token = self.next_token();
19552 let quantity = match next_token.token {
19553 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
19554 _ => self.expected("literal int", next_token)?,
19555 };
19556 Some(TopQuantity::Constant(quantity))
19557 };
19558
19559 let percent = self.parse_keyword(Keyword::PERCENT);
19560
19561 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
19562
19563 Ok(Top {
19564 with_ties,
19565 percent,
19566 quantity,
19567 })
19568 }
19569
19570 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19572 if self.parse_keyword(Keyword::ALL) {
19573 Ok(None)
19574 } else {
19575 Ok(Some(self.parse_expr()?))
19576 }
19577 }
19578
19579 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19581 let value = self.parse_expr()?;
19582 let rows = if self.parse_keyword(Keyword::ROW) {
19583 OffsetRows::Row
19584 } else if self.parse_keyword(Keyword::ROWS) {
19585 OffsetRows::Rows
19586 } else {
19587 OffsetRows::None
19588 };
19589 Ok(Offset { value, rows })
19590 }
19591
19592 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
19594 let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
19595
19596 let (quantity, percent) = if self
19597 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
19598 .is_some()
19599 {
19600 (None, false)
19601 } else {
19602 let quantity = Expr::Value(self.parse_value()?);
19603 let percent = self.parse_keyword(Keyword::PERCENT);
19604 let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
19605 (Some(quantity), percent)
19606 };
19607
19608 let with_ties = if self.parse_keyword(Keyword::ONLY) {
19609 false
19610 } else {
19611 self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
19612 };
19613
19614 Ok(Fetch {
19615 with_ties,
19616 percent,
19617 quantity,
19618 })
19619 }
19620
19621 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
19623 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
19624 Keyword::UPDATE => LockType::Update,
19625 Keyword::SHARE => LockType::Share,
19626 unexpected_keyword => return Err(ParserError::ParserError(
19627 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
19628 )),
19629 };
19630 let of = if self.parse_keyword(Keyword::OF) {
19631 Some(self.parse_object_name(false)?)
19632 } else {
19633 None
19634 };
19635 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
19636 Some(NonBlock::Nowait)
19637 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
19638 Some(NonBlock::SkipLocked)
19639 } else {
19640 None
19641 };
19642 Ok(LockClause {
19643 lock_type,
19644 of,
19645 nonblock,
19646 })
19647 }
19648
19649 pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
19651 self.expect_keyword(Keyword::LOCK)?;
19652
19653 if self.peek_keyword(Keyword::TABLES) {
19654 return self.expected_ref("TABLE or a table name", self.peek_token_ref());
19655 }
19656
19657 let _ = self.parse_keyword(Keyword::TABLE);
19658 let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
19659 let lock_mode = if self.parse_keyword(Keyword::IN) {
19660 let lock_mode = self.parse_lock_table_mode()?;
19661 self.expect_keyword(Keyword::MODE)?;
19662 Some(lock_mode)
19663 } else {
19664 None
19665 };
19666 let nowait = self.parse_keyword(Keyword::NOWAIT);
19667
19668 Ok(Lock {
19669 tables,
19670 lock_mode,
19671 nowait,
19672 })
19673 }
19674
19675 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19676 let only = self.parse_keyword(Keyword::ONLY);
19677 let name = self.parse_object_name(false)?;
19678 let has_asterisk = self.consume_token(&Token::Mul);
19679
19680 Ok(LockTableTarget {
19681 name,
19682 only,
19683 has_asterisk,
19684 })
19685 }
19686
19687 fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
19688 if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
19689 Ok(LockTableMode::AccessShare)
19690 } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
19691 Ok(LockTableMode::AccessExclusive)
19692 } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
19693 Ok(LockTableMode::RowShare)
19694 } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
19695 Ok(LockTableMode::RowExclusive)
19696 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
19697 Ok(LockTableMode::ShareUpdateExclusive)
19698 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
19699 Ok(LockTableMode::ShareRowExclusive)
19700 } else if self.parse_keyword(Keyword::SHARE) {
19701 Ok(LockTableMode::Share)
19702 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19703 Ok(LockTableMode::Exclusive)
19704 } else {
19705 self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
19706 }
19707 }
19708
19709 pub fn parse_values(
19711 &mut self,
19712 allow_empty: bool,
19713 value_keyword: bool,
19714 ) -> Result<Values, ParserError> {
19715 let mut explicit_row = false;
19716
19717 let rows = self.parse_comma_separated(|parser| {
19718 if parser.parse_keyword(Keyword::ROW) {
19719 explicit_row = true;
19720 }
19721
19722 parser.expect_token(&Token::LParen)?;
19723 if allow_empty && parser.peek_token().token == Token::RParen {
19724 parser.next_token();
19725 Ok(vec![])
19726 } else {
19727 let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
19728 parser.expect_token(&Token::RParen)?;
19729 Ok(exprs)
19730 }
19731 })?;
19732 Ok(Values {
19733 explicit_row,
19734 rows,
19735 value_keyword,
19736 })
19737 }
19738
19739 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19741 self.expect_keyword_is(Keyword::TRANSACTION)?;
19742 Ok(Statement::StartTransaction {
19743 modes: self.parse_transaction_modes()?,
19744 begin: false,
19745 transaction: Some(BeginTransactionKind::Transaction),
19746 modifier: None,
19747 statements: vec![],
19748 exception: None,
19749 has_end_keyword: false,
19750 })
19751 }
19752
19753 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19755 if !self.dialect.supports_start_transaction_modifier() {
19756 None
19757 } else if self.parse_keyword(Keyword::DEFERRED) {
19758 Some(TransactionModifier::Deferred)
19759 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19760 Some(TransactionModifier::Immediate)
19761 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19762 Some(TransactionModifier::Exclusive)
19763 } else if self.parse_keyword(Keyword::TRY) {
19764 Some(TransactionModifier::Try)
19765 } else if self.parse_keyword(Keyword::CATCH) {
19766 Some(TransactionModifier::Catch)
19767 } else {
19768 None
19769 }
19770 }
19771
19772 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19774 let modifier = self.parse_transaction_modifier();
19775 let transaction =
19776 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19777 {
19778 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19779 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19780 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19781 _ => None,
19782 };
19783 Ok(Statement::StartTransaction {
19784 modes: self.parse_transaction_modes()?,
19785 begin: true,
19786 transaction,
19787 modifier,
19788 statements: vec![],
19789 exception: None,
19790 has_end_keyword: false,
19791 })
19792 }
19793
    /// Parse the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block (the `BEGIN` keyword is already consumed).
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler is `WHEN <ident> [OR <ident>]... THEN <stmts>`.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                let mut idents = Vec::new();

                // Collect error identifiers, separated by `OR`, until
                // `THEN` is consumed.
                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // `maybe_parse` rewinds if there is no `OR` here.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19839
19840 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19842 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19843 None
19844 } else if self.parse_keyword(Keyword::TRY) {
19845 Some(TransactionModifier::Try)
19846 } else if self.parse_keyword(Keyword::CATCH) {
19847 Some(TransactionModifier::Catch)
19848 } else {
19849 None
19850 };
19851 Ok(Statement::Commit {
19852 chain: self.parse_commit_rollback_chain()?,
19853 end: true,
19854 modifier,
19855 })
19856 }
19857
    /// Parse zero or more comma-separated transaction modes
    /// (`ISOLATION LEVEL ...`, `READ ONLY`, `READ WRITE`).
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        // After a comma another mode is mandatory; before the first mode
        // the list may simply be absent.
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19896
19897 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19899 Ok(Statement::Commit {
19900 chain: self.parse_commit_rollback_chain()?,
19901 end: false,
19902 modifier: None,
19903 })
19904 }
19905
19906 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19908 let chain = self.parse_commit_rollback_chain()?;
19909 let savepoint = self.parse_rollback_savepoint()?;
19910
19911 Ok(Statement::Rollback { chain, savepoint })
19912 }
19913
19914 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19916 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19917 if self.parse_keyword(Keyword::AND) {
19918 let chain = !self.parse_keyword(Keyword::NO);
19919 self.expect_keyword_is(Keyword::CHAIN)?;
19920 Ok(chain)
19921 } else {
19922 Ok(false)
19923 }
19924 }
19925
19926 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19928 if self.parse_keyword(Keyword::TO) {
19929 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19930 let savepoint = self.parse_identifier()?;
19931
19932 Ok(Some(savepoint))
19933 } else {
19934 Ok(None)
19935 }
19936 }
19937
    /// Parses a T-SQL `RAISERROR (message, severity, state [, args...])
    /// [WITH option, ...]` statement (the `RAISERROR` keyword has already
    /// been consumed by the caller).
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        // The first three arguments are mandatory and comma-separated.
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional substitution arguments follow the mandatory third argument.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        // Optional `WITH LOG | NOWAIT | SETERROR` option list.
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
19965
    /// Parses a single `RAISERROR ... WITH` option: `LOG`, `NOWAIT`, or
    /// `SETERROR`.
    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
            Keyword::LOG => Ok(RaisErrorOption::Log),
            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
            // Unreachable in practice: `expect_one_of_keywords` can only
            // return one of the three keywords above. Kept as a defensive
            // fallback rather than a panic.
            _ => self.expected_ref(
                "LOG, NOWAIT OR SETERROR raiserror option",
                self.peek_token_ref(),
            ),
        }
    }
19978
    /// Parses a T-SQL `THROW [error_number, message, state]` statement.
    ///
    /// A bare `THROW` with no arguments re-raises the current error, so
    /// either all three parts are present or none are.
    pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
        self.expect_keyword_is(Keyword::THROW)?;

        // `maybe_parse` backtracks when no expression follows, which is how
        // the parameterless re-throw form is recognized.
        let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
        let (message, state) = if error_number.is_some() {
            // Once an error number is present, message and state are mandatory.
            self.expect_token(&Token::Comma)?;
            let message = Box::new(self.parse_expr()?);
            self.expect_token(&Token::Comma)?;
            let state = Box::new(self.parse_expr()?);
            (Some(message), Some(state))
        } else {
            (None, None)
        };

        Ok(ThrowStatement {
            error_number,
            message,
            state,
        })
    }
20002
20003 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
20005 let prepare = self.parse_keyword(Keyword::PREPARE);
20006 let name = self.parse_identifier()?;
20007 Ok(Statement::Deallocate { name, prepare })
20008 }
20009
    /// Parses an `EXECUTE`/`EXEC` statement (keyword already consumed),
    /// covering both "execute a named prepared statement / procedure" and
    /// the dialect-specific `EXECUTE IMMEDIATE (...)` form.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        // `IMMEDIATE` is only consumed when the dialect supports it; the
        // short-circuit leaves the word available as an object name otherwise.
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // There is no object name for `EXECUTE IMMEDIATE` or when the
        // parameter list starts right away (`EXECUTE (...)`).
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Without parentheses the parameter list has no closing delimiter, so
        // it ends at EOF, a semicolon, or one of the trailing clause keywords
        // (USING / OUTPUT / DEFAULT).
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        // `parse_comma_separated0` allows the parameter list to be empty.
        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
20069
20070 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
20072 let name = self.parse_identifier()?;
20073
20074 let mut data_types = vec![];
20075 if self.consume_token(&Token::LParen) {
20076 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
20077 self.expect_token(&Token::RParen)?;
20078 }
20079
20080 self.expect_keyword_is(Keyword::AS)?;
20081 let statement = Box::new(self.parse_statement()?);
20082 Ok(Statement::Prepare {
20083 name,
20084 data_types,
20085 statement,
20086 })
20087 }
20088
20089 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
20091 self.expect_keyword(Keyword::UNLOAD)?;
20092 self.expect_token(&Token::LParen)?;
20093 let (query, query_text) =
20094 if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
20095 (None, Some(self.parse_literal_string()?))
20096 } else {
20097 (Some(self.parse_query()?), None)
20098 };
20099 self.expect_token(&Token::RParen)?;
20100
20101 self.expect_keyword_is(Keyword::TO)?;
20102 let to = self.parse_identifier()?;
20103 let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
20104 Some(self.parse_iam_role_kind()?)
20105 } else {
20106 None
20107 };
20108 let with = self.parse_options(Keyword::WITH)?;
20109 let mut options = vec![];
20110 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
20111 options.push(opt);
20112 }
20113 Ok(Statement::Unload {
20114 query,
20115 query_text,
20116 to,
20117 auth,
20118 with,
20119 options,
20120 })
20121 }
20122
20123 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
20124 let temporary = self
20125 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
20126 .is_some();
20127 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
20128 let table = self.parse_keyword(Keyword::TABLE);
20129 let name = self.parse_object_name(false)?;
20130
20131 Ok(SelectInto {
20132 temporary,
20133 unlogged,
20134 table,
20135 name,
20136 })
20137 }
20138
20139 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
20140 let v = self.parse_value()?;
20141 match &v.value {
20142 Value::SingleQuotedString(_) => Ok(v),
20143 Value::DoubleQuotedString(_) => Ok(v),
20144 Value::Number(_, _) => Ok(v),
20145 Value::Placeholder(_) => Ok(v),
20146 _ => {
20147 self.prev_token();
20148 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
20149 }
20150 }
20151 }
20152
20153 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
20155 let name = self.parse_object_name(false)?;
20156 if self.consume_token(&Token::LParen) {
20157 let value = self.parse_pragma_value()?;
20158 self.expect_token(&Token::RParen)?;
20159 Ok(Statement::Pragma {
20160 name,
20161 value: Some(value),
20162 is_eq: false,
20163 })
20164 } else if self.consume_token(&Token::Eq) {
20165 Ok(Statement::Pragma {
20166 name,
20167 value: Some(self.parse_pragma_value()?),
20168 is_eq: true,
20169 })
20170 } else {
20171 Ok(Statement::Pragma {
20172 name,
20173 value: None,
20174 is_eq: false,
20175 })
20176 }
20177 }
20178
20179 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
20181 let extension_name = self.parse_identifier()?;
20182
20183 Ok(Statement::Install { extension_name })
20184 }
20185
    /// Parses a `LOAD` statement (keyword already consumed): either
    /// `LOAD extension_name` for dialects with loadable extensions, or
    /// Hive-style `LOAD DATA [LOCAL] INPATH ... INTO TABLE ...`.
    ///
    /// NOTE(review): in the `else if` below, `parse_keyword(Keyword::DATA)`
    /// runs before the dialect-support check, so `DATA` is consumed even on
    /// dialects without LOAD DATA support and the trailing error then points
    /// past it. Confirm before reordering — existing error-message tests may
    /// depend on this exact behavior.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected_ref(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token_ref(),
            )
        }
    }
20216
    /// Parses an `OPTIMIZE [TABLE]` statement (the `OPTIMIZE` keyword has
    /// already been consumed). Clauses are parsed in this fixed order:
    /// `[TABLE] name [ON CLUSTER c] [PARTITION expr | PARTITION ID id]
    /// [FINAL] [DEDUPLICATE [BY expr]] [WHERE pred] [ZORDER BY (cols)]`.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        let partition = if self.parse_keyword(Keyword::PARTITION) {
            // `PARTITION ID 'x'` names the partition directly; otherwise an
            // arbitrary partition expression follows.
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            // Bare `DEDUPLICATE` deduplicates on all columns; `DEDUPLICATE BY`
            // takes an expression selecting the comparison columns.
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // `ZORDER BY (...)` requires a parenthesized column/expression list.
        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
20285
    /// Parses `CREATE [TEMPORARY] SEQUENCE` (keywords through `SEQUENCE`
    /// already consumed; `temporary` reflects what the caller saw):
    /// `[IF NOT EXISTS] name [AS data_type] [options...] [OWNED BY target]`.
    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // Optional `AS <data type>` clause.
        let mut data_type: Option<DataType> = None;
        if self.parse_keywords(&[Keyword::AS]) {
            data_type = Some(self.parse_data_type()?)
        }
        let sequence_options = self.parse_create_sequence_options()?;
        // `OWNED BY NONE` is represented as an ObjectName literally named
        // "NONE" (rather than `None`) so the clause round-trips to SQL.
        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
            if self.parse_keywords(&[Keyword::NONE]) {
                Some(ObjectName::from(vec![Ident::new("NONE")]))
            } else {
                Some(self.parse_object_name(false)?)
            }
        } else {
            None
        };
        Ok(Statement::CreateSequence {
            temporary,
            if_not_exists,
            name,
            data_type,
            sequence_options,
            owned_by,
        })
    }
20321
    /// Parses the option clauses of `CREATE SEQUENCE` in this fixed order:
    /// INCREMENT, MINVALUE, MAXVALUE, START, CACHE, CYCLE.
    ///
    /// Boolean payloads record surface syntax for round-tripping:
    /// `IncrementBy(_, by)` / `StartWith(_, with)` note whether the optional
    /// `BY` / `WITH` keyword appeared, and `Cycle(no)` is `true` for
    /// `NO CYCLE` — the flag marks the `NO` prefix, not cycling itself.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // [ INCREMENT [ BY ] increment ]
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // [ MINVALUE minvalue | NO MINVALUE ]
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        // [ MAXVALUE maxvalue | NO MAXVALUE ]
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // [ START [ WITH ] start ]
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        // [ CACHE cache ]
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // [ NO CYCLE | CYCLE ] — `NO CYCLE` must be tried first so the `NO`
        // is not left dangling; Cycle(true) means "NO CYCLE" was written.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
20366
    /// Parses a PostgreSQL `CREATE SERVER` statement (keywords already
    /// consumed): `[IF NOT EXISTS] name [TYPE t] [VERSION v]
    /// FOREIGN DATA WRAPPER fdw [OPTIONS (key value, ...)]`.
    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        let server_type = if self.parse_keyword(Keyword::TYPE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let version = if self.parse_keyword(Keyword::VERSION) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
        let foreign_data_wrapper = self.parse_object_name(false)?;

        // Each OPTIONS entry is a space-separated key/value pair; both halves
        // are parsed as identifiers here.
        let mut options = None;
        if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            options = Some(self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?);
            self.expect_token(&Token::RParen)?;
        }

        Ok(Statement::CreateServer(CreateServerStatement {
            name,
            if_not_exists: ine,
            server_type,
            version,
            foreign_data_wrapper,
            options,
        }))
    }
20409
    /// Parses a PostgreSQL `CREATE FOREIGN DATA WRAPPER` statement (keywords
    /// already consumed): `name [HANDLER fn | NO HANDLER]
    /// [VALIDATOR fn | NO VALIDATOR] [OPTIONS (key value, ...)]`.
    pub fn parse_create_foreign_data_wrapper(
        &mut self,
    ) -> Result<CreateForeignDataWrapper, ParserError> {
        let name = self.parse_identifier()?;

        // `HANDLER` and `NO HANDLER` start with different keywords, so the
        // branch order here is not significant.
        let handler = if self.parse_keyword(Keyword::HANDLER) {
            Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
        } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
            Some(FdwRoutineClause::NoFunction)
        } else {
            None
        };

        let validator = if self.parse_keyword(Keyword::VALIDATOR) {
            Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
        } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
            Some(FdwRoutineClause::NoFunction)
        } else {
            None
        };

        // NOTE(review): option values are parsed as identifiers; PostgreSQL
        // writes them as quoted string literals — confirm `parse_identifier`
        // accepts the intended value forms here.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignDataWrapper {
            name,
            handler,
            validator,
            options,
        })
    }
20454
    /// Parses a PostgreSQL `CREATE FOREIGN TABLE` statement (keywords
    /// already consumed): `[IF NOT EXISTS] name (columns) SERVER server
    /// [OPTIONS (key value, ...)]`.
    pub fn parse_create_foreign_table(&mut self) -> Result<CreateForeignTable, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // NOTE(review): table-level constraints returned by `parse_columns`
        // are discarded — the foreign-table AST node has no field for them;
        // confirm this is intentional.
        let (columns, _constraints) = self.parse_columns()?;
        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignTable {
            name,
            if_not_exists,
            columns,
            server_name,
            options,
        })
    }
20486
    /// Parses a PostgreSQL `CREATE PUBLICATION` statement (keywords already
    /// consumed): `name [FOR ALL TABLES | FOR TABLE t, ... |
    /// FOR TABLES IN SCHEMA s, ...] [WITH (options)]`.
    pub fn parse_create_publication(&mut self) -> Result<CreatePublication, ParserError> {
        let name = self.parse_identifier()?;

        let target = if self.parse_keyword(Keyword::FOR) {
            // `ALL`, `TABLE`, and `TABLES` are distinct keywords, so each
            // branch matches unambiguously.
            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES]) {
                Some(PublicationTarget::AllTables)
            } else if self.parse_keyword(Keyword::TABLE) {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                Some(PublicationTarget::Tables(tables))
            } else if self.parse_keywords(&[Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
                let schemas = self.parse_comma_separated(|p| p.parse_identifier())?;
                Some(PublicationTarget::TablesInSchema(schemas))
            } else {
                return self.expected_ref(
                    "ALL TABLES, TABLE, or TABLES IN SCHEMA after FOR",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        let with_options = self.parse_options(Keyword::WITH)?;

        Ok(CreatePublication {
            name,
            target,
            with_options,
        })
    }
20520
    /// Parses a PostgreSQL `CREATE SUBSCRIPTION` statement (keywords already
    /// consumed): `name CONNECTION 'conninfo' PUBLICATION pub, ...
    /// [WITH (options)]`.
    pub fn parse_create_subscription(&mut self) -> Result<CreateSubscription, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::CONNECTION)?;
        // Only the value itself is kept; its source span is dropped.
        let connection = self.parse_value()?.value;
        self.expect_keyword_is(Keyword::PUBLICATION)?;
        let publications = self.parse_comma_separated(|p| p.parse_identifier())?;
        let with_options = self.parse_options(Keyword::WITH)?;

        Ok(CreateSubscription {
            name,
            connection,
            publications,
            with_options,
        })
    }
20539
20540 pub fn parse_create_cast(&mut self) -> Result<CreateCast, ParserError> {
20544 self.expect_token(&Token::LParen)?;
20545 let source_type = self.parse_data_type()?;
20546 self.expect_keyword_is(Keyword::AS)?;
20547 let target_type = self.parse_data_type()?;
20548 self.expect_token(&Token::RParen)?;
20549
20550 let function_kind = if self.parse_keywords(&[Keyword::WITHOUT, Keyword::FUNCTION]) {
20551 CastFunctionKind::WithoutFunction
20552 } else if self.parse_keywords(&[Keyword::WITH, Keyword::INOUT]) {
20553 CastFunctionKind::WithInout
20554 } else if self.parse_keywords(&[Keyword::WITH, Keyword::FUNCTION]) {
20555 let function_name = self.parse_object_name(false)?;
20556 let argument_types = if self.peek_token_ref().token == Token::LParen {
20557 self.expect_token(&Token::LParen)?;
20558 let types = if self.peek_token_ref().token == Token::RParen {
20559 vec![]
20560 } else {
20561 self.parse_comma_separated(|p| p.parse_data_type())?
20562 };
20563 self.expect_token(&Token::RParen)?;
20564 types
20565 } else {
20566 vec![]
20567 };
20568 CastFunctionKind::WithFunction {
20569 function_name,
20570 argument_types,
20571 }
20572 } else {
20573 return self.expected_ref(
20574 "WITH FUNCTION, WITHOUT FUNCTION, or WITH INOUT",
20575 self.peek_token_ref(),
20576 );
20577 };
20578
20579 let cast_context = if self.parse_keyword(Keyword::AS) {
20580 if self.parse_keyword(Keyword::ASSIGNMENT) {
20581 CastContext::Assignment
20582 } else if self.parse_keyword(Keyword::IMPLICIT) {
20583 CastContext::Implicit
20584 } else {
20585 return self.expected_ref("ASSIGNMENT or IMPLICIT after AS", self.peek_token_ref());
20586 }
20587 } else {
20588 CastContext::Explicit
20589 };
20590
20591 Ok(CreateCast {
20592 source_type,
20593 target_type,
20594 function_kind,
20595 cast_context,
20596 })
20597 }
20598
    /// Parses a PostgreSQL `CREATE [DEFAULT] CONVERSION` statement (keywords
    /// through `CONVERSION` already consumed; `is_default` reflects what the
    /// caller saw): `name FOR 'src_encoding' TO 'dst_encoding' FROM func`.
    pub fn parse_create_conversion(
        &mut self,
        is_default: bool,
    ) -> Result<CreateConversion, ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_keyword_is(Keyword::FOR)?;
        let source_encoding = self.parse_literal_string()?;
        self.expect_keyword_is(Keyword::TO)?;
        let destination_encoding = self.parse_literal_string()?;
        self.expect_keyword_is(Keyword::FROM)?;
        let function_name = self.parse_object_name(false)?;

        Ok(CreateConversion {
            name,
            is_default,
            source_encoding,
            destination_encoding,
            function_name,
        })
    }
20622
    /// Parses a PostgreSQL `CREATE [OR REPLACE] [TRUSTED] [PROCEDURAL]
    /// LANGUAGE` statement (keywords already consumed; the flags reflect
    /// what the caller saw): `name [HANDLER fn] [INLINE fn]
    /// [VALIDATOR fn | NO VALIDATOR]`.
    pub fn parse_create_language(
        &mut self,
        or_replace: bool,
        trusted: bool,
        procedural: bool,
    ) -> Result<CreateLanguage, ParserError> {
        let name = self.parse_identifier()?;

        let handler = if self.parse_keyword(Keyword::HANDLER) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let inline_handler = if self.parse_keyword(Keyword::INLINE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `NO VALIDATOR` and a plain absent clause both map to `None`; the
        // AST does not distinguish the explicit `NO VALIDATOR` spelling.
        let validator = if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
            None
        } else if self.parse_keyword(Keyword::VALIDATOR) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(CreateLanguage {
            name,
            or_replace,
            trusted,
            procedural,
            handler,
            inline_handler,
            validator,
        })
    }
20664
    /// Parses a PostgreSQL `CREATE RULE` statement (keywords already
    /// consumed): `name AS ON event TO table [WHERE condition]
    /// DO [ALSO | INSTEAD] { NOTHING | stmt | (stmt; ...) }`.
    pub fn parse_create_rule(&mut self) -> Result<CreateRule, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;
        self.expect_keyword_is(Keyword::ON)?;

        let event = if self.parse_keyword(Keyword::SELECT) {
            RuleEvent::Select
        } else if self.parse_keyword(Keyword::INSERT) {
            RuleEvent::Insert
        } else if self.parse_keyword(Keyword::UPDATE) {
            RuleEvent::Update
        } else if self.parse_keyword(Keyword::DELETE) {
            RuleEvent::Delete
        } else {
            return self.expected_ref(
                "SELECT, INSERT, UPDATE, or DELETE after ON",
                self.peek_token_ref(),
            );
        };

        self.expect_keyword_is(Keyword::TO)?;
        let table = self.parse_object_name(false)?;

        let condition = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::DO)?;

        // Only `INSTEAD` is recorded; `ALSO` is the default and carries no
        // information, so it is consumed and dropped when present.
        let instead = self.parse_keyword(Keyword::INSTEAD);
        if !instead {
            let _ = self.parse_keyword(Keyword::ALSO);
        }

        let action = if self.parse_keyword(Keyword::NOTHING) {
            RuleAction::Nothing
        } else if self.peek_token_ref().token == Token::LParen {
            // Parenthesized list of semicolon-separated statements; a
            // trailing semicolon before the `)` is permitted (second break).
            self.expect_token(&Token::LParen)?;
            let mut stmts = Vec::new();
            loop {
                stmts.push(self.parse_statement()?);
                if !self.consume_token(&Token::SemiColon) {
                    break;
                }
                if self.peek_token_ref().token == Token::RParen {
                    break;
                }
            }
            self.expect_token(&Token::RParen)?;
            RuleAction::Statements(stmts)
        } else {
            // Single unparenthesized statement.
            let stmt = self.parse_statement()?;
            RuleAction::Statements(vec![stmt])
        };

        Ok(CreateRule {
            name,
            event,
            table,
            condition,
            instead,
            action,
        })
    }
20735
    /// Parses a PostgreSQL `CREATE STATISTICS` statement (keywords already
    /// consumed): `[IF NOT EXISTS] name [(kind, ...)] ON exprs FROM table`.
    pub fn parse_create_statistics(&mut self) -> Result<CreateStatistics, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        // Optional parenthesized list of statistics kinds, matched
        // case-insensitively against the kinds PostgreSQL defines.
        let kinds = if self.consume_token(&Token::LParen) {
            let kinds = self.parse_comma_separated(|p| {
                let ident = p.parse_identifier()?;
                match ident.value.to_lowercase().as_str() {
                    "ndistinct" => Ok(StatisticsKind::NDistinct),
                    "dependencies" => Ok(StatisticsKind::Dependencies),
                    "mcv" => Ok(StatisticsKind::Mcv),
                    other => Err(ParserError::ParserError(format!(
                        "Unknown statistics kind: {other}"
                    ))),
                }
            })?;
            self.expect_token(&Token::RParen)?;
            kinds
        } else {
            vec![]
        };

        self.expect_keyword_is(Keyword::ON)?;
        let on = self.parse_comma_separated(Parser::parse_expr)?;
        self.expect_keyword_is(Keyword::FROM)?;
        let from = self.parse_object_name(false)?;

        Ok(CreateStatistics {
            if_not_exists,
            name,
            kinds,
            on,
            from,
        })
    }
20774
20775 pub fn parse_create_access_method(&mut self) -> Result<CreateAccessMethod, ParserError> {
20779 let name = self.parse_identifier()?;
20780 self.expect_keyword_is(Keyword::TYPE)?;
20781 let method_type = if self.parse_keyword(Keyword::INDEX) {
20782 AccessMethodType::Index
20783 } else if self.parse_keyword(Keyword::TABLE) {
20784 AccessMethodType::Table
20785 } else {
20786 return self.expected_ref("INDEX or TABLE after TYPE", self.peek_token_ref());
20787 };
20788 self.expect_keyword_is(Keyword::HANDLER)?;
20789 let handler = self.parse_object_name(false)?;
20790
20791 Ok(CreateAccessMethod {
20792 name,
20793 method_type,
20794 handler,
20795 })
20796 }
20797
    /// Parses a PostgreSQL `CREATE EVENT TRIGGER` statement (keywords
    /// already consumed): `name ON event [WHEN TAG IN ('tag', ...)]
    /// EXECUTE {FUNCTION | PROCEDURE} fn()`.
    pub fn parse_create_event_trigger(&mut self) -> Result<CreateEventTrigger, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        // The event name is an identifier, matched case-insensitively
        // against the four events PostgreSQL defines.
        let event_ident = self.parse_identifier()?;
        let event = match event_ident.value.to_lowercase().as_str() {
            "ddl_command_start" => EventTriggerEvent::DdlCommandStart,
            "ddl_command_end" => EventTriggerEvent::DdlCommandEnd,
            "table_rewrite" => EventTriggerEvent::TableRewrite,
            "sql_drop" => EventTriggerEvent::SqlDrop,
            other => {
                return Err(ParserError::ParserError(format!(
                    "Unknown event trigger event: {other}"
                )))
            }
        };

        let when_tags = if self.parse_keyword(Keyword::WHEN) {
            self.expect_keyword_is(Keyword::TAG)?;
            self.expect_keyword_is(Keyword::IN)?;
            self.expect_token(&Token::LParen)?;
            let tags = self.parse_comma_separated(|p| p.parse_value().map(|v| v.value))?;
            self.expect_token(&Token::RParen)?;
            Some(tags)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::EXECUTE)?;
        // The flag only preserves which of the two interchangeable spellings
        // (`FUNCTION`/`PROCEDURE`) appeared, for round-tripping.
        let is_procedure = if self.parse_keyword(Keyword::FUNCTION) {
            false
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            true
        } else {
            return self.expected_ref("FUNCTION or PROCEDURE after EXECUTE", self.peek_token_ref());
        };
        let execute = self.parse_object_name(false)?;
        // An empty argument list `()` is required after the function name.
        self.expect_token(&Token::LParen)?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateEventTrigger {
            name,
            event,
            when_tags,
            execute,
            is_procedure,
        })
    }
20848
    /// Parses a PostgreSQL `CREATE [OR REPLACE] TRANSFORM` statement
    /// (keywords through `TRANSFORM` already consumed): `FOR type LANGUAGE
    /// lang ({FROM | TO} SQL WITH FUNCTION fn(argtypes), ...)`.
    pub fn parse_create_transform(
        &mut self,
        or_replace: bool,
    ) -> Result<CreateTransform, ParserError> {
        self.expect_keyword_is(Keyword::FOR)?;
        let type_name = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::LANGUAGE)?;
        let language = self.parse_identifier()?;
        self.expect_token(&Token::LParen)?;
        // Each element is `{FROM | TO} SQL WITH FUNCTION fn(argtypes)`;
        // `is_from` records which direction keyword was used.
        let elements = self.parse_comma_separated(|p| {
            let is_from = if p.parse_keyword(Keyword::FROM) {
                true
            } else {
                p.expect_keyword_is(Keyword::TO)?;
                false
            };
            p.expect_keyword_is(Keyword::SQL)?;
            p.expect_keyword_is(Keyword::WITH)?;
            p.expect_keyword_is(Keyword::FUNCTION)?;
            let function = p.parse_object_name(false)?;
            p.expect_token(&Token::LParen)?;
            // `()` denotes an empty argument-type list.
            let arg_types = if p.peek_token().token == Token::RParen {
                vec![]
            } else {
                p.parse_comma_separated(|p| p.parse_data_type())?
            };
            p.expect_token(&Token::RParen)?;
            Ok(TransformElement {
                is_from,
                function,
                arg_types,
            })
        })?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateTransform {
            or_replace,
            type_name,
            language,
            elements,
        })
    }
20894
    /// Parses a PostgreSQL `SECURITY LABEL` statement (the `SECURITY`
    /// keyword has already been consumed): `LABEL [FOR provider] ON
    /// object_kind object_name IS {'label' | NULL}`.
    pub fn parse_security_label(&mut self) -> Result<SecurityLabel, ParserError> {
        self.expect_keyword_is(Keyword::LABEL)?;

        let provider = if self.parse_keyword(Keyword::FOR) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::ON)?;

        // `MATERIALIZED VIEW` is tried before plain `VIEW` so the longer
        // form wins; the remaining kinds are single distinct keywords.
        let object_kind = if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            SecurityLabelObjectKind::MaterializedView
        } else if self.parse_keyword(Keyword::TABLE) {
            SecurityLabelObjectKind::Table
        } else if self.parse_keyword(Keyword::COLUMN) {
            SecurityLabelObjectKind::Column
        } else if self.parse_keyword(Keyword::DATABASE) {
            SecurityLabelObjectKind::Database
        } else if self.parse_keyword(Keyword::DOMAIN) {
            SecurityLabelObjectKind::Domain
        } else if self.parse_keyword(Keyword::FUNCTION) {
            SecurityLabelObjectKind::Function
        } else if self.parse_keyword(Keyword::ROLE) {
            SecurityLabelObjectKind::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            SecurityLabelObjectKind::Schema
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            SecurityLabelObjectKind::Sequence
        } else if self.parse_keyword(Keyword::TYPE) {
            SecurityLabelObjectKind::Type
        } else if self.parse_keyword(Keyword::VIEW) {
            SecurityLabelObjectKind::View
        } else {
            return self.expected_ref(
                "TABLE, COLUMN, DATABASE, DOMAIN, FUNCTION, MATERIALIZED VIEW, ROLE, SCHEMA, SEQUENCE, TYPE, or VIEW after ON",
                self.peek_token_ref(),
            );
        };

        let object_name = self.parse_object_name(false)?;

        self.expect_keyword_is(Keyword::IS)?;

        // `IS NULL` removes the label and is represented as `None`.
        let label = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_value()?.value)
        };

        Ok(SecurityLabel {
            provider,
            object_kind,
            object_name,
            label,
        })
    }
20955
    /// Parses a PostgreSQL `CREATE USER MAPPING` statement (keywords already
    /// consumed): `[IF NOT EXISTS] FOR {user | USER | CURRENT_ROLE |
    /// CURRENT_USER | PUBLIC} SERVER server [OPTIONS (key value, ...)]`.
    pub fn parse_create_user_mapping(&mut self) -> Result<CreateUserMapping, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::FOR)?;

        // The special role keywords are tried first; anything else is taken
        // as an ordinary role identifier.
        let user = if self.parse_keyword(Keyword::CURRENT_ROLE) {
            UserMappingUser::CurrentRole
        } else if self.parse_keyword(Keyword::CURRENT_USER) {
            UserMappingUser::CurrentUser
        } else if self.parse_keyword(Keyword::PUBLIC) {
            UserMappingUser::Public
        } else if self.parse_keyword(Keyword::USER) {
            UserMappingUser::User
        } else {
            UserMappingUser::Ident(self.parse_identifier()?)
        };

        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateUserMapping {
            if_not_exists,
            user,
            server_name,
            options,
        })
    }
20999
    /// Parses a PostgreSQL `CREATE TABLESPACE` statement (keywords already
    /// consumed): `name [OWNER owner] LOCATION 'directory'
    /// [WITH (options)]`.
    pub fn parse_create_tablespace(&mut self) -> Result<CreateTablespace, ParserError> {
        let name = self.parse_identifier()?;

        let owner = if self.parse_keyword(Keyword::OWNER) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::LOCATION)?;
        // Only the value itself is kept; its source span is dropped.
        let location = self.parse_value()?.value;

        let with_options = self.parse_options(Keyword::WITH)?;

        Ok(CreateTablespace {
            name,
            owner,
            location,
            with_options,
        })
    }
21024
    /// Returns the parser's current position (token index) in the token
    /// stream.
    pub fn index(&self) -> usize {
        self.index
    }
21029
21030 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
21032 let ident = self.parse_identifier()?;
21033 self.expect_keyword_is(Keyword::AS)?;
21034
21035 let window_expr = if self.consume_token(&Token::LParen) {
21036 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
21037 } else if self.dialect.supports_window_clause_named_window_reference() {
21038 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
21039 } else {
21040 return self.expected_ref("(", self.peek_token_ref());
21041 };
21042
21043 Ok(NamedWindowDefinition(ident, window_expr))
21044 }
21045
21046 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
21048 let name = self.parse_object_name(false)?;
21049 let params = self.parse_optional_procedure_parameters()?;
21050
21051 let language = if self.parse_keyword(Keyword::LANGUAGE) {
21052 Some(self.parse_identifier()?)
21053 } else {
21054 None
21055 };
21056
21057 self.expect_keyword_is(Keyword::AS)?;
21058
21059 let body = self.parse_conditional_statements(&[Keyword::END])?;
21060
21061 Ok(Statement::CreateProcedure {
21062 name,
21063 or_alter,
21064 params,
21065 language,
21066 body,
21067 })
21068 }
21069
21070 pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
21072 let window_name = match &self.peek_token_ref().token {
21073 Token::Word(word) if word.keyword == Keyword::NoKeyword => {
21074 self.parse_optional_ident()?
21075 }
21076 _ => None,
21077 };
21078
21079 let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
21080 self.parse_comma_separated(Parser::parse_expr)?
21081 } else {
21082 vec![]
21083 };
21084 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
21085 self.parse_comma_separated(Parser::parse_order_by_expr)?
21086 } else {
21087 vec![]
21088 };
21089
21090 let window_frame = if !self.consume_token(&Token::RParen) {
21091 let window_frame = self.parse_window_frame()?;
21092 self.expect_token(&Token::RParen)?;
21093 Some(window_frame)
21094 } else {
21095 None
21096 };
21097 Ok(WindowSpec {
21098 window_name,
21099 partition_by,
21100 order_by,
21101 window_frame,
21102 })
21103 }
21104
21105 pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
21107 let name = self.parse_object_name(false)?;
21108
21109 let has_as = self.parse_keyword(Keyword::AS);
21111
21112 if !has_as {
21113 if self.consume_token(&Token::LParen) {
21115 let options = self.parse_create_type_sql_definition_options()?;
21117 self.expect_token(&Token::RParen)?;
21118 return Ok(Statement::CreateType {
21119 name,
21120 representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
21121 });
21122 }
21123
21124 return Ok(Statement::CreateType {
21126 name,
21127 representation: None,
21128 });
21129 }
21130
21131 if self.parse_keyword(Keyword::ENUM) {
21133 self.parse_create_type_enum(name)
21135 } else if self.parse_keyword(Keyword::RANGE) {
21136 self.parse_create_type_range(name)
21138 } else if self.consume_token(&Token::LParen) {
21139 self.parse_create_type_composite(name)
21141 } else {
21142 self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
21143 }
21144 }
21145
21146 fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21150 if self.consume_token(&Token::RParen) {
21151 return Ok(Statement::CreateType {
21153 name,
21154 representation: Some(UserDefinedTypeRepresentation::Composite {
21155 attributes: vec![],
21156 }),
21157 });
21158 }
21159
21160 let mut attributes = vec![];
21161 loop {
21162 let attr_name = self.parse_identifier()?;
21163 let attr_data_type = self.parse_data_type()?;
21164 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
21165 Some(self.parse_object_name(false)?)
21166 } else {
21167 None
21168 };
21169 attributes.push(UserDefinedTypeCompositeAttributeDef {
21170 name: attr_name,
21171 data_type: attr_data_type,
21172 collation: attr_collation,
21173 });
21174
21175 if !self.consume_token(&Token::Comma) {
21176 break;
21177 }
21178 }
21179 self.expect_token(&Token::RParen)?;
21180
21181 Ok(Statement::CreateType {
21182 name,
21183 representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
21184 })
21185 }
21186
21187 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21191 self.expect_token(&Token::LParen)?;
21192 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21193 self.expect_token(&Token::RParen)?;
21194
21195 Ok(Statement::CreateType {
21196 name,
21197 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
21198 })
21199 }
21200
21201 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21205 self.expect_token(&Token::LParen)?;
21206 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
21207 self.expect_token(&Token::RParen)?;
21208
21209 Ok(Statement::CreateType {
21210 name,
21211 representation: Some(UserDefinedTypeRepresentation::Range { options }),
21212 })
21213 }
21214
21215 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
21217 let keyword = self.parse_one_of_keywords(&[
21218 Keyword::SUBTYPE,
21219 Keyword::SUBTYPE_OPCLASS,
21220 Keyword::COLLATION,
21221 Keyword::CANONICAL,
21222 Keyword::SUBTYPE_DIFF,
21223 Keyword::MULTIRANGE_TYPE_NAME,
21224 ]);
21225
21226 match keyword {
21227 Some(Keyword::SUBTYPE) => {
21228 self.expect_token(&Token::Eq)?;
21229 let data_type = self.parse_data_type()?;
21230 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
21231 }
21232 Some(Keyword::SUBTYPE_OPCLASS) => {
21233 self.expect_token(&Token::Eq)?;
21234 let name = self.parse_object_name(false)?;
21235 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
21236 }
21237 Some(Keyword::COLLATION) => {
21238 self.expect_token(&Token::Eq)?;
21239 let name = self.parse_object_name(false)?;
21240 Ok(UserDefinedTypeRangeOption::Collation(name))
21241 }
21242 Some(Keyword::CANONICAL) => {
21243 self.expect_token(&Token::Eq)?;
21244 let name = self.parse_object_name(false)?;
21245 Ok(UserDefinedTypeRangeOption::Canonical(name))
21246 }
21247 Some(Keyword::SUBTYPE_DIFF) => {
21248 self.expect_token(&Token::Eq)?;
21249 let name = self.parse_object_name(false)?;
21250 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
21251 }
21252 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
21253 self.expect_token(&Token::Eq)?;
21254 let name = self.parse_object_name(false)?;
21255 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
21256 }
21257 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
21258 }
21259 }
21260
21261 fn parse_create_type_sql_definition_options(
21263 &mut self,
21264 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
21265 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
21266 }
21267
21268 fn parse_sql_definition_option(
21270 &mut self,
21271 ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
21272 let keyword = self.parse_one_of_keywords(&[
21273 Keyword::INPUT,
21274 Keyword::OUTPUT,
21275 Keyword::RECEIVE,
21276 Keyword::SEND,
21277 Keyword::TYPMOD_IN,
21278 Keyword::TYPMOD_OUT,
21279 Keyword::ANALYZE,
21280 Keyword::SUBSCRIPT,
21281 Keyword::INTERNALLENGTH,
21282 Keyword::PASSEDBYVALUE,
21283 Keyword::ALIGNMENT,
21284 Keyword::STORAGE,
21285 Keyword::LIKE,
21286 Keyword::CATEGORY,
21287 Keyword::PREFERRED,
21288 Keyword::DEFAULT,
21289 Keyword::ELEMENT,
21290 Keyword::DELIMITER,
21291 Keyword::COLLATABLE,
21292 ]);
21293
21294 match keyword {
21295 Some(Keyword::INPUT) => {
21296 self.expect_token(&Token::Eq)?;
21297 let name = self.parse_object_name(false)?;
21298 Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
21299 }
21300 Some(Keyword::OUTPUT) => {
21301 self.expect_token(&Token::Eq)?;
21302 let name = self.parse_object_name(false)?;
21303 Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
21304 }
21305 Some(Keyword::RECEIVE) => {
21306 self.expect_token(&Token::Eq)?;
21307 let name = self.parse_object_name(false)?;
21308 Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
21309 }
21310 Some(Keyword::SEND) => {
21311 self.expect_token(&Token::Eq)?;
21312 let name = self.parse_object_name(false)?;
21313 Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
21314 }
21315 Some(Keyword::TYPMOD_IN) => {
21316 self.expect_token(&Token::Eq)?;
21317 let name = self.parse_object_name(false)?;
21318 Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
21319 }
21320 Some(Keyword::TYPMOD_OUT) => {
21321 self.expect_token(&Token::Eq)?;
21322 let name = self.parse_object_name(false)?;
21323 Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
21324 }
21325 Some(Keyword::ANALYZE) => {
21326 self.expect_token(&Token::Eq)?;
21327 let name = self.parse_object_name(false)?;
21328 Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
21329 }
21330 Some(Keyword::SUBSCRIPT) => {
21331 self.expect_token(&Token::Eq)?;
21332 let name = self.parse_object_name(false)?;
21333 Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
21334 }
21335 Some(Keyword::INTERNALLENGTH) => {
21336 self.expect_token(&Token::Eq)?;
21337 if self.parse_keyword(Keyword::VARIABLE) {
21338 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
21339 UserDefinedTypeInternalLength::Variable,
21340 ))
21341 } else {
21342 let value = self.parse_literal_uint()?;
21343 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
21344 UserDefinedTypeInternalLength::Fixed(value),
21345 ))
21346 }
21347 }
21348 Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
21349 Some(Keyword::ALIGNMENT) => {
21350 self.expect_token(&Token::Eq)?;
21351 let align_keyword = self.parse_one_of_keywords(&[
21352 Keyword::CHAR,
21353 Keyword::INT2,
21354 Keyword::INT4,
21355 Keyword::DOUBLE,
21356 ]);
21357 match align_keyword {
21358 Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21359 Alignment::Char,
21360 )),
21361 Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21362 Alignment::Int2,
21363 )),
21364 Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21365 Alignment::Int4,
21366 )),
21367 Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
21368 Alignment::Double,
21369 )),
21370 _ => self.expected_ref(
21371 "alignment value (char, int2, int4, or double)",
21372 self.peek_token_ref(),
21373 ),
21374 }
21375 }
21376 Some(Keyword::STORAGE) => {
21377 self.expect_token(&Token::Eq)?;
21378 let storage_keyword = self.parse_one_of_keywords(&[
21379 Keyword::PLAIN,
21380 Keyword::EXTERNAL,
21381 Keyword::EXTENDED,
21382 Keyword::MAIN,
21383 ]);
21384 match storage_keyword {
21385 Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21386 UserDefinedTypeStorage::Plain,
21387 )),
21388 Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21389 UserDefinedTypeStorage::External,
21390 )),
21391 Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21392 UserDefinedTypeStorage::Extended,
21393 )),
21394 Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
21395 UserDefinedTypeStorage::Main,
21396 )),
21397 _ => self.expected_ref(
21398 "storage value (plain, external, extended, or main)",
21399 self.peek_token_ref(),
21400 ),
21401 }
21402 }
21403 Some(Keyword::LIKE) => {
21404 self.expect_token(&Token::Eq)?;
21405 let name = self.parse_object_name(false)?;
21406 Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
21407 }
21408 Some(Keyword::CATEGORY) => {
21409 self.expect_token(&Token::Eq)?;
21410 let category_str = self.parse_literal_string()?;
21411 let category_char = category_str.chars().next().ok_or_else(|| {
21412 ParserError::ParserError(
21413 "CATEGORY value must be a single character".to_string(),
21414 )
21415 })?;
21416 Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
21417 }
21418 Some(Keyword::PREFERRED) => {
21419 self.expect_token(&Token::Eq)?;
21420 let value =
21421 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
21422 Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
21423 }
21424 Some(Keyword::DEFAULT) => {
21425 self.expect_token(&Token::Eq)?;
21426 let expr = self.parse_expr()?;
21427 Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
21428 }
21429 Some(Keyword::ELEMENT) => {
21430 self.expect_token(&Token::Eq)?;
21431 let data_type = self.parse_data_type()?;
21432 Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
21433 }
21434 Some(Keyword::DELIMITER) => {
21435 self.expect_token(&Token::Eq)?;
21436 let delimiter = self.parse_literal_string()?;
21437 Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
21438 }
21439 Some(Keyword::COLLATABLE) => {
21440 self.expect_token(&Token::Eq)?;
21441 let value =
21442 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
21443 Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
21444 }
21445 _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
21446 }
21447 }
21448
21449 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
21450 self.expect_token(&Token::LParen)?;
21451 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21452 self.expect_token(&Token::RParen)?;
21453 Ok(idents)
21454 }
21455
21456 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
21457 if dialect_of!(self is MySqlDialect | GenericDialect) {
21458 if self.parse_keyword(Keyword::FIRST) {
21459 Ok(Some(MySQLColumnPosition::First))
21460 } else if self.parse_keyword(Keyword::AFTER) {
21461 let ident = self.parse_identifier()?;
21462 Ok(Some(MySQLColumnPosition::After(ident)))
21463 } else {
21464 Ok(None)
21465 }
21466 } else {
21467 Ok(None)
21468 }
21469 }
21470
21471 fn parse_print(&mut self) -> Result<Statement, ParserError> {
21473 Ok(Statement::Print(PrintStatement {
21474 message: Box::new(self.parse_expr()?),
21475 }))
21476 }
21477
21478 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
21482 let wait_type = if self.parse_keyword(Keyword::DELAY) {
21483 WaitForType::Delay
21484 } else if self.parse_keyword(Keyword::TIME) {
21485 WaitForType::Time
21486 } else {
21487 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
21488 };
21489 let expr = self.parse_expr()?;
21490 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
21491 }
21492
21493 fn parse_return(&mut self) -> Result<Statement, ParserError> {
21495 match self.maybe_parse(|p| p.parse_expr())? {
21496 Some(expr) => Ok(Statement::Return(ReturnStatement {
21497 value: Some(ReturnStatementValue::Expr(expr)),
21498 })),
21499 None => Ok(Statement::Return(ReturnStatement { value: None })),
21500 }
21501 }
21502
21503 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
21507 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
21508
21509 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
21510 Some(self.parse_object_name(false)?)
21511 } else {
21512 None
21513 };
21514 self.expect_keyword(Keyword::OPTIONS)?;
21515 self.expect_token(&Token::LParen)?;
21516 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
21517 self.expect_token(&Token::RParen)?;
21518 self.expect_keyword(Keyword::AS)?;
21519 let query = self.parse_query()?;
21520 Ok(Statement::ExportData(ExportData {
21521 options,
21522 query,
21523 connection,
21524 }))
21525 }
21526
21527 fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
21528 self.expect_keyword(Keyword::VACUUM)?;
21529 let full = self.parse_keyword(Keyword::FULL);
21530 let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
21531 let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
21532 let reindex = self.parse_keyword(Keyword::REINDEX);
21533 let recluster = self.parse_keyword(Keyword::RECLUSTER);
21534 let (table_name, threshold, boost) =
21535 match self.maybe_parse(|p| p.parse_object_name(false))? {
21536 Some(table_name) => {
21537 let threshold = if self.parse_keyword(Keyword::TO) {
21538 let value = self.parse_value()?;
21539 self.expect_keyword(Keyword::PERCENT)?;
21540 Some(value)
21541 } else {
21542 None
21543 };
21544 let boost = self.parse_keyword(Keyword::BOOST);
21545 (Some(table_name), threshold, boost)
21546 }
21547 _ => (None, None, false),
21548 };
21549 Ok(Statement::Vacuum(VacuumStatement {
21550 full,
21551 sort_only,
21552 delete_only,
21553 reindex,
21554 recluster,
21555 table_name,
21556 threshold,
21557 boost,
21558 }))
21559 }
21560
    /// Consumes the parser and returns its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
21565
    /// Returns true when the next keyword begins a subquery (`SELECT` or
    /// `WITH`), without consuming any tokens.
    fn peek_sub_query(&mut self) -> bool {
        self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
            .is_some()
    }
21571
21572 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
21573 let show_in;
21574 let mut filter_position = None;
21575 if self.dialect.supports_show_like_before_in() {
21576 if let Some(filter) = self.parse_show_statement_filter()? {
21577 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
21578 }
21579 show_in = self.maybe_parse_show_stmt_in()?;
21580 } else {
21581 show_in = self.maybe_parse_show_stmt_in()?;
21582 if let Some(filter) = self.parse_show_statement_filter()? {
21583 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
21584 }
21585 }
21586 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
21587 let limit = self.maybe_parse_show_stmt_limit()?;
21588 let from = self.maybe_parse_show_stmt_from()?;
21589 Ok(ShowStatementOptions {
21590 filter_position,
21591 show_in,
21592 starts_with,
21593 limit,
21594 limit_from: from,
21595 })
21596 }
21597
    /// Parses an optional `FROM`/`IN` scope clause of a `SHOW` statement,
    /// e.g. `SHOW TABLES IN DATABASE db1` or `SHOW COLUMNS FROM tbl`.
    ///
    /// Returns `Ok(None)` (consuming nothing) when the next token is neither
    /// `FROM` nor `IN`. Otherwise returns the clause keyword, the optional
    /// parent-object type, and the optional parent name.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` / `IN SCHEMA` immediately followed by
            // `STARTS WITH` or `LIMIT` means the parent name was omitted.
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            Some(parent_kw) => {
                // A parent-type keyword was consumed; the name after it is
                // optional (`maybe_parse` backtracks on failure).
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected_ref(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token_ref(),
                        )
                    }
                }
            }
            None => {
                // No parent-type keyword: the clause names the parent
                // directly. A second FROM/IN introduces an extra qualifier
                // that is prepended to the object name.
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
21667
21668 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21669 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
21670 Ok(Some(self.parse_value()?))
21671 } else {
21672 Ok(None)
21673 }
21674 }
21675
21676 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
21677 if self.parse_keyword(Keyword::LIMIT) {
21678 Ok(self.parse_limit()?)
21679 } else {
21680 Ok(None)
21681 }
21682 }
21683
21684 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21685 if self.parse_keyword(Keyword::FROM) {
21686 Ok(Some(self.parse_value()?))
21687 } else {
21688 Ok(None)
21689 }
21690 }
21691
    /// Returns true while the parser is in the column-definition state
    /// (`ParserState::ColumnDefinition`).
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
21695
21696 pub(crate) fn parse_key_value_options(
21701 &mut self,
21702 parenthesized: bool,
21703 end_words: &[Keyword],
21704 ) -> Result<KeyValueOptions, ParserError> {
21705 let mut options: Vec<KeyValueOption> = Vec::new();
21706 let mut delimiter = KeyValueOptionsDelimiter::Space;
21707 if parenthesized {
21708 self.expect_token(&Token::LParen)?;
21709 }
21710 loop {
21711 match self.next_token().token {
21712 Token::RParen => {
21713 if parenthesized {
21714 break;
21715 } else {
21716 return self.expected_ref(" another option or EOF", self.peek_token_ref());
21717 }
21718 }
21719 Token::EOF | Token::SemiColon => break,
21720 Token::Comma => {
21721 delimiter = KeyValueOptionsDelimiter::Comma;
21722 continue;
21723 }
21724 Token::Word(w) if !end_words.contains(&w.keyword) => {
21725 options.push(self.parse_key_value_option(&w)?)
21726 }
21727 Token::Word(w) if end_words.contains(&w.keyword) => {
21728 self.prev_token();
21729 break;
21730 }
21731 _ => {
21732 return self.expected_ref(
21733 "another option, EOF, SemiColon, Comma or ')'",
21734 self.peek_token_ref(),
21735 )
21736 }
21737 };
21738 }
21739
21740 Ok(KeyValueOptions { delimiter, options })
21741 }
21742
21743 pub(crate) fn parse_key_value_option(
21745 &mut self,
21746 key: &Word,
21747 ) -> Result<KeyValueOption, ParserError> {
21748 self.expect_token(&Token::Eq)?;
21749 let peeked_token = self.peek_token();
21750 match peeked_token.token {
21751 Token::SingleQuotedString(_) => Ok(KeyValueOption {
21752 option_name: key.value.clone(),
21753 option_value: KeyValueOptionKind::Single(self.parse_value()?),
21754 }),
21755 Token::Word(word)
21756 if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
21757 {
21758 Ok(KeyValueOption {
21759 option_name: key.value.clone(),
21760 option_value: KeyValueOptionKind::Single(self.parse_value()?),
21761 })
21762 }
21763 Token::Number(..) => Ok(KeyValueOption {
21764 option_name: key.value.clone(),
21765 option_value: KeyValueOptionKind::Single(self.parse_value()?),
21766 }),
21767 Token::Word(word) => {
21768 self.next_token();
21769 Ok(KeyValueOption {
21770 option_name: key.value.clone(),
21771 option_value: KeyValueOptionKind::Single(
21772 Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
21773 ),
21774 })
21775 }
21776 Token::LParen => {
21777 match self.maybe_parse(|parser| {
21781 parser.expect_token(&Token::LParen)?;
21782 let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
21783 parser.expect_token(&Token::RParen)?;
21784 values
21785 })? {
21786 Some(values) => Ok(KeyValueOption {
21787 option_name: key.value.clone(),
21788 option_value: KeyValueOptionKind::Multi(values),
21789 }),
21790 None => Ok(KeyValueOption {
21791 option_name: key.value.clone(),
21792 option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
21793 self.parse_key_value_options(true, &[])?,
21794 )),
21795 }),
21796 }
21797 }
21798 _ => self.expected_ref("expected option value", self.peek_token_ref()),
21799 }
21800 }
21801
21802 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
21804 if self.parse_keyword(Keyword::ALL) {
21805 return Ok(ResetStatement { reset: Reset::ALL });
21806 }
21807
21808 let obj = self.parse_object_name(false)?;
21809 Ok(ResetStatement {
21810 reset: Reset::ConfigurationParameter(obj),
21811 })
21812 }
21813}
21814
21815fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
21816 if let Some(prefix) = prefix {
21817 Expr::Prefixed {
21818 prefix,
21819 value: Box::new(expr),
21820 }
21821 } else {
21822 expr
21823 }
21824}
21825
21826impl Word {
21827 pub fn to_ident(&self, span: Span) -> Ident {
21833 Ident {
21834 value: self.value.clone(),
21835 quote_style: self.quote_style,
21836 span,
21837 }
21838 }
21839
21840 pub fn into_ident(self, span: Span) -> Ident {
21845 Ident {
21846 value: self.value,
21847 quote_style: self.quote_style,
21848 span,
21849 }
21850 }
21851}
21852
21853#[cfg(test)]
21854mod tests {
21855 use crate::test_utils::{all_dialects, TestedDialects};
21856
21857 use super::*;
21858
    #[test]
    fn test_prev_index() {
        // Exercises next_token/prev_token/peek_token bookkeeping, including
        // backing up over EOF (EOF is sticky at the end of the stream).
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            // Backing up after hitting EOF returns to the last real token.
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
21879
    #[test]
    fn test_peek_tokens() {
        // `peek_tokens` returns a fixed-size lookahead array without
        // advancing; positions past the end are padded with EOF.
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            // Peeking again from the same position sees the same tokens.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Advance past `SELECT foo AS bar`.
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
21924
21925 #[cfg(test)]
21926 mod test_parse_data_type {
21927 use crate::ast::{
21928 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
21929 };
21930 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
21931 use crate::test_utils::TestedDialects;
21932
        // Parses `$input` with each dialect in `$dialect`, asserting both the
        // expected `DataType` AST and that it round-trips through `Display`.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
21942
        #[test]
        fn test_ansii_character_string_types() {
            // ANSI character string types: CHARACTER/CHAR and their VARYING
            // forms, with optional length and CHARACTERS/OCTETS length units.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
22070
        #[test]
        fn test_ansii_character_large_object_types() {
            // ANSI character large-object types: CHARACTER/CHAR LARGE OBJECT
            // and CLOB, each with an optional length.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }
22102
        #[test]
        fn test_parse_custom_types() {
            // Unrecognized type names parse as DataType::Custom, with any
            // parenthesized arguments kept as raw strings.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }
22132
#[test]
fn test_ansii_exact_numeric_types() {
    // NUMERIC, DECIMAL and DEC all take the same optional precision and
    // precision+scale arguments, including negative scales.
    let dialects = TestedDialects::new(vec![
        Box::new(GenericDialect {}),
        Box::new(AnsiDialect {}),
        Box::new(PostgreSqlDialect {}),
    ]);
    // Shorten the repeated enum path below.
    type Eni = ExactNumberInfo;

    test_parse_data_type!(dialects, "NUMERIC", DataType::Numeric(Eni::None));
    test_parse_data_type!(dialects, "NUMERIC(2)", DataType::Numeric(Eni::Precision(2)));
    test_parse_data_type!(dialects, "NUMERIC(2,10)", DataType::Numeric(Eni::PrecisionAndScale(2, 10)));

    test_parse_data_type!(dialects, "DECIMAL", DataType::Decimal(Eni::None));
    test_parse_data_type!(dialects, "DECIMAL(2)", DataType::Decimal(Eni::Precision(2)));
    test_parse_data_type!(dialects, "DECIMAL(2,10)", DataType::Decimal(Eni::PrecisionAndScale(2, 10)));

    test_parse_data_type!(dialects, "DEC", DataType::Dec(Eni::None));
    test_parse_data_type!(dialects, "DEC(2)", DataType::Dec(Eni::Precision(2)));
    test_parse_data_type!(dialects, "DEC(2,10)", DataType::Dec(Eni::PrecisionAndScale(2, 10)));

    // Negative scales round-trip unchanged.
    test_parse_data_type!(dialects, "NUMERIC(10,-2)", DataType::Numeric(Eni::PrecisionAndScale(10, -2)));
    test_parse_data_type!(dialects, "DECIMAL(1000,-10)", DataType::Decimal(Eni::PrecisionAndScale(1000, -10)));
    test_parse_data_type!(dialects, "DEC(5,-1000)", DataType::Dec(Eni::PrecisionAndScale(5, -1000)));
    test_parse_data_type!(dialects, "NUMERIC(10,-5)", DataType::Numeric(Eni::PrecisionAndScale(10, -5)));
    test_parse_data_type!(dialects, "DECIMAL(20,-10)", DataType::Decimal(Eni::PrecisionAndScale(20, -10)));
    test_parse_data_type!(dialects, "DEC(5,-2)", DataType::Dec(Eni::PrecisionAndScale(5, -2)));

    // An explicit `+` sign on the scale is accepted on input but is not
    // preserved in the display form, so this case cannot use the
    // round-trip macro above.
    dialects.run_parser_method("NUMERIC(10,+5)", |parser| {
        let parsed = parser.parse_data_type().unwrap();
        assert_eq!(DataType::Numeric(Eni::PrecisionAndScale(10, 5)), parsed);
        assert_eq!("NUMERIC(10,5)", parsed.to_string());
    });
}
22231
#[test]
fn test_ansii_date_type() {
    // DATE, TIME and TIMESTAMP with an optional precision and the
    // WITH / WITHOUT TIME ZONE modifiers.
    let dialects =
        TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
    // Shorten the repeated enum path below.
    type Tz = TimezoneInfo;

    test_parse_data_type!(dialects, "DATE", DataType::Date);

    test_parse_data_type!(dialects, "TIME", DataType::Time(None, Tz::None));
    test_parse_data_type!(dialects, "TIME(6)", DataType::Time(Some(6), Tz::None));
    test_parse_data_type!(dialects, "TIME WITH TIME ZONE", DataType::Time(None, Tz::WithTimeZone));
    test_parse_data_type!(dialects, "TIME(6) WITH TIME ZONE", DataType::Time(Some(6), Tz::WithTimeZone));
    test_parse_data_type!(dialects, "TIME WITHOUT TIME ZONE", DataType::Time(None, Tz::WithoutTimeZone));
    test_parse_data_type!(dialects, "TIME(6) WITHOUT TIME ZONE", DataType::Time(Some(6), Tz::WithoutTimeZone));

    test_parse_data_type!(dialects, "TIMESTAMP", DataType::Timestamp(None, Tz::None));
    test_parse_data_type!(dialects, "TIMESTAMP(22)", DataType::Timestamp(Some(22), Tz::None));
    test_parse_data_type!(dialects, "TIMESTAMP(22) WITH TIME ZONE", DataType::Timestamp(Some(22), Tz::WithTimeZone));
    test_parse_data_type!(dialects, "TIMESTAMP(33) WITHOUT TIME ZONE", DataType::Timestamp(Some(33), Tz::WithoutTimeZone));
}
22296 }
22297
#[test]
fn test_parse_schema_name() {
    // Round-trips a schema name across all dialects: parse it, compare the
    // AST, then make sure displaying it reproduces the input exactly.
    fn check(input: &str, expected: SchemaName) {
        all_dialects().run_parser_method(input, |parser| {
            let schema_name = parser.parse_schema_name().unwrap();
            assert_eq!(schema_name, expected);
            assert_eq!(schema_name.to_string(), input);
        });
    }

    let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
    let dummy_authorization = Ident::new("dummy_authorization");

    check(
        &format!("{dummy_name}"),
        SchemaName::Simple(dummy_name.clone()),
    );
    check(
        &format!("AUTHORIZATION {dummy_authorization}"),
        SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
    );
    check(
        &format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
        SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
    );
}
22330
#[test]
fn mysql_parse_index_table_constraint() {
    // Round-trips inline INDEX / KEY table constraints (MySQL syntax)
    // through parse + display.
    fn check(dialect: &TestedDialects, input: &str, expected: TableConstraint) {
        dialect.run_parser_method(input, |parser| {
            let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
            assert_eq!(constraint, expected);
            assert_eq!(constraint.to_string(), input);
        });
    }

    // Expected parsed form of a bare column name in the index column list.
    fn column(name: &str) -> IndexColumn {
        IndexColumn {
            column: OrderByExpr {
                expr: Expr::Identifier(name.into()),
                options: OrderByOptions {
                    asc: None,
                    nulls_first: None,
                },
                with_fill: None,
            },
            operator_class: None,
        }
    }

    // Builds the expected constraint from the fields that vary per case.
    fn index(
        display_as_key: bool,
        name: Option<Ident>,
        index_type: Option<IndexType>,
        columns: Vec<IndexColumn>,
    ) -> TableConstraint {
        IndexConstraint {
            display_as_key,
            name,
            index_type,
            columns,
            index_options: vec![],
        }
        .into()
    }

    let dialect =
        TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

    check(&dialect, "INDEX (c1)", index(false, None, None, vec![column("c1")]));
    check(&dialect, "KEY (c1)", index(true, None, None, vec![column("c1")]));
    check(
        &dialect,
        "INDEX 'index' (c1, c2)",
        index(
            false,
            Some(Ident::with_quote('\'', "index")),
            None,
            vec![column("c1"), column("c2")],
        ),
    );
    check(
        &dialect,
        "INDEX USING BTREE (c1)",
        index(false, None, Some(IndexType::BTree), vec![column("c1")]),
    );
    check(
        &dialect,
        "INDEX USING HASH (c1)",
        index(false, None, Some(IndexType::Hash), vec![column("c1")]),
    );
    check(
        &dialect,
        "INDEX idx_name USING BTREE (c1)",
        index(
            false,
            Some(Ident::new("idx_name")),
            Some(IndexType::BTree),
            vec![column("c1")],
        ),
    );
    check(
        &dialect,
        "INDEX idx_name USING HASH (c1)",
        index(
            false,
            Some(Ident::new("idx_name")),
            Some(IndexType::Hash),
            vec![column("c1")],
        ),
    );
}
22452
#[test]
fn test_tokenizer_error_loc() {
    // Tokenizer failures must report the offending line and column.
    let result = Parser::parse_sql(&GenericDialect, "foo '");
    let expected = "Unterminated string literal at Line: 1, Column: 5".to_string();
    assert_eq!(result, Err(ParserError::TokenizerError(expected)));
}
22464
#[test]
fn test_parser_error_loc() {
    // Parser failures must report the offending line and column.
    let result = Parser::parse_sql(&GenericDialect, "SELECT this is a syntax error");
    let expected = "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16";
    assert_eq!(result, Err(ParserError::ParserError(expected.to_string())));
}
22477
#[test]
fn test_nested_explain_error() {
    // An EXPLAIN statement cannot itself be explained.
    let result = Parser::parse_sql(&GenericDialect, "EXPLAIN EXPLAIN SELECT 1");
    let expected = ParserError::ParserError("Explain must be root of the plan".to_string());
    assert_eq!(result, Err(expected));
}
22489
#[test]
fn test_parse_multipart_identifier_positive() {
    // Multipart identifiers keep each part's quoting; quoted parts may
    // contain dots and doubled (escaped) quotes.
    let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
    let unquoted = |value: &str| Ident {
        value: value.to_string(),
        quote_style: None,
        span: Span::empty(),
    };

    // A quoted middle part containing a period and an escaped quote.
    let expected = vec![
        unquoted("CATALOG"),
        Ident {
            value: "F(o)o. \"bar".to_string(),
            quote_style: Some('"'),
            span: Span::empty(),
        },
        unquoted("table"),
    ];
    dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
        assert_eq!(expected, parser.parse_multipart_identifier().unwrap());
    });

    // Whitespace around the separating periods is ignored.
    let expected = vec![unquoted("CATALOG"), unquoted("table")];
    dialect.run_parser_method("CATALOG . table", |parser| {
        assert_eq!(expected, parser.parse_multipart_identifier().unwrap());
    });
}
22535
#[test]
fn test_parse_multipart_identifier_negative() {
    // Invalid multipart identifiers: each case pairs the input with the
    // exact expected error message, checked across all dialects.
    let cases = [
        ("", "sql parser error: Empty input when parsing identifier"),
        ("*schema.table", "sql parser error: Unexpected token in identifier: *"),
        ("schema.table*", "sql parser error: Unexpected token in identifier: *"),
        ("schema.table.", "sql parser error: Trailing period in identifier"),
        (
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        ),
    ];

    for (sql, expected_err) in cases {
        all_dialects().run_parser_method(sql, |parser| {
            let actual_err = parser.parse_multipart_identifier().unwrap_err();
            assert_eq!(actual_err.to_string(), expected_err);
        });
    }
}
22572
#[test]
fn test_mysql_partition_selection() {
    // MySQL `PARTITION (...)` after a table name restricts the query to the
    // named partitions. Every unexpected AST shape panics here, so the test
    // cannot silently pass when the parse result does not match at all
    // (previously only the outermost mismatch was reported).
    let sql = "SELECT * FROM employees PARTITION (p0, p2)";
    let expected = vec!["p0", "p2"];

    let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    assert_eq!(ast.len(), 1);

    let partitions = match &ast[0] {
        Statement::Query(query) => match &*query.body {
            SetExpr::Select(select) => {
                assert_eq!(select.from.len(), 1);
                match &select.from[0].relation {
                    TableFactor::Table { partitions, .. } => partitions,
                    other => panic!("expected a plain table factor, got: {other:?}"),
                }
            }
            other => panic!("expected a SELECT body, got: {other:?}"),
        },
        _ => panic!("fail to parse mysql partition selection"),
    };

    let actual: Vec<&str> = partitions
        .iter()
        .map(|ident| ident.value.as_str())
        .collect();
    assert_eq!(expected, actual);
}
22597
#[test]
fn test_replace_into_placeholders() {
    // `&a` is not a valid placeholder token in the generic dialect.
    let result = Parser::parse_sql(&GenericDialect {}, "REPLACE INTO t (a) VALUES (&a)");
    assert!(result.is_err());
}
22604
#[test]
fn test_replace_into_set_placeholder() {
    // A bare placeholder is not a valid SET assignment list.
    let result = Parser::parse_sql(&GenericDialect {}, "REPLACE INTO t SET ?");
    assert!(result.is_err());
}
22611
#[test]
fn test_replace_incomplete() {
    // A bare REPLACE keyword with no target must be rejected.
    let result = Parser::parse_sql(&MySqlDialect {}, "REPLACE");
    assert!(result.is_err());
}
22618
#[test]
fn test_placeholder_invalid_whitespace() {
    // A space or a comment between `:` and the name breaks the placeholder,
    // so the statement must fail to parse.
    for separator in [" ", "/*invalid*/"] {
        let sql = format!("\nSELECT\n :{}fooBar", separator);
        assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
    }
}
22626}