1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors that can be produced while turning SQL text into an AST.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing started; carries its message.
    TokenizerError(String),
    /// A syntax error detected by the parser itself.
    ParserError(String),
    /// The configured recursion depth was exhausted while parsing deeply
    /// nested input (see `RecursionCounter`).
    RecursionLimitExceeded,
}
60
/// Builds an `Err(ParserError::ParserError(..))` whose message is `$MSG`
/// immediately followed by the rendered source location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
68mod alter;
69mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks the remaining allowed recursion depth while parsing.
    ///
    /// Depth is consumed through [`RecursionCounter::try_decrease`], which
    /// hands back an RAII [`DepthGuard`] that re-credits the level when it
    /// is dropped.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consumes one level of depth.
        ///
        /// Returns a guard that restores the level on drop, or
        /// `ParserError::RecursionLimitExceeded` when no depth remains.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(decremented) => {
                    self.remaining_depth.set(decremented);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// RAII guard that gives one recursion level back when dropped.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let restored = self.remaining_depth.get() + 1;
            self.remaining_depth.set(restored);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op recursion tracker for `no_std` builds (no `Rc`/`Cell` from
    /// `std`); recursion depth is NOT enforced in this configuration.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// Accepts (and ignores) the requested depth limit.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds — no depth accounting is performed.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized stand-in for the `std` RAII guard.
    pub struct DepthGuard {}
}
158
/// Whether a piece of syntax is required or may be omitted.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    /// The element may be absent.
    Optional,
    /// The element must be present.
    Mandatory,
}
167
/// Whether a derived table was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// Result of parsing a possibly-wildcard item: an ordinary expression,
/// a qualified wildcard (`alias.*`), or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
185
/// Converts a tokenizer failure into the parser's error type, preserving
/// the rendered error message.
impl From<TokenizerError> for ParserError {
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
206impl core::error::Error for ParserError {}
207
208const DEFAULT_REMAINING_DEPTH: usize = 50;
210
/// Sentinel returned when peeking/reading past the end of the token
/// stream; carries a zeroed, dummy source span.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
/// Newtype flag recording whether a trailing closing bracket was matched
/// while parsing — used by type-parsing helpers outside this view
/// (NOTE(review): presumably for nested generics like `ARRAY<ARRAY<INT>>`;
/// confirm at the call sites).
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        MatchedTrailingBracket(value)
    }
}
239
/// Behavior toggles for the parser, independent of the SQL dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept a trailing comma in comma-separated lists.
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization (on by default).
    pub unescape: bool,
    /// Require `;` between statements (on by default).
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        ParserOptions {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Creates options with the default settings.
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns these options with `trailing_commas` replaced.
    pub fn with_trailing_commas(self, trailing_commas: bool) -> Self {
        ParserOptions {
            trailing_commas,
            ..self
        }
    }

    /// Returns these options with `unescape` replaced.
    pub fn with_unescape(self, unescape: bool) -> Self {
        ParserOptions { unescape, ..self }
    }
}
292
/// Modes the parser can be in, altering how certain constructs are parsed.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing; no special handling.
    Normal,
    /// Inside a `CONNECT BY` clause — NOTE(review): presumably changes how
    /// hierarchical-query keywords are treated; confirm at usage sites.
    ConnectBy,
    /// Inside a column definition; `parse_subexpr` skips its `COLLATE`
    /// handling in this state (COLLATE is a column option there).
    ColumnDefinition,
}
308
/// SQL parser: a cursor over a tokenized statement plus the dialect and
/// options that drive parsing decisions.
pub struct Parser<'a> {
    /// The tokens to parse (may include whitespace/comment tokens).
    tokens: Vec<TokenWithSpan>,
    /// Index of the first token not yet consumed.
    index: usize,
    /// Current parsing mode (see [`ParserState`]).
    state: ParserState,
    /// The SQL dialect whose hooks customize parsing.
    dialect: &'a dyn Dialect,
    /// Behavior toggles (trailing commas, unescaping, ...).
    options: ParserOptions,
    /// Remaining-depth tracker guarding against stack overflow.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
365 pub fn new(dialect: &'a dyn Dialect) -> Self {
381 Self {
382 tokens: vec![],
383 index: 0,
384 state: ParserState::Normal,
385 dialect,
386 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
387 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
388 }
389 }
390
391 pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
414 self.recursion_counter = RecursionCounter::new(recursion_limit);
415 self
416 }
417
418 pub fn with_options(mut self, options: ParserOptions) -> Self {
441 self.options = options;
442 self
443 }
444
445 pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
447 self.tokens = tokens;
448 self.index = 0;
449 self
450 }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
465 pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
472 debug!("Parsing sql '{sql}'...");
473 let tokens = Tokenizer::new(self.dialect, sql)
474 .with_unescape(self.options.unescape)
475 .tokenize_with_location()?;
476 Ok(self.with_tokens_with_locations(tokens))
477 }
478
    /// Parses zero or more semicolon-separated statements until EOF.
    ///
    /// Extra/stray semicolons between statements are skipped. When
    /// `require_semicolon_stmt_delimiter` is disabled, statements may
    /// directly follow one another without a `;`.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Consume any number of delimiters (also covers empty statements).
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // A bare `END` where a delimiter was expected terminates
                // the list (e.g. the END of an enclosing block).
                Token::Word(word)
                    if expecting_statement_delimiter && word.keyword == Keyword::END =>
                {
                    break;
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
530 pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
546 Parser::new(dialect).try_with_sql(sql)?.parse_statements()
547 }
548
549 pub fn parse_sql_with_comments(
554 dialect: &'a dyn Dialect,
555 sql: &str,
556 ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
557 let mut p = Parser::new(dialect).try_with_sql(sql)?;
558 p.parse_statements().map(|stmts| (stmts, p.into_comments()))
559 }
560
561 fn into_comments(self) -> comments::Comments {
563 let mut comments = comments::Comments::default();
564 for t in self.tokens.into_iter() {
565 match t.token {
566 Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
567 comments.offer(comments::CommentWithSpan {
568 comment: comments::Comment::SingleLine {
569 content: comment,
570 prefix,
571 },
572 span: t.span,
573 });
574 }
575 Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
576 comments.offer(comments::CommentWithSpan {
577 comment: comments::Comment::MultiLine(comment),
578 span: t.span,
579 });
580 }
581 _ => {}
582 }
583 }
584 comments
585 }
586
    /// Parses a single top-level SQL statement, dispatching on the first
    /// token. The dialect gets first chance to parse via its
    /// `parse_statement` hook.
    ///
    /// Arms that call `self.prev_token()` push the keyword back because
    /// the corresponding sub-parser expects to consume it itself.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Statements can nest (e.g. EXPLAIN <statement>) — bound the depth.
        let _guard = self.recursion_counter.try_decrease()?;

        // Dialect hook: a dialect may fully take over statement parsing.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                // All four keywords can begin a query.
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                // ATTACH has a DuckDB-specific grammar.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                // These sub-parsers take the already-consumed keyword token
                // (presumably to attach it to the AST) instead of rewinding.
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // LISTEN/UNLISTEN/NOTIFY only in dialects that support them;
                // otherwise the keyword falls through to "an SQL statement".
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                Keyword::SECURITY => self.parse_security_label().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A statement may also start with a parenthesized query.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
732
    /// Parses a procedural `CASE ... WHEN ... [ELSE ...] END [CASE]`
    /// *statement* (distinct from the CASE expression). Expects the parser
    /// to be positioned at the `CASE` keyword.
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // The operand is optional: `CASE WHEN ...` is the searched form.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; record the last keyword of the
        // terminator as the end token.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
769
    /// Parses `IF <cond> THEN ... [ELSEIF <cond> THEN ...]* [ELSE ...]
    /// END IF`. Expects the parser to be positioned at the `IF` keyword.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF branches, separated by the ELSEIF keyword.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Terminator is the two keywords `END IF`; the recorded end token
        // is the final `IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
809
810 fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
814 self.expect_keyword_is(Keyword::WHILE)?;
815 let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
816
817 Ok(WhileStatement { while_block })
818 }
819
    /// Parses one branch of a conditional statement (an IF/ELSEIF/ELSE/
    /// WHEN/WHILE arm): an optional condition, an optional `THEN`, and the
    /// branch body. `terminal_keywords` end the body's statement list.
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        // The keyword that opened this branch was already consumed by the
        // caller; it is still the "current" token.
        let start_token = self.get_current_token().clone();
        let mut then_token = None;

        let condition = match &start_token.token {
            // ELSE branches have no condition and no THEN.
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            // WHILE has a condition but no THEN keyword.
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            // Everything else: `<condition> THEN`.
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }
856
857 pub(crate) fn parse_conditional_statements(
860 &mut self,
861 terminal_keywords: &[Keyword],
862 ) -> Result<ConditionalStatements, ParserError> {
863 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
864 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
865 let statements = self.parse_statement_list(terminal_keywords)?;
866 let end_token = self.expect_keyword(Keyword::END)?;
867
868 ConditionalStatements::BeginEnd(BeginEndStatements {
869 begin_token: AttachedToken(begin_token),
870 statements,
871 end_token: AttachedToken(end_token),
872 })
873 } else {
874 ConditionalStatements::Sequence {
875 statements: self.parse_statement_list(terminal_keywords)?,
876 }
877 };
878 Ok(conditional_statements)
879 }
880
    /// Parses `RAISE [USING MESSAGE = <expr> | <expr>]`. A bare `RAISE`
    /// with no parsable expression yields `value: None`.
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // Optional bare expression; maybe_parse rewinds on failure.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }
    /// Parses `COMMENT [IF EXISTS] ON <object-type> <name> ... IS
    /// {'<text>' | NULL}`, including the extra clauses some object types
    /// require (argument lists, a second `ON <table>`, operator
    /// signatures). Expects `COMMENT` to have been consumed already.
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        let keyword = match &token.token {
            Token::Word(w) => Some(w.keyword),
            _ => None,
        };
        let object_type = match keyword {
            // MATERIALIZED VIEW is the only two-keyword object type.
            Some(Keyword::MATERIALIZED) => {
                self.expect_keyword_is(Keyword::VIEW)?;
                CommentObject::MaterializedView
            }
            Some(other) => match CommentObject::from_keyword(other) {
                Some(obj) => obj,
                None => return self.expected("comment object_type", token),
            },
            None => return self.expected("comment object_type", token),
        };
        // Operators use their own (symbolic) name grammar.
        let object_name = if object_type == CommentObject::Operator {
            self.parse_operator_name()?
        } else {
            self.parse_object_name(false)?
        };

        // Functions/procedures/aggregates may carry a parenthesized
        // argument-type list to disambiguate overloads.
        let arguments = match object_type {
            CommentObject::Function | CommentObject::Procedure | CommentObject::Aggregate => {
                if self.consume_token(&Token::LParen) {
                    let args =
                        self.parse_comma_separated0(Self::parse_function_arg, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;
                    Some(args.into_iter().map(|a| a.data_type).collect())
                } else {
                    None
                }
            }
            _ => None,
        };

        // Aggregates are the one case where the argument list is mandatory.
        if object_type == CommentObject::Aggregate && arguments.is_none() {
            return Err(ParserError::ParserError(
                "COMMENT ON AGGREGATE requires an argument list, e.g. AGGREGATE foo(int)".into(),
            ));
        }

        // Operators take a mandatory `(left, right)` argument-type pair.
        let operator_args = if object_type == CommentObject::Operator {
            self.expect_token(&Token::LParen)?;
            let left = self.parse_operator_arg_type_or_none()?;
            self.expect_token(&Token::Comma)?;
            let right = self.parse_operator_arg_type_or_none()?;
            self.expect_token(&Token::RParen)?;
            Some(CommentOperatorArgs { left, right })
        } else {
            None
        };

        // Some object types are scoped to a table (or, for constraints,
        // possibly a domain) via a second `ON` clause.
        let (table_name, on_domain) = match object_type {
            CommentObject::Trigger | CommentObject::Policy | CommentObject::Rule => {
                self.expect_keyword_is(Keyword::ON)?;
                (Some(self.parse_object_name(false)?), false)
            }
            CommentObject::Constraint => {
                self.expect_keyword_is(Keyword::ON)?;
                let on_domain = self.parse_keyword(Keyword::DOMAIN);
                (Some(self.parse_object_name(false)?), on_domain)
            }
            _ => (None, false),
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` yields `comment: None`.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            arguments,
            operator_args,
            table_name,
            on_domain,
            comment,
            if_exists,
        })
    }
987
988 pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
990 let mut channel = None;
991 let mut tables: Vec<ObjectName> = vec![];
992 let mut read_lock = false;
993 let mut export = false;
994
995 if !dialect_of!(self is MySqlDialect | GenericDialect) {
996 return parser_err!(
997 "Unsupported statement FLUSH",
998 self.peek_token_ref().span.start
999 );
1000 }
1001
1002 let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
1003 Some(FlushLocation::NoWriteToBinlog)
1004 } else if self.parse_keyword(Keyword::LOCAL) {
1005 Some(FlushLocation::Local)
1006 } else {
1007 None
1008 };
1009
1010 let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
1011 FlushType::BinaryLogs
1012 } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
1013 FlushType::EngineLogs
1014 } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
1015 FlushType::ErrorLogs
1016 } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
1017 FlushType::GeneralLogs
1018 } else if self.parse_keywords(&[Keyword::HOSTS]) {
1019 FlushType::Hosts
1020 } else if self.parse_keyword(Keyword::PRIVILEGES) {
1021 FlushType::Privileges
1022 } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
1023 FlushType::OptimizerCosts
1024 } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
1025 if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
1026 channel = Some(self.parse_object_name(false).unwrap().to_string());
1027 }
1028 FlushType::RelayLogs
1029 } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
1030 FlushType::SlowLogs
1031 } else if self.parse_keyword(Keyword::STATUS) {
1032 FlushType::Status
1033 } else if self.parse_keyword(Keyword::USER_RESOURCES) {
1034 FlushType::UserResources
1035 } else if self.parse_keywords(&[Keyword::LOGS]) {
1036 FlushType::Logs
1037 } else if self.parse_keywords(&[Keyword::TABLES]) {
1038 loop {
1039 let next_token = self.next_token();
1040 match &next_token.token {
1041 Token::Word(w) => match w.keyword {
1042 Keyword::WITH => {
1043 read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
1044 }
1045 Keyword::FOR => {
1046 export = self.parse_keyword(Keyword::EXPORT);
1047 }
1048 Keyword::NoKeyword => {
1049 self.prev_token();
1050 tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
1051 }
1052 _ => {}
1053 },
1054 _ => {
1055 break;
1056 }
1057 }
1058 }
1059
1060 FlushType::Tables
1061 } else {
1062 return self.expected_ref(
1063 "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
1064 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
1065 self.peek_token_ref(),
1066 );
1067 };
1068
1069 Ok(Statement::Flush {
1070 object_type,
1071 location,
1072 channel,
1073 read_lock,
1074 export,
1075 tables,
1076 })
1077 }
1078
    /// Parses `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`;
    /// the `MSCK` keyword itself was consumed by the caller.
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The whole partition-action clause is optional; maybe_parse
        // rewinds when `PARTITIONS` does not follow.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1106
    /// Parses `TRUNCATE [TABLE] [IF EXISTS] [ONLY] <name>[, ...]
    /// [PARTITION (...)] [RESTART|CONTINUE IDENTITY] [CASCADE|RESTRICT]
    /// [ON CLUSTER ...]`; `TRUNCATE` itself was consumed by the caller.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        // One or more targets; each may have ONLY before and `*` after it.
        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are only recognized for PostgreSQL-like
        // dialects.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1157
1158 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1159 if self.parse_keyword(Keyword::CASCADE) {
1160 Some(CascadeOption::Cascade)
1161 } else if self.parse_keyword(Keyword::RESTRICT) {
1162 Some(CascadeOption::Restrict)
1163 } else {
1164 None
1165 }
1166 }
1167
    /// Parses the parenthesized option list of DuckDB's `ATTACH`, e.g.
    /// `(READ_ONLY [TRUE|FALSE], TYPE <ident>)`. Returns an empty list
    /// when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean is optional: bare READ_ONLY is accepted.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // Options are comma-separated and closed by `)`.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1204
    /// Parses DuckDB's `ATTACH [DATABASE] [IF NOT EXISTS] <path>
    /// [AS <alias>] [(<options>)]`; `ATTACH` was consumed by the caller.
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1225
    /// Parses `DETACH [DATABASE] [IF EXISTS] <alias>`; `DETACH` was
    /// consumed by the caller.
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1237
    /// Parses `ATTACH [DATABASE] <expr> AS <schema>` (the non-DuckDB
    /// form); `ATTACH` was consumed by the caller.
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1250
    /// Parses `ANALYZE [TABLE] [<name> [(<columns>)]]` followed by any mix
    /// of `PARTITION (...)`, `FOR COLUMNS [...]`, `CACHE METADATA`,
    /// `NOSCAN`, and `COMPUTE STATISTICS`; `ANALYZE` was consumed by the
    /// caller.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        // The table name is optional; maybe_parse rewinds on failure.
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // An immediate `(col, ...)` list is only valid after a table name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // The remaining clauses may appear in any order; loop until none
        // of the introducing keywords match.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // `FOR COLUMNS` may be followed by an optional list.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1315
    /// Parses a wildcard select item (`*`, `qualifier.*`, or `(*)`), or —
    /// if the input turns out not to be a wildcard — rewinds and parses a
    /// regular expression instead.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the cursor so we can rewind on the non-wildcard path.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            // `ident.` or `'str'.` — possible start of `qualifier.*`.
            t @ (Token::Word(_) | Token::SingleQuotedString(_))
                if self.peek_token_ref().token == Token::Period =>
            {
                let mut id_parts: Vec<Ident> = vec![match t {
                    Token::Word(w) => w.into_ident(next_token.span),
                    Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                    // Unreachable: the pattern above only admits the two
                    // token kinds handled here.
                    _ => {
                        return Err(ParserError::ParserError(
                            "Internal parser error: unexpected token type".to_string(),
                        ))
                    }
                }];

                // Accumulate `.part` segments until `*` or something else.
                while self.consume_token(&Token::Period) {
                    let next_token = self.next_token();
                    match next_token.token {
                        Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                        Token::SingleQuotedString(s) => {
                            id_parts.push(Ident::with_quote('\'', s))
                        }
                        Token::Placeholder(s) => {
                            id_parts.push(Ident::new(s))
                        }
                        Token::Mul => {
                            // `qualifier.*`
                            return Ok(Expr::QualifiedWildcard(
                                ObjectName::from(id_parts),
                                AttachedToken(next_token),
                            ));
                        }
                        _ => {
                            return self.expected("an identifier or a '*' after '.'", next_token);
                        }
                    }
                }
            }
            // Bare `*`.
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            // `(*)` — a parenthesized bare `*` is also a wildcard; the
            // recorded token is the `*` itself.
            Token::LParen => {
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse as an ordinary expression.
        self.index = index;
        self.parse_expr()
    }
1378
    /// Parses a new expression starting from the lowest (unknown)
    /// precedence level of the dialect.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1383
    /// Parses `<expr> [alias] [ASC | DESC]` into an expression with an
    /// optional alias and ordering direction (`nulls_first` is never set
    /// here).
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject implicit aliases that are really the start of the next
        // clause (ASC/DESC/GROUP); an explicit `AS` still allows them.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1403
    /// Parses an expression whose binary operators all bind tighter than
    /// `precedence` (precedence-climbing).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        // COLLATE attaches to the expression parsed so far — except inside
        // a column definition, where COLLATE is a column option instead.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            // Stop once the next operator binds no tighter than our caller.
            if precedence >= next_precedence {
                break;
            }

            // `.` access was handled by parse_compound_expr above; never
            // treat it as an infix operator here.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1442
1443 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1445 let condition = self.parse_expr()?;
1446 let message = if self.parse_keyword(Keyword::AS) {
1447 Some(self.parse_expr()?)
1448 } else {
1449 None
1450 };
1451
1452 Ok(Statement::Assert { condition, message })
1453 }
1454
1455 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1457 let name = self.parse_identifier()?;
1458 Ok(Statement::Savepoint { name })
1459 }
1460
1461 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1463 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1464 let name = self.parse_identifier()?;
1465
1466 Ok(Statement::ReleaseSavepoint { name })
1467 }
1468
1469 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1471 let channel = self.parse_identifier()?;
1472 Ok(Statement::LISTEN { channel })
1473 }
1474
1475 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1477 let channel = if self.consume_token(&Token::Mul) {
1478 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1479 } else {
1480 match self.parse_identifier() {
1481 Ok(expr) => expr,
1482 _ => {
1483 self.prev_token();
1484 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1485 }
1486 }
1487 };
1488 Ok(Statement::UNLISTEN { channel })
1489 }
1490
1491 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1493 let channel = self.parse_identifier()?;
1494 let payload = if self.consume_token(&Token::Comma) {
1495 Some(self.parse_literal_string()?)
1496 } else {
1497 None
1498 };
1499 Ok(Statement::NOTIFY { channel, payload })
1500 }
1501
1502 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1504 if self.peek_keyword(Keyword::TABLE) {
1505 self.expect_keyword(Keyword::TABLE)?;
1506 let rename_tables = self.parse_comma_separated(|parser| {
1507 let old_name = parser.parse_object_name(false)?;
1508 parser.expect_keyword(Keyword::TO)?;
1509 let new_name = parser.parse_object_name(false)?;
1510
1511 Ok(RenameTable { old_name, new_name })
1512 })?;
1513 Ok(rename_tables.into())
1514 } else {
1515 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1516 }
1517 }
1518
    /// Try to parse an expression prefix introduced by the reserved keyword
    /// `w` (spanning `w_span`). Returns `Ok(None)` when the keyword carries
    /// no special syntax in the active dialect, letting the caller fall back
    /// to identifier handling.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean and NULL literals: rewind and re-parse via parse_value.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Niladic "functions" written without parentheses (Postgres-style).
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Time functions that may appear with or without parentheses.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS(<subquery>) — except in Databricks, where EXISTS is only
            // the subquery form when a SELECT/WITH actually follows.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[1, 2, ...]` — bracketed array literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(SELECT ...)` — array built from a subquery.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR expr` is only valid while parsing a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // `MAP {…}` — DuckDB-style map literal.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric typed strings, e.g. `POINT '(1,2)'` (Postgres types).
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1645
1646 fn parse_expr_prefix_by_unreserved_word(
1648 &mut self,
1649 w: &Word,
1650 w_span: Span,
1651 ) -> Result<Expr, ParserError> {
1652 let is_outer_join = self.peek_outer_join_operator();
1653 match &self.peek_token_ref().token {
1654 Token::LParen if !is_outer_join => {
1655 let id_parts = vec![w.to_ident(w_span)];
1656 self.parse_function(ObjectName::from(id_parts))
1657 }
1658 Token::SingleQuotedString(_)
1660 | Token::DoubleQuotedString(_)
1661 | Token::HexStringLiteral(_)
1662 if w.value.starts_with('_') =>
1663 {
1664 Ok(Expr::Prefixed {
1665 prefix: w.to_ident(w_span),
1666 value: self.parse_introduced_string_expr()?.into(),
1667 })
1668 }
1669 Token::SingleQuotedString(_)
1671 | Token::DoubleQuotedString(_)
1672 | Token::HexStringLiteral(_)
1673 if w.value.starts_with('_') =>
1674 {
1675 Ok(Expr::Prefixed {
1676 prefix: w.to_ident(w_span),
1677 value: self.parse_introduced_string_expr()?.into(),
1678 })
1679 }
1680 Token::Arrow if self.dialect.supports_lambda_functions() => {
1684 self.expect_token(&Token::Arrow)?;
1685 Ok(Expr::Lambda(LambdaFunction {
1686 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1687 name: w.to_ident(w_span),
1688 data_type: None,
1689 }),
1690 body: Box::new(self.parse_expr()?),
1691 syntax: LambdaSyntax::Arrow,
1692 }))
1693 }
1694 Token::Word(_)
1698 if self.dialect.supports_lambda_functions()
1699 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1700 {
1701 let data_type = self.parse_data_type()?;
1702 self.expect_token(&Token::Arrow)?;
1703 Ok(Expr::Lambda(LambdaFunction {
1704 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1705 name: w.to_ident(w_span),
1706 data_type: Some(data_type),
1707 }),
1708 body: Box::new(self.parse_expr()?),
1709 syntax: LambdaSyntax::Arrow,
1710 }))
1711 }
1712 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1713 }
1714 }
1715
1716 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1719 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1720 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1721 } else {
1722 false
1723 }
1724 }
1725
1726 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1728 if let Some(prefix) = self.dialect.parse_prefix(self) {
1730 return prefix;
1731 }
1732
1733 let loc = self.peek_token_ref().span.start;
1750 let opt_expr = self.maybe_parse(|parser| {
1751 match parser.parse_data_type()? {
1752 DataType::Interval { .. } => parser.parse_interval(),
1753 DataType::Custom(ref name, ref modifiers)
1764 if modifiers.is_empty()
1765 && Self::is_simple_unquoted_object_name(name, "xml")
1766 && parser.dialect.supports_xml_expressions() =>
1767 {
1768 Ok(Expr::TypedString(TypedString {
1769 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1770 value: parser.parse_value()?,
1771 uses_odbc_syntax: false,
1772 }))
1773 }
1774 DataType::Custom(..) => parser_err!("dummy", loc),
1775 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1777 Ok(Expr::Cast {
1778 kind: CastKind::Cast,
1779 expr: Box::new(parser.parse_expr()?),
1780 data_type: DataType::Binary(None),
1781 array: false,
1782 format: None,
1783 })
1784 }
1785 data_type => Ok(Expr::TypedString(TypedString {
1786 data_type,
1787 value: parser.parse_value()?,
1788 uses_odbc_syntax: false,
1789 })),
1790 }
1791 })?;
1792
1793 if let Some(expr) = opt_expr {
1794 return Ok(expr);
1795 }
1796
1797 let dialect = self.dialect;
1801
1802 self.advance_token();
1803 let next_token_index = self.get_current_index();
1804 let next_token = self.get_current_token();
1805 let span = next_token.span;
1806 let expr = match &next_token.token {
1807 Token::Word(w) => {
1808 let w = w.clone();
1817 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1818 Ok(Some(expr)) => Ok(expr),
1820
1821 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1823
1824 Err(e) => {
1831 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1832 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1833 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1834 }) {
1835 return Ok(expr);
1836 }
1837 }
1838 return Err(e);
1839 }
1840 }
1841 } Token::LBracket => self.parse_array_expr(false),
1844 tok @ Token::Minus | tok @ Token::Plus => {
1845 let op = if *tok == Token::Plus {
1846 UnaryOperator::Plus
1847 } else {
1848 UnaryOperator::Minus
1849 };
1850 Ok(Expr::UnaryOp {
1851 op,
1852 expr: Box::new(
1853 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1854 ),
1855 })
1856 }
1857 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1858 op: UnaryOperator::BangNot,
1859 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1860 }),
1861 tok @ Token::DoubleExclamationMark
1862 | tok @ Token::PGSquareRoot
1863 | tok @ Token::PGCubeRoot
1864 | tok @ Token::AtSign
1865 if dialect_is!(dialect is PostgreSqlDialect) =>
1866 {
1867 let op = match tok {
1868 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1869 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1870 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1871 Token::AtSign => UnaryOperator::PGAbs,
1872 _ => {
1873 return Err(ParserError::ParserError(
1874 "Internal parser error: unexpected unary operator token".to_string(),
1875 ))
1876 }
1877 };
1878 Ok(Expr::UnaryOp {
1879 op,
1880 expr: Box::new(
1881 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1882 ),
1883 })
1884 }
1885 Token::Tilde => Ok(Expr::UnaryOp {
1886 op: UnaryOperator::BitwiseNot,
1887 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1888 }),
1889 tok @ Token::Sharp
1890 | tok @ Token::AtDashAt
1891 | tok @ Token::AtAt
1892 | tok @ Token::QuestionMarkDash
1893 | tok @ Token::QuestionPipe
1894 if self.dialect.supports_geometric_types() =>
1895 {
1896 let op = match tok {
1897 Token::Sharp => UnaryOperator::Hash,
1898 Token::AtDashAt => UnaryOperator::AtDashAt,
1899 Token::AtAt => UnaryOperator::DoubleAt,
1900 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1901 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1902 _ => {
1903 return Err(ParserError::ParserError(format!(
1904 "Unexpected token in unary operator parsing: {tok:?}"
1905 )))
1906 }
1907 };
1908 Ok(Expr::UnaryOp {
1909 op,
1910 expr: Box::new(
1911 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1912 ),
1913 })
1914 }
1915 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1916 {
1917 self.prev_token();
1918 Ok(Expr::Value(self.parse_value()?))
1919 }
1920 Token::UnicodeStringLiteral(_) => {
1921 self.prev_token();
1922 Ok(Expr::Value(self.parse_value()?))
1923 }
1924 Token::Number(_, _)
1925 | Token::SingleQuotedString(_)
1926 | Token::DoubleQuotedString(_)
1927 | Token::TripleSingleQuotedString(_)
1928 | Token::TripleDoubleQuotedString(_)
1929 | Token::DollarQuotedString(_)
1930 | Token::SingleQuotedByteStringLiteral(_)
1931 | Token::DoubleQuotedByteStringLiteral(_)
1932 | Token::TripleSingleQuotedByteStringLiteral(_)
1933 | Token::TripleDoubleQuotedByteStringLiteral(_)
1934 | Token::SingleQuotedRawStringLiteral(_)
1935 | Token::DoubleQuotedRawStringLiteral(_)
1936 | Token::TripleSingleQuotedRawStringLiteral(_)
1937 | Token::TripleDoubleQuotedRawStringLiteral(_)
1938 | Token::NationalStringLiteral(_)
1939 | Token::QuoteDelimitedStringLiteral(_)
1940 | Token::NationalQuoteDelimitedStringLiteral(_)
1941 | Token::HexStringLiteral(_) => {
1942 self.prev_token();
1943 Ok(Expr::Value(self.parse_value()?))
1944 }
1945 Token::LParen => {
1946 let expr =
1947 if let Some(expr) = self.try_parse_expr_sub_query()? {
1948 expr
1949 } else if let Some(lambda) = self.try_parse_lambda()? {
1950 return Ok(lambda);
1951 } else {
1952 let exprs = self.with_state(ParserState::Normal, |p| {
1963 p.parse_comma_separated(Parser::parse_expr)
1964 })?;
1965 match exprs.len() {
1966 0 => return Err(ParserError::ParserError(
1967 "Internal parser error: parse_comma_separated returned empty list"
1968 .to_string(),
1969 )),
1970 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1971 _ => Expr::Tuple(exprs),
1972 }
1973 };
1974 self.expect_token(&Token::RParen)?;
1975 Ok(expr)
1976 }
1977 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1978 self.prev_token();
1979 Ok(Expr::Value(self.parse_value()?))
1980 }
1981 Token::LBrace => {
1982 self.prev_token();
1983 self.parse_lbrace_expr()
1984 }
1985 _ => self.expected_at("an expression", next_token_index),
1986 }?;
1987
1988 Ok(expr)
1989 }
1990
1991 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1992 Ok(Expr::TypedString(TypedString {
1993 data_type: DataType::GeometricType(kind),
1994 value: self.parse_value()?,
1995 uses_odbc_syntax: false,
1996 }))
1997 }
1998
    /// Parse the remainder of a compound expression after `root`: `.field`
    /// accesses, `.'quoted'` parts, `.*` wildcards, subscripts like `[1]`,
    /// and the `(+)` outer-join marker, extending `chain` as it goes.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `expr.*`: only PostgreSQL consumes the wildcard as
                        // part of this expression; other dialects un-consume
                        // the `.` and leave it to the caller.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    // `expr.'quoted'` — quoted identifier access.
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    // `expr.?` style placeholder access.
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => {
                        // Speculatively parse a sub-expression at `.`
                        // precedence and accept it only when it is a valid
                        // member of an access chain.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested compound accesses into a single
                            // flat chain.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Fall back to parsing a plain identifier.
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `expr[...]` subscript access (PartiQL handles this itself).
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` requires every part before `*` to be an identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `a.b (+)` — Oracle outer-join marker after identifiers only.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2128
    /// Combine a root expression and an access chain into a single `Expr`,
    /// normalizing special shapes: all-identifier chains become
    /// `CompoundIdentifier`, trailing function calls fold their qualifiers
    /// into the function name, and a trailing `(+)` becomes `OuterJoin`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        // `a.b.c` where every part is an identifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `schema.table.func(...)`: identifier qualifiers followed by a
        // trailing function call fold into a function with a compound name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // skip the trailing function itself
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root + chain identifiers to the function's own name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // `a.b (+)`: a single trailing outer-join marker folds the parts
        // back into `OuterJoin(CompoundIdentifier)`.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Everything else stays a generic field-access chain.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2221
2222 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2223 match k {
2224 Keyword::LOCAL => Some(ContextModifier::Local),
2225 Keyword::GLOBAL => Some(ContextModifier::Global),
2226 Keyword::SESSION => Some(ContextModifier::Session),
2227 _ => None,
2228 }
2229 }
2230
2231 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2233 if !matches!(root, Expr::Identifier(_)) {
2234 return false;
2235 }
2236 fields
2237 .iter()
2238 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2239 }
2240
2241 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2243 let mut idents = vec![];
2244 if let Expr::Identifier(root) = root {
2245 idents.push(root);
2246 for x in fields {
2247 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2248 idents.push(ident);
2249 } else {
2250 return parser_err!(
2251 format!("Expected identifier, found: {}", x),
2252 x.span().start
2253 );
2254 }
2255 }
2256 Ok(idents)
2257 } else {
2258 parser_err!(
2259 format!("Expected identifier, found: {}", root),
2260 root.span().start
2261 )
2262 }
2263 }
2264
2265 fn peek_outer_join_operator(&mut self) -> bool {
2267 if !self.dialect.supports_outer_join_operator() {
2268 return false;
2269 }
2270
2271 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2272 Token::LParen == maybe_lparen.token
2273 && Token::Plus == maybe_plus.token
2274 && Token::RParen == maybe_rparen.token
2275 }
2276
2277 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2280 self.dialect.supports_outer_join_operator()
2281 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2282 }
2283
2284 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2286 self.expect_token(&Token::LParen)?;
2287 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2288 self.expect_token(&Token::RParen)?;
2289
2290 Ok(options)
2291 }
2292
2293 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2294 let name = self.parse_identifier()?;
2295
2296 let next_token = self.peek_token_ref();
2297 if next_token == &Token::Comma || next_token == &Token::RParen {
2298 return Ok(UtilityOption { name, arg: None });
2299 }
2300 let arg = self.parse_expr()?;
2301
2302 Ok(UtilityOption {
2303 name,
2304 arg: Some(arg),
2305 })
2306 }
2307
2308 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2309 if !self.peek_sub_query() {
2310 return Ok(None);
2311 }
2312
2313 Ok(Some(Expr::Subquery(self.parse_query()?)))
2314 }
2315
2316 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2317 if !self.dialect.supports_lambda_functions() {
2318 return Ok(None);
2319 }
2320 self.maybe_parse(|p| {
2321 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2322 p.expect_token(&Token::RParen)?;
2323 p.expect_token(&Token::Arrow)?;
2324 let expr = p.parse_expr()?;
2325 Ok(Expr::Lambda(LambdaFunction {
2326 params: OneOrManyWithParens::Many(params),
2327 body: Box::new(expr),
2328 syntax: LambdaSyntax::Arrow,
2329 }))
2330 })
2331 }
2332
2333 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2343 let params = self.parse_lambda_function_parameters()?;
2345 self.expect_token(&Token::Colon)?;
2347 let body = self.parse_expr()?;
2349 Ok(Expr::Lambda(LambdaFunction {
2350 params,
2351 body: Box::new(body),
2352 syntax: LambdaSyntax::LambdaKeyword,
2353 }))
2354 }
2355
2356 fn parse_lambda_function_parameters(
2358 &mut self,
2359 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2360 let params = if self.consume_token(&Token::LParen) {
2362 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2364 self.expect_token(&Token::RParen)?;
2365 OneOrManyWithParens::Many(params)
2366 } else {
2367 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2369 if params.len() == 1 {
2370 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2371 } else {
2372 OneOrManyWithParens::Many(params)
2373 }
2374 };
2375 Ok(params)
2376 }
2377
2378 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2380 let name = self.parse_identifier()?;
2381 let data_type = match &self.peek_token_ref().token {
2382 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2383 _ => None,
2384 };
2385 Ok(LambdaFunctionParameter { name, data_type })
2386 }
2387
2388 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2395 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2397 return Ok(Some(expr));
2398 }
2399 self.maybe_parse_odbc_body_datetime()
2401 }
2402
2403 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2414 self.maybe_parse(|p| {
2415 let token = p.next_token().clone();
2416 let word_string = token.token.to_string();
2417 let data_type = match word_string.as_str() {
2418 "t" => DataType::Time(None, TimezoneInfo::None),
2419 "d" => DataType::Date,
2420 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2421 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2422 };
2423 let value = p.parse_value()?;
2424 Ok(Expr::TypedString(TypedString {
2425 data_type,
2426 value,
2427 uses_odbc_syntax: true,
2428 }))
2429 })
2430 }
2431
2432 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2441 self.maybe_parse(|p| {
2442 p.expect_keyword(Keyword::FN)?;
2443 let fn_name = p.parse_object_name(false)?;
2444 let mut fn_call = p.parse_function_call(fn_name)?;
2445 fn_call.uses_odbc_syntax = true;
2446 Ok(Expr::Function(fn_call))
2447 })
2448 }
2449
2450 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2452 self.parse_function_call(name).map(Expr::Function)
2453 }
2454
    /// Parse a function invocation after its name, starting at the opening
    /// `(`. Handles subquery arguments, ClickHouse-style parameterized
    /// calls, WITHIN GROUP, FILTER, null treatment, and OVER clauses —
    /// in that order.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // `fn(SELECT ...)` — the entire argument list is a subquery.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style `fn(params)(args)`: if a second parenthesized list
        // follows, the first list was actually parameters.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for IGNORE/RESPECT NULLS here when it was not already
        // captured inside the argument list itself.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // OVER (window spec) or OVER named_window.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2542
2543 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2545 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2546 Some(keyword) => {
2547 self.expect_keyword_is(Keyword::NULLS)?;
2548
2549 Ok(match keyword {
2550 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2551 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2552 _ => None,
2553 })
2554 }
2555 None => Ok(None),
2556 }
2557 }
2558
2559 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2561 let args = if self.consume_token(&Token::LParen) {
2562 FunctionArguments::List(self.parse_function_argument_list()?)
2563 } else {
2564 FunctionArguments::None
2565 };
2566 Ok(Expr::Function(Function {
2567 name,
2568 uses_odbc_syntax: false,
2569 parameters: FunctionArguments::None,
2570 args,
2571 filter: None,
2572 over: None,
2573 null_treatment: None,
2574 within_group: vec![],
2575 }))
2576 }
2577
2578 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2580 let next_token = self.next_token();
2581 match &next_token.token {
2582 Token::Word(w) => match w.keyword {
2583 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2584 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2585 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2586 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2587 },
2588 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2589 }
2590 }
2591
2592 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2594 let units = self.parse_window_frame_units()?;
2595 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2596 let start_bound = self.parse_window_frame_bound()?;
2597 self.expect_keyword_is(Keyword::AND)?;
2598 let end_bound = Some(self.parse_window_frame_bound()?);
2599 (start_bound, end_bound)
2600 } else {
2601 (self.parse_window_frame_bound()?, None)
2602 };
2603 Ok(WindowFrame {
2604 units,
2605 start_bound,
2606 end_bound,
2607 })
2608 }
2609
2610 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2612 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2613 Ok(WindowFrameBound::CurrentRow)
2614 } else {
2615 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2616 None
2617 } else {
2618 Some(Box::new(match &self.peek_token_ref().token {
2619 Token::SingleQuotedString(_) => self.parse_interval()?,
2620 _ => self.parse_expr()?,
2621 }))
2622 };
2623 if self.parse_keyword(Keyword::PRECEDING) {
2624 Ok(WindowFrameBound::Preceding(rows))
2625 } else if self.parse_keyword(Keyword::FOLLOWING) {
2626 Ok(WindowFrameBound::Following(rows))
2627 } else {
2628 self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
2629 }
2630 }
2631 }
2632
2633 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2635 if self.dialect.supports_group_by_expr() {
2636 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2637 self.expect_token(&Token::LParen)?;
2638 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2639 self.expect_token(&Token::RParen)?;
2640 Ok(Expr::GroupingSets(result))
2641 } else if self.parse_keyword(Keyword::CUBE) {
2642 self.expect_token(&Token::LParen)?;
2643 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2644 self.expect_token(&Token::RParen)?;
2645 Ok(Expr::Cube(result))
2646 } else if self.parse_keyword(Keyword::ROLLUP) {
2647 self.expect_token(&Token::LParen)?;
2648 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2649 self.expect_token(&Token::RParen)?;
2650 Ok(Expr::Rollup(result))
2651 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2652 Ok(Expr::Tuple(vec![]))
2656 } else {
2657 self.parse_expr()
2658 }
2659 } else {
2660 self.parse_expr()
2662 }
2663 }
2664
2665 fn parse_tuple(
2669 &mut self,
2670 lift_singleton: bool,
2671 allow_empty: bool,
2672 ) -> Result<Vec<Expr>, ParserError> {
2673 if lift_singleton {
2674 if self.consume_token(&Token::LParen) {
2675 let result = if allow_empty && self.consume_token(&Token::RParen) {
2676 vec![]
2677 } else {
2678 let result = self.parse_comma_separated(Parser::parse_expr)?;
2679 self.expect_token(&Token::RParen)?;
2680 result
2681 };
2682 Ok(result)
2683 } else {
2684 Ok(vec![self.parse_expr()?])
2685 }
2686 } else {
2687 self.expect_token(&Token::LParen)?;
2688 let result = if allow_empty && self.consume_token(&Token::RParen) {
2689 vec![]
2690 } else {
2691 let result = self.parse_comma_separated(Parser::parse_expr)?;
2692 self.expect_token(&Token::RParen)?;
2693 result
2694 };
2695 Ok(result)
2696 }
2697 }
2698
    /// Parses a `CASE ... END` expression. The `CASE` keyword has already
    /// been consumed; its token is captured for span tracking.
    ///
    /// Handles both the "simple" form (`CASE <operand> WHEN ...`) and the
    /// "searched" form (`CASE WHEN <condition> ...`).
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        let case_token = AttachedToken(self.get_current_token().clone());
        let mut operand = None;
        // If WHEN does not follow immediately, this is the simple form and
        // the operand expression comes first.
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword_is(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        // One iteration per `WHEN <cond> THEN <result>` arm.
        loop {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::THEN)?;
            let result = self.parse_expr()?;
            conditions.push(CaseWhen { condition, result });
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        // The END token is also attached for span tracking.
        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
        Ok(Expr::Case {
            case_token,
            end_token,
            operand,
            conditions,
            else_result,
        })
    }
2731
2732 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2734 if self.parse_keyword(Keyword::FORMAT) {
2735 let value = self.parse_value()?;
2736 match self.parse_optional_time_zone()? {
2737 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2738 None => Ok(Some(CastFormat::Value(value))),
2739 }
2740 } else {
2741 Ok(None)
2742 }
2743 }
2744
2745 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2747 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2748 self.parse_value().map(Some)
2749 } else {
2750 Ok(None)
2751 }
2752 }
2753
2754 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2756 self.expect_token(&Token::LParen)?;
2757 let data_type = self.parse_data_type()?;
2758 self.expect_token(&Token::Comma)?;
2759 let expr = self.parse_expr()?;
2760 let styles = if self.consume_token(&Token::Comma) {
2761 self.parse_comma_separated(Parser::parse_expr)?
2762 } else {
2763 Default::default()
2764 };
2765 self.expect_token(&Token::RParen)?;
2766 Ok(Expr::Convert {
2767 is_try,
2768 expr: Box::new(expr),
2769 data_type: Some(data_type),
2770 charset: None,
2771 target_before_value: true,
2772 styles,
2773 })
2774 }
2775
    /// Parses a `CONVERT` call. Three syntaxes are handled:
    /// - type-first (MSSQL-style), delegated to `parse_mssql_convert`
    ///   when the dialect reports `convert_type_before_value`;
    /// - `CONVERT(<expr> USING <charset>)` transcoding;
    /// - `CONVERT(<expr>, <type> [CHARACTER SET <charset>])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `CONVERT(expr USING charset)` — character-set transcoding form.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        // `CONVERT(expr, type [CHARACTER SET charset])`.
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2815
2816 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2818 self.expect_token(&Token::LParen)?;
2819 let expr = self.parse_expr()?;
2820 self.expect_keyword_is(Keyword::AS)?;
2821 let data_type = self.parse_data_type()?;
2822 let array = self.parse_keyword(Keyword::ARRAY);
2823 let format = self.parse_optional_cast_format()?;
2824 self.expect_token(&Token::RParen)?;
2825 Ok(Expr::Cast {
2826 kind,
2827 expr: Box::new(expr),
2828 data_type,
2829 array,
2830 format,
2831 })
2832 }
2833
2834 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2836 self.expect_token(&Token::LParen)?;
2837 let exists_node = Expr::Exists {
2838 negated,
2839 subquery: self.parse_query()?,
2840 };
2841 self.expect_token(&Token::RParen)?;
2842 Ok(exists_node)
2843 }
2844
2845 pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2847 self.expect_token(&Token::LParen)?;
2848 let field = self.parse_date_time_field()?;
2849
2850 let syntax = if self.parse_keyword(Keyword::FROM) {
2851 ExtractSyntax::From
2852 } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
2853 {
2854 ExtractSyntax::Comma
2855 } else {
2856 return Err(ParserError::ParserError(
2857 "Expected 'FROM' or ','".to_string(),
2858 ));
2859 };
2860
2861 let expr = self.parse_expr()?;
2862 self.expect_token(&Token::RParen)?;
2863 Ok(Expr::Extract {
2864 field,
2865 expr: Box::new(expr),
2866 syntax,
2867 })
2868 }
2869
2870 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2872 self.expect_token(&Token::LParen)?;
2873 let expr = self.parse_expr()?;
2874 let field = if self.parse_keyword(Keyword::TO) {
2876 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2878 } else if self.consume_token(&Token::Comma) {
2879 let v = self.parse_value()?;
2881 if matches!(v.value, Value::Number(_, _)) {
2882 CeilFloorKind::Scale(v)
2883 } else {
2884 return Err(ParserError::ParserError(
2885 "Scale field can only be of number type".to_string(),
2886 ));
2887 }
2888 } else {
2889 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2890 };
2891 self.expect_token(&Token::RParen)?;
2892 if is_ceil {
2893 Ok(Expr::Ceil {
2894 expr: Box::new(expr),
2895 field,
2896 })
2897 } else {
2898 Ok(Expr::Floor {
2899 expr: Box::new(expr),
2900 field,
2901 })
2902 }
2903 }
2904
    /// Parses `POSITION(<expr> IN <expr>)`. If that form does not match,
    /// `maybe_parse` rewinds the parser and the call is re-parsed as an
    /// ordinary function invocation named by `ident`.
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        let between_prec = self.dialect.prec_value(Precedence::Between);
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            // Parse with a precedence above BETWEEN so the following `IN`
            // keyword is not consumed as part of this subexpression.
            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword_is(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            // Fall back to an ordinary function call.
            None => self.parse_function(ObjectName::from(vec![ident])),
        }
    }
2929
    /// Parses `SUBSTRING(expr [FROM start] [FOR len])` and the
    /// comma-separated variant `SUBSTR(expr, start, len)`.
    ///
    /// `shorthand` records whether the SUBSTR spelling was used and
    /// `special` whether the comma syntax was used, so the AST can be
    /// rendered back the way it was written.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            // Defensive arm for exhaustiveness; expect_one_of_keywords
            // only returns keywords from the given list.
            _ => {
                self.prev_token();
                return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // A comma right after the first argument selects the
        // comma-separated syntax; the flag is kept for display.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2962
2963 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2967 self.expect_token(&Token::LParen)?;
2969 let expr = self.parse_expr()?;
2970 self.expect_keyword_is(Keyword::PLACING)?;
2971 let what_expr = self.parse_expr()?;
2972 self.expect_keyword_is(Keyword::FROM)?;
2973 let from_expr = self.parse_expr()?;
2974 let mut for_expr = None;
2975 if self.parse_keyword(Keyword::FOR) {
2976 for_expr = Some(self.parse_expr()?);
2977 }
2978 self.expect_token(&Token::RParen)?;
2979
2980 Ok(Expr::Overlay {
2981 expr: Box::new(expr),
2982 overlay_what: Box::new(what_expr),
2983 overlay_from: Box::new(from_expr),
2984 overlay_for: for_expr.map(Box::new),
2985 })
2986 }
2987
    /// Parses the body of a `TRIM` call:
    /// - `TRIM([BOTH | LEADING | TRAILING] [<what>] FROM <expr>)`
    /// - `TRIM(<expr>)`
    /// - `TRIM(<expr>, <characters> [, ...])` when the dialect supports
    ///   the comma-separated syntax.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional BOTH/LEADING/TRAILING specifier (peeked, then parsed).
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // `TRIM(<what> FROM <expr>)`: the expression parsed first was
            // the characters to trim, not the trim target.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            // Comma syntax: remaining arguments are the trim characters.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            // Plain `TRIM(expr)`.
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3032
3033 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3037 let next_token = self.next_token();
3038 match &next_token.token {
3039 Token::Word(w) => match w.keyword {
3040 Keyword::BOTH => Ok(TrimWhereField::Both),
3041 Keyword::LEADING => Ok(TrimWhereField::Leading),
3042 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3043 _ => self.expected("trim_where field", next_token)?,
3044 },
3045 _ => self.expected("trim_where field", next_token),
3046 }
3047 }
3048
3049 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3052 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3053 self.expect_token(&Token::RBracket)?;
3054 Ok(Expr::Array(Array { elem: exprs, named }))
3055 }
3056
    /// Parses the optional `ON OVERFLOW` clause of `LISTAGG`:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE ['filler'] WITH|WITHOUT COUNT`.
    /// Returns `Ok(None)` when no `ON OVERFLOW` clause is present.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler literal is optional; WITH/WITHOUT appearing
                // directly after TRUNCATE means it was omitted.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3095
    /// Parses a date/time field keyword such as `YEAR`, `DAY`, or
    /// `TIMEZONE_HOUR` (used by EXTRACT, CEIL/FLOOR, and INTERVAL).
    ///
    /// Dialect hooks extend the accepted set: `allow_extract_custom`
    /// permits arbitrary identifiers as custom fields, and
    /// `allow_extract_single_quotes` permits single-quoted strings.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery allows `WEEK(<weekday>)`, e.g. WEEK(MONDAY).
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the historical misspelling and the correct spelling
                // are distinct keywords and AST variants.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                _ if self.dialect.allow_extract_custom() => {
                    // Back up and re-read the word as a custom identifier.
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                // Back up and re-read the quoted string as an identifier.
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3174
3175 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3179 match &self.peek_token_ref().token {
3180 Token::Word(w) => match w.keyword {
3181 Keyword::EXISTS => {
3182 let negated = true;
3183 let _ = self.parse_keyword(Keyword::EXISTS);
3184 self.parse_exists_expr(negated)
3185 }
3186 _ => Ok(Expr::UnaryOp {
3187 op: UnaryOperator::Not,
3188 expr: Box::new(
3189 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3190 ),
3191 }),
3192 },
3193 _ => Ok(Expr::UnaryOp {
3194 op: UnaryOperator::Not,
3195 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3196 }),
3197 }
3198 }
3199
3200 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3210 let token = self.expect_token(&Token::LBrace)?;
3211
3212 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3213 self.expect_token(&Token::RBrace)?;
3214 return Ok(fn_expr);
3215 }
3216
3217 if self.dialect.supports_dictionary_syntax() {
3218 self.prev_token(); return self.parse_dictionary();
3220 }
3221
3222 self.expected("an expression", token)
3223 }
3224
    /// Parses the full-text search expression
    /// `MATCH (col [, ...]) AGAINST (<value> [modifier])`; the `MATCH`
    /// keyword has already been consumed by the caller.
    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;

        self.expect_keyword_is(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        // The search value is a literal value, not a general expression.
        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        // Optional search modifier; `None` means the default mode.
        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }
3273
    /// Parses an `INTERVAL` literal such as `INTERVAL '1' DAY` or
    /// `INTERVAL '1-2' YEAR TO MONTH`; the `INTERVAL` keyword has already
    /// been consumed.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // When the dialect requires a unit qualifier, the value may be any
        // expression; otherwise only a prefix expression is parsed so that
        // a following unit keyword is not swallowed by the value.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Optional leading unit, e.g. the DAY in `INTERVAL '1' DAY`.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                // `SECOND [(p [, s])]`: both precisions attach to SECOND
                // and no `TO <field>` range follows.
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    // A trailing SECOND may carry its own fractional
                    // precision, e.g. `DAY TO SECOND(3)`.
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3356
3357 pub fn next_token_is_temporal_unit(&mut self) -> bool {
3360 if let Token::Word(word) = &self.peek_token_ref().token {
3361 matches!(
3362 word.keyword,
3363 Keyword::YEAR
3364 | Keyword::YEARS
3365 | Keyword::MONTH
3366 | Keyword::MONTHS
3367 | Keyword::WEEK
3368 | Keyword::WEEKS
3369 | Keyword::DAY
3370 | Keyword::DAYS
3371 | Keyword::HOUR
3372 | Keyword::HOURS
3373 | Keyword::MINUTE
3374 | Keyword::MINUTES
3375 | Keyword::SECOND
3376 | Keyword::SECONDS
3377 | Keyword::CENTURY
3378 | Keyword::DECADE
3379 | Keyword::DOW
3380 | Keyword::DOY
3381 | Keyword::EPOCH
3382 | Keyword::ISODOW
3383 | Keyword::ISOYEAR
3384 | Keyword::JULIAN
3385 | Keyword::MICROSECOND
3386 | Keyword::MICROSECONDS
3387 | Keyword::MILLENIUM
3388 | Keyword::MILLENNIUM
3389 | Keyword::MILLISECOND
3390 | Keyword::MILLISECONDS
3391 | Keyword::NANOSECOND
3392 | Keyword::NANOSECONDS
3393 | Keyword::QUARTER
3394 | Keyword::TIMEZONE
3395 | Keyword::TIMEZONE_HOUR
3396 | Keyword::TIMEZONE_MINUTE
3397 )
3398 } else {
3399 false
3400 }
3401 }
3402
    /// Parses a typed struct literal such as `STRUCT<INT64>(1)` or
    /// `STRUCT(1 AS x)`. Backs up one token so `parse_struct_type_def`
    /// can re-read the STRUCT keyword.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        if trailing_bracket.0 {
            // A `>>` token closed one bracket too many for this nesting
            // level — reject the literal.
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token_ref().span.start
            );
        }

        self.expect_token(&Token::LParen)?;
        // `AS name` on a value is only allowed when no type list was
        // declared, hence the typed_syntax flag derived from `fields`.
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3430
    /// Parses one value inside a struct literal, optionally named via
    /// `<expr> AS <identifier>`. `typed_syntax` is true when the struct
    /// had an explicit type list, in which case `AS` names are rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // Back up so the error location points at the AS keyword
                // itself rather than the token after it.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token_ref().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3462
    /// Parses `STRUCT` followed by an optional angle-bracketed field list,
    /// `STRUCT< field [, ...] >`, using `elem_parser` for each field.
    ///
    /// Returns the parsed fields plus a flag reporting whether a `>>`
    /// token closed both this bracket and an outer one — needed for
    /// nested struct types, since `>>` is lexed as a single token.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // No `<` means an untyped struct: no declared fields.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // Stop when the element itself consumed this list's closing
            // bracket (via `>>`) or when no comma follows.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3505
3506 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3508 self.expect_keyword_is(Keyword::STRUCT)?;
3509 self.expect_token(&Token::LParen)?;
3510 let struct_body = self.parse_comma_separated(|parser| {
3511 let field_name = parser.parse_identifier()?;
3512 let field_type = parser.parse_data_type()?;
3513
3514 Ok(StructField {
3515 field_name: Some(field_name),
3516 field_type,
3517 options: None,
3518 })
3519 });
3520 self.expect_token(&Token::RParen)?;
3521 struct_body
3522 }
3523
    /// Parses one field inside `STRUCT<...>`. The field name is optional:
    /// it is present when the next two tokens look like `name type` or
    /// `name: type`. Returns the field plus the trailing-bracket flag
    /// propagated from parsing a nested type (see `parse_struct_type_def`).
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // Two words in a row (`name type`) or `name:` indicate a named field.
        let is_named_field = matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
        );

        let field_name = if is_named_field {
            let name = self.parse_identifier()?;
            // The colon separator between name and type is optional.
            let _ = self.consume_token(&Token::Colon);
            Some(name)
        } else {
            None
        };

        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        // Optional trailing OPTIONS(...) clause on the field.
        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3565
3566 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3576 self.expect_keyword_is(Keyword::UNION)?;
3577
3578 self.expect_token(&Token::LParen)?;
3579
3580 let fields = self.parse_comma_separated(|p| {
3581 Ok(UnionField {
3582 field_name: p.parse_identifier()?,
3583 field_type: p.parse_data_type()?,
3584 })
3585 })?;
3586
3587 self.expect_token(&Token::RParen)?;
3588
3589 Ok(fields)
3590 }
3591
3592 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3603 self.expect_token(&Token::LBrace)?;
3604
3605 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3606
3607 self.expect_token(&Token::RBrace)?;
3608
3609 Ok(Expr::Dictionary(fields))
3610 }
3611
3612 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3623 let key = self.parse_identifier()?;
3624
3625 self.expect_token(&Token::Colon)?;
3626
3627 let expr = self.parse_expr()?;
3628
3629 Ok(DictionaryField {
3630 key,
3631 value: Box::new(expr),
3632 })
3633 }
3634
3635 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3645 self.expect_token(&Token::LBrace)?;
3646 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3647 self.expect_token(&Token::RBrace)?;
3648 Ok(Expr::Map(Map { entries: fields }))
3649 }
3650
3651 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3661 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3663
3664 self.expect_token(&Token::Colon)?;
3665
3666 let value = self.parse_expr()?;
3667
3668 Ok(MapEntry {
3669 key: Box::new(key),
3670 value: Box::new(value),
3671 })
3672 }
3673
3674 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3684 self.expect_keyword_is(Keyword::MAP)?;
3685 self.expect_token(&Token::LParen)?;
3686 let key_data_type = self.parse_data_type()?;
3687 self.expect_token(&Token::Comma)?;
3688 let value_data_type = self.parse_data_type()?;
3689 self.expect_token(&Token::RParen)?;
3690
3691 Ok((key_data_type, value_data_type))
3692 }
3693
3694 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3704 self.expect_keyword_is(Keyword::TUPLE)?;
3705 self.expect_token(&Token::LParen)?;
3706 let mut field_defs = vec![];
3707 loop {
3708 let (def, _) = self.parse_struct_field_def()?;
3709 field_defs.push(def);
3710 if !self.consume_token(&Token::Comma) {
3711 break;
3712 }
3713 }
3714 self.expect_token(&Token::RParen)?;
3715
3716 Ok(field_defs)
3717 }
3718
    /// Consumes the `>` that closes a `STRUCT<...>` type list, unless a
    /// previously-seen `>>` already accounted for it.
    ///
    /// Because `>>` is lexed as a single ShiftRight token, consuming it
    /// here closes two nesting levels at once; the returned flag tells
    /// the caller that its own closing bracket has also been consumed.
    fn expect_closing_angle_bracket(
        &mut self,
        trailing_bracket: MatchedTrailingBracket,
    ) -> Result<MatchedTrailingBracket, ParserError> {
        let trailing_bracket = if !trailing_bracket.0 {
            match &self.peek_token_ref().token {
                Token::Gt => {
                    self.next_token();
                    false.into()
                }
                Token::ShiftRight => {
                    // `>>` closes this level and the enclosing one.
                    self.next_token();
                    true.into()
                }
                _ => return self.expected_ref(">", self.peek_token_ref()),
            }
        } else {
            // Already closed by an earlier `>>`; nothing to consume.
            false.into()
        };

        Ok(trailing_bracket)
    }
3745
3746 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3748 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3750 return infix;
3751 }
3752
3753 let dialect = self.dialect;
3754
3755 self.advance_token();
3756 let tok = self.get_current_token();
3757 debug!("infix: {tok:?}");
3758 let tok_index = self.get_current_index();
3759 let span = tok.span;
3760 let regular_binary_operator = match &tok.token {
3761 Token::Spaceship => Some(BinaryOperator::Spaceship),
3762 Token::DoubleEq => Some(BinaryOperator::Eq),
3763 Token::Assignment => Some(BinaryOperator::Assignment),
3764 Token::Eq => Some(BinaryOperator::Eq),
3765 Token::Neq => Some(BinaryOperator::NotEq),
3766 Token::Gt => Some(BinaryOperator::Gt),
3767 Token::GtEq => Some(BinaryOperator::GtEq),
3768 Token::Lt => Some(BinaryOperator::Lt),
3769 Token::LtEq => Some(BinaryOperator::LtEq),
3770 Token::Plus => Some(BinaryOperator::Plus),
3771 Token::Minus => Some(BinaryOperator::Minus),
3772 Token::Mul => Some(BinaryOperator::Multiply),
3773 Token::Mod => Some(BinaryOperator::Modulo),
3774 Token::StringConcat => Some(BinaryOperator::StringConcat),
3775 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3776 Token::Caret => {
3777 if dialect_is!(dialect is PostgreSqlDialect) {
3780 Some(BinaryOperator::PGExp)
3781 } else {
3782 Some(BinaryOperator::BitwiseXor)
3783 }
3784 }
3785 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3786 Token::Div => Some(BinaryOperator::Divide),
3787 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3788 Some(BinaryOperator::DuckIntegerDivide)
3789 }
3790 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3791 Some(BinaryOperator::PGBitwiseShiftLeft)
3792 }
3793 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3794 Some(BinaryOperator::PGBitwiseShiftRight)
3795 }
3796 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3797 Some(BinaryOperator::PGBitwiseXor)
3798 }
3799 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3800 Some(BinaryOperator::PGOverlap)
3801 }
3802 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3803 Some(BinaryOperator::PGOverlap)
3804 }
3805 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3806 Some(BinaryOperator::And)
3807 }
3808 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3809 Some(BinaryOperator::PGStartsWith)
3810 }
3811 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3812 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3813 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3814 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3815 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3816 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3817 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3818 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3819 Token::Arrow => Some(BinaryOperator::Arrow),
3820 Token::LongArrow => Some(BinaryOperator::LongArrow),
3821 Token::HashArrow => Some(BinaryOperator::HashArrow),
3822 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3823 Token::AtArrow => Some(BinaryOperator::AtArrow),
3824 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3825 Token::HashMinus => Some(BinaryOperator::HashMinus),
3826 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3827 Token::AtAt => Some(BinaryOperator::AtAt),
3828 Token::Question => Some(BinaryOperator::Question),
3829 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3830 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3831 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3832 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3833 Some(BinaryOperator::DoubleHash)
3834 }
3835
3836 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3837 Some(BinaryOperator::AndLt)
3838 }
3839 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3840 Some(BinaryOperator::AndGt)
3841 }
3842 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3843 Some(BinaryOperator::QuestionDash)
3844 }
3845 Token::AmpersandLeftAngleBracketVerticalBar
3846 if self.dialect.supports_geometric_types() =>
3847 {
3848 Some(BinaryOperator::AndLtPipe)
3849 }
3850 Token::VerticalBarAmpersandRightAngleBracket
3851 if self.dialect.supports_geometric_types() =>
3852 {
3853 Some(BinaryOperator::PipeAndGt)
3854 }
3855 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3856 Some(BinaryOperator::LtDashGt)
3857 }
3858 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3859 Some(BinaryOperator::LtCaret)
3860 }
3861 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3862 Some(BinaryOperator::GtCaret)
3863 }
3864 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3865 Some(BinaryOperator::QuestionHash)
3866 }
3867 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3868 Some(BinaryOperator::QuestionDoublePipe)
3869 }
3870 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3871 Some(BinaryOperator::QuestionDashPipe)
3872 }
3873 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3874 Some(BinaryOperator::TildeEq)
3875 }
3876 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3877 Some(BinaryOperator::LtLtPipe)
3878 }
3879 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3880 Some(BinaryOperator::PipeGtGt)
3881 }
3882 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3883
3884 Token::Word(w) => match w.keyword {
3885 Keyword::AND => Some(BinaryOperator::And),
3886 Keyword::OR => Some(BinaryOperator::Or),
3887 Keyword::XOR => Some(BinaryOperator::Xor),
3888 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3889 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3890 self.expect_token(&Token::LParen)?;
3891 let mut idents = vec![];
3896 loop {
3897 self.advance_token();
3898 idents.push(self.get_current_token().to_string());
3899 if !self.consume_token(&Token::Period) {
3900 break;
3901 }
3902 }
3903 self.expect_token(&Token::RParen)?;
3904 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3905 }
3906 _ => None,
3907 },
3908 _ => None,
3909 };
3910
3911 let tok = self.token_at(tok_index);
3912 if let Some(op) = regular_binary_operator {
3913 if let Some(keyword) =
3914 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3915 {
3916 self.expect_token(&Token::LParen)?;
3917 let right = if self.peek_sub_query() {
3918 self.prev_token(); self.parse_subexpr(precedence)?
3922 } else {
3923 let right = self.parse_subexpr(precedence)?;
3925 self.expect_token(&Token::RParen)?;
3926 right
3927 };
3928
3929 if !matches!(
3930 op,
3931 BinaryOperator::Gt
3932 | BinaryOperator::Lt
3933 | BinaryOperator::GtEq
3934 | BinaryOperator::LtEq
3935 | BinaryOperator::Eq
3936 | BinaryOperator::NotEq
3937 | BinaryOperator::PGRegexMatch
3938 | BinaryOperator::PGRegexIMatch
3939 | BinaryOperator::PGRegexNotMatch
3940 | BinaryOperator::PGRegexNotIMatch
3941 | BinaryOperator::PGLikeMatch
3942 | BinaryOperator::PGILikeMatch
3943 | BinaryOperator::PGNotLikeMatch
3944 | BinaryOperator::PGNotILikeMatch
3945 ) {
3946 return parser_err!(
3947 format!(
3948 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3949 ),
3950 span.start
3951 );
3952 };
3953
3954 Ok(match keyword {
3955 Keyword::ALL => Expr::AllOp {
3956 left: Box::new(expr),
3957 compare_op: op,
3958 right: Box::new(right),
3959 },
3960 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3961 left: Box::new(expr),
3962 compare_op: op,
3963 right: Box::new(right),
3964 is_some: keyword == Keyword::SOME,
3965 },
3966 unexpected_keyword => return Err(ParserError::ParserError(
3967 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3968 )),
3969 })
3970 } else {
3971 Ok(Expr::BinaryOp {
3972 left: Box::new(expr),
3973 op,
3974 right: Box::new(self.parse_subexpr(precedence)?),
3975 })
3976 }
3977 } else if let Token::Word(w) = &tok.token {
3978 match w.keyword {
3979 Keyword::IS => {
3980 if self.parse_keyword(Keyword::NULL) {
3981 Ok(Expr::IsNull(Box::new(expr)))
3982 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3983 Ok(Expr::IsNotNull(Box::new(expr)))
3984 } else if self.parse_keywords(&[Keyword::TRUE]) {
3985 Ok(Expr::IsTrue(Box::new(expr)))
3986 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3987 Ok(Expr::IsNotTrue(Box::new(expr)))
3988 } else if self.parse_keywords(&[Keyword::FALSE]) {
3989 Ok(Expr::IsFalse(Box::new(expr)))
3990 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3991 Ok(Expr::IsNotFalse(Box::new(expr)))
3992 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3993 Ok(Expr::IsUnknown(Box::new(expr)))
3994 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3995 Ok(Expr::IsNotUnknown(Box::new(expr)))
3996 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3997 let expr2 = self.parse_expr()?;
3998 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3999 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
4000 {
4001 let expr2 = self.parse_expr()?;
4002 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
4003 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
4004 Ok(is_normalized)
4005 } else {
4006 self.expected_ref(
4007 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
4008 self.peek_token_ref(),
4009 )
4010 }
4011 }
4012 Keyword::AT => {
4013 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4014 Ok(Expr::AtTimeZone {
4015 timestamp: Box::new(expr),
4016 time_zone: Box::new(self.parse_subexpr(precedence)?),
4017 })
4018 }
4019 Keyword::NOT
4020 | Keyword::IN
4021 | Keyword::BETWEEN
4022 | Keyword::LIKE
4023 | Keyword::ILIKE
4024 | Keyword::SIMILAR
4025 | Keyword::REGEXP
4026 | Keyword::RLIKE => {
4027 self.prev_token();
4028 let negated = self.parse_keyword(Keyword::NOT);
4029 let regexp = self.parse_keyword(Keyword::REGEXP);
4030 let rlike = self.parse_keyword(Keyword::RLIKE);
4031 let null = if !self.in_column_definition_state() {
4032 self.parse_keyword(Keyword::NULL)
4033 } else {
4034 false
4035 };
4036 if regexp || rlike {
4037 Ok(Expr::RLike {
4038 negated,
4039 expr: Box::new(expr),
4040 pattern: Box::new(
4041 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4042 ),
4043 regexp,
4044 })
4045 } else if negated && null {
4046 Ok(Expr::IsNotNull(Box::new(expr)))
4047 } else if self.parse_keyword(Keyword::IN) {
4048 self.parse_in(expr, negated)
4049 } else if self.parse_keyword(Keyword::BETWEEN) {
4050 self.parse_between(expr, negated)
4051 } else if self.parse_keyword(Keyword::LIKE) {
4052 Ok(Expr::Like {
4053 negated,
4054 any: self.parse_keyword(Keyword::ANY),
4055 expr: Box::new(expr),
4056 pattern: Box::new(
4057 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4058 ),
4059 escape_char: self.parse_escape_char()?,
4060 })
4061 } else if self.parse_keyword(Keyword::ILIKE) {
4062 Ok(Expr::ILike {
4063 negated,
4064 any: self.parse_keyword(Keyword::ANY),
4065 expr: Box::new(expr),
4066 pattern: Box::new(
4067 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4068 ),
4069 escape_char: self.parse_escape_char()?,
4070 })
4071 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4072 Ok(Expr::SimilarTo {
4073 negated,
4074 expr: Box::new(expr),
4075 pattern: Box::new(
4076 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4077 ),
4078 escape_char: self.parse_escape_char()?,
4079 })
4080 } else {
4081 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4082 }
4083 }
4084 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4085 Ok(Expr::IsNotNull(Box::new(expr)))
4086 }
4087 Keyword::MEMBER => {
4088 if self.parse_keyword(Keyword::OF) {
4089 self.expect_token(&Token::LParen)?;
4090 let array = self.parse_expr()?;
4091 self.expect_token(&Token::RParen)?;
4092 Ok(Expr::MemberOf(MemberOf {
4093 value: Box::new(expr),
4094 array: Box::new(array),
4095 }))
4096 } else {
4097 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4098 }
4099 }
4100 _ => parser_err!(
4102 format!("No infix parser for token {:?}", tok.token),
4103 tok.span.start
4104 ),
4105 }
4106 } else if Token::DoubleColon == *tok {
4107 Ok(Expr::Cast {
4108 kind: CastKind::DoubleColon,
4109 expr: Box::new(expr),
4110 data_type: self.parse_data_type()?,
4111 array: false,
4112 format: None,
4113 })
4114 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4115 Ok(Expr::UnaryOp {
4116 op: UnaryOperator::PGPostfixFactorial,
4117 expr: Box::new(expr),
4118 })
4119 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4120 || (Token::Colon == *tok)
4121 {
4122 self.prev_token();
4123 self.parse_json_access(expr)
4124 } else {
4125 parser_err!(
4127 format!("No infix parser for token {:?}", tok.token),
4128 tok.span.start
4129 )
4130 }
4131 }
4132
4133 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4135 if self.parse_keyword(Keyword::ESCAPE) {
4136 Ok(Some(self.parse_value()?))
4137 } else {
4138 Ok(None)
4139 }
4140 }
4141
    /// Parses the interior of a `[...]` subscript after the opening bracket
    /// has been consumed. Recognizes a plain index `[i]` and slice forms
    /// `[l:]`, `[:u]`, `[l:u]`, `[l:u:s]` (any bound may be omitted).
    /// Consumes the closing `]`.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[i]` (plain index) or `[:]` (fully open slice).
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // A lower bound was parsed but no `]` followed, so this must be a
        // slice: require the separating `:` (a leading `:` was consumed above).
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[l:]` — upper bound omitted.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[l:u]` — no stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // `[l:u:s]` — second `:` then an optional stride expression.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride was parsed the `]` has not been consumed yet.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4216
4217 pub fn parse_multi_dim_subscript(
4219 &mut self,
4220 chain: &mut Vec<AccessExpr>,
4221 ) -> Result<(), ParserError> {
4222 while self.consume_token(&Token::LBracket) {
4223 self.parse_subscript(chain)?;
4224 }
4225 Ok(())
4226 }
4227
4228 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4232 let subscript = self.parse_subscript_inner()?;
4233 chain.push(AccessExpr::Subscript(subscript));
4234 Ok(())
4235 }
4236
    /// Parses a single object key inside a JSON path (the element after a
    /// `:` or `.` separator).
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            // Bare or quoted identifier; only double-quote/backtick quoting
            // (or no quoting at all) is accepted as a key here.
            Token::Word(Word {
                value,
                quote_style: quote_style @ (Some('"') | Some('`') | None),
                // Keywords are fine as object keys, so the keyword tag is ignored.
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            // String-literal key, e.g. `."my key"`.
            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
4261
4262 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4263 let path = self.parse_json_path()?;
4264 Ok(Expr::JsonAccess {
4265 value: Box::new(expr),
4266 path,
4267 })
4268 }
4269
    /// Parses a JSON access path: a leading `:key` or `:[expr]` element
    /// followed by any number of `.key` or `[expr]` elements. The guards on
    /// `path.is_empty()` enforce that `:` may only start a path and `.` may
    /// only continue one.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // Leading `:[expr]` (colon-bracket) element.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // Leading `:key` element.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // Continuation `.key` element.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `[expr]` element, valid anywhere in the path.
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; put the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this when a path-introducing token was seen.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4302
    /// Parses the right-hand side of `[NOT] IN` after the IN keyword has been
    /// consumed: `IN UNNEST(<expr>)`, `IN (<subquery>)`, or `IN (<expr list>)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array_expr)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure the position is rolled back and the
        // contents are re-parsed as a plain expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects permit `IN ()` with no elements.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4337
4338 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4340 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4343 self.expect_keyword_is(Keyword::AND)?;
4344 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4345 Ok(Expr::Between {
4346 expr: Box::new(expr),
4347 negated,
4348 low: Box::new(low),
4349 high: Box::new(high),
4350 })
4351 }
4352
4353 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4355 Ok(Expr::Cast {
4356 kind: CastKind::DoubleColon,
4357 expr: Box::new(expr),
4358 data_type: self.parse_data_type()?,
4359 array: false,
4360 format: None,
4361 })
4362 }
4363
    /// Returns the infix precedence of the next token, using the dialect's
    /// default precedence table.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4368
    /// Returns a reference to the token at `index`, or the shared EOF token
    /// when the index is out of range.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4374
    /// Returns a clone of the next non-whitespace token without consuming it.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4382
    /// Returns a reference to the next non-whitespace token without consuming it.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4388
    /// Returns the next `N` non-whitespace tokens (without spans), padding
    /// with EOF past the end of input; consumes nothing.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
4415
    /// Returns clones of the next `N` non-whitespace tokens (with spans)
    /// without consuming them. Positions past the end of input are filled
    /// with EOF tokens carrying an empty span.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
        // Walk a private cursor so the parser position is untouched.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Whitespace never counts toward the N peeked tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.cloned().unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            });
        })
    }
4438
    /// Returns references to the next `N` non-whitespace tokens without
    /// consuming them; positions past the end of input yield the shared
    /// EOF token. Non-cloning counterpart of `peek_tokens_with_location`.
    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
        // Walk a private cursor so the parser position is untouched.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Whitespace never counts toward the N peeked tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.unwrap_or(&EOF_TOKEN);
        })
    }
4458
    /// Returns a clone of the nth (0-based) upcoming non-whitespace token
    /// without consuming anything.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4463
    /// Returns a reference to the nth (0-based) upcoming non-whitespace
    /// token without consuming anything; EOF once past the end of input.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        // Scan forward from the current position on a private cursor,
        // counting only non-whitespace tokens against `n`.
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                // `None` (past the end) also lands here and maps to EOF.
                non_whitespace => {
                    if n == 0 {
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4483
    /// Returns a clone of the immediately following token, counting
    /// whitespace tokens, without consuming anything.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4489
4490 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4492 self.tokens
4493 .get(self.index + n)
4494 .cloned()
4495 .unwrap_or(TokenWithSpan {
4496 token: Token::EOF,
4497 span: Span::empty(),
4498 })
4499 }
4500
    /// Returns a reference to the token `n` positions ahead, counting
    /// whitespace tokens; the shared EOF token past the end.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4505
4506 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4510 let index = self.index;
4511 let matched = self.parse_keywords(expected);
4512 self.index = index;
4513 matched
4514 }
4515
    /// Advances past whitespace to the next token, consumes it, and returns
    /// a clone of it (EOF once past the end of input).
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4524
    /// Returns the index of the most recently consumed token (the one
    /// `get_current_token` refers to); saturates at 0 before any token
    /// has been consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4532
    /// Consumes exactly one token WITHOUT skipping whitespace and returns a
    /// reference to it, or `None` once past the end of input.
    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
        self.index += 1;
        self.tokens.get(self.index - 1)
    }
4538
4539 pub fn advance_token(&mut self) {
4543 loop {
4544 self.index += 1;
4545 match self.tokens.get(self.index - 1) {
4546 Some(TokenWithSpan {
4547 token: Token::Whitespace(_),
4548 span: _,
4549 }) => continue,
4550 _ => break,
4551 }
4552 }
4553 }
4554
    /// Returns the most recently consumed token (EOF if nothing has been
    /// consumed yet).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4561
    /// Returns the token consumed before the current one (EOF when there is
    /// no such token).
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4568
    /// Returns the token that would be consumed next, WITHOUT whitespace
    /// skipping (EOF past the end of input).
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4575
4576 pub fn prev_token(&mut self) {
4583 loop {
4584 assert!(self.index > 0);
4585 self.index -= 1;
4586 if let Some(TokenWithSpan {
4587 token: Token::Whitespace(_),
4588 span: _,
4589 }) = self.tokens.get(self.index)
4590 {
4591 continue;
4592 }
4593 return;
4594 }
4595 }
4596
4597 pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4599 parser_err!(
4600 format!("Expected: {expected}, found: {found}"),
4601 found.span.start
4602 )
4603 }
4604
    /// Reports an "Expected: …, found: …" parse error located at the found
    /// token's starting position (non-consuming, by-reference variant).
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4612
4613 pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4615 let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4616 parser_err!(
4617 format!("Expected: {expected}, found: {found}"),
4618 found.span.start
4619 )
4620 }
4621
4622 #[must_use]
4625 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4626 if self.peek_keyword(expected) {
4627 self.advance_token();
4628 true
4629 } else {
4630 false
4631 }
4632 }
4633
    /// Returns true if the next non-whitespace token is the word form of
    /// `expected`, without consuming anything.
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4641
    /// If `expected` is followed by exactly the given token sequence,
    /// consumes the keyword AND the tokens and returns true; otherwise
    /// consumes nothing.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4652
    /// Like `parse_keyword_with_tokens` but never consumes anything — a pure
    /// lookahead for keyword-plus-token sequences.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4660
    /// Shared implementation for matching a keyword immediately followed by
    /// `tokens`. Matching is done entirely by lookahead; only when `consume`
    /// is true AND everything matched is the position advanced past the
    /// keyword and all of the tokens.
    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
        match &self.peek_token_ref().token {
            Token::Word(w) if expected == w.keyword => {
                // Peek positions 1..=len to check the trailing token sequence.
                for (idx, token) in tokens.iter().enumerate() {
                    if self.peek_nth_token_ref(idx + 1).token != *token {
                        return false;
                    }
                }

                if consume {
                    // Keyword plus every matched token.
                    for _ in 0..(tokens.len() + 1) {
                        self.advance_token();
                    }
                }

                true
            }
            _ => false,
        }
    }
4681
    /// Consumes the exact keyword sequence if fully present; otherwise
    /// consumes nothing and returns false.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4689
    /// Consumes the exact keyword sequence if fully present, returning the
    /// token index of the FIRST keyword; otherwise restores the starting
    /// position and returns `None`.
    #[must_use]
    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
        let start_index = self.index;
        let mut first_keyword_index = None;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // Partial match: roll back everything consumed so far.
                self.index = start_index;
                return None;
            }
            if first_keyword_index.is_none() {
                // After parse_keyword, `index` points one past the keyword.
                first_keyword_index = Some(self.index.saturating_sub(1));
            }
        }
        first_keyword_index
    }
4707
4708 #[must_use]
4711 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4712 for keyword in keywords {
4713 if self.peek_keyword(*keyword) {
4714 return Some(*keyword);
4715 }
4716 }
4717 None
4718 }
4719
4720 #[must_use]
4724 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4725 match &self.peek_token_ref().token {
4726 Token::Word(w) => {
4727 keywords
4728 .iter()
4729 .find(|keyword| **keyword == w.keyword)
4730 .map(|keyword| {
4731 self.advance_token();
4732 *keyword
4733 })
4734 }
4735 _ => None,
4736 }
4737 }
4738
4739 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4742 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4743 Ok(keyword)
4744 } else {
4745 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4746 self.expected_ref(
4747 &format!("one of {}", keywords.join(" or ")),
4748 self.peek_token_ref(),
4749 )
4750 }
4751 }
4752
4753 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4758 if self.parse_keyword(expected) {
4759 Ok(self.get_current_token().clone())
4760 } else {
4761 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4762 }
4763 }
4764
4765 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4771 if self.parse_keyword(expected) {
4772 Ok(())
4773 } else {
4774 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4775 }
4776 }
4777
4778 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4781 for &kw in expected {
4782 self.expect_keyword_is(kw)?;
4783 }
4784 Ok(())
4785 }
4786
4787 #[must_use]
4791 pub fn consume_token(&mut self, expected: &Token) -> bool {
4792 if self.peek_token_ref() == expected {
4793 self.advance_token();
4794 true
4795 } else {
4796 false
4797 }
4798 }
4799
4800 #[must_use]
4804 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4805 let index = self.index;
4806 for token in tokens {
4807 if !self.consume_token(token) {
4808 self.index = index;
4809 return false;
4810 }
4811 }
4812 true
4813 }
4814
4815 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4817 if self.peek_token_ref() == expected {
4818 Ok(self.next_token())
4819 } else {
4820 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4821 }
4822 }
4823
4824 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4825 where
4826 <T as FromStr>::Err: Display,
4827 {
4828 s.parse::<T>().map_err(|e| {
4829 ParserError::ParserError(format!(
4830 "Could not parse '{s}' as {}: {e}{loc}",
4831 core::any::type_name::<T>()
4832 ))
4833 })
4834 }
4835
    /// Parses a SELECT projection list (comma-separated select items),
    /// honoring trailing-comma support from either the parser options or
    /// the dialect.
    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
        // Bitwise `|` (not `||`) is deliberate: both flags are plain bools
        // and both sides are always evaluated.
        let trailing_commas =
            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();

        self.parse_comma_separated_with_trailing_commas(
            |p| p.parse_select_item(),
            trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4852
    /// Parses the comma-separated permission list of a GRANT statement.
    /// Implements its own trailing-comma handling because the list ends at
    /// the `ON` keyword rather than at a generic reserved word.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // A comma was consumed; if what follows terminates the list,
                // treat it as a trailing comma and stop.
                match &self.peek_token_ref().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4876
    /// Parses the comma-separated table references of a FROM clause,
    /// honoring dialect support for trailing commas. A keyword ends the list
    /// when the dialect does not treat it as the start of a table factor.
    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
        let trailing_commas = self.dialect.supports_from_trailing_commas();

        self.parse_comma_separated_with_trailing_commas(
            Parser::parse_table_and_joins,
            trailing_commas,
            |kw, parser| !self.dialect.is_table_factor(kw, parser),
        )
    }
4887
    /// Decides whether a comma-separated list has ended. Consumes the
    /// separating comma when one is present; when `trailing_commas` is
    /// enabled a comma followed by a terminator (reserved keyword, `)`,
    /// `;`, EOF, `]`, `}`) also ends the list.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            // No comma: the list is definitely over.
            true
        } else if trailing_commas {
            // Look at the token after the comma without really consuming it:
            // advance, inspect, then step back.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            // Comma consumed, trailing commas not allowed: expect another item.
            false
        }
    }
4920
    /// End-of-list check using the parser's configured trailing-comma option
    /// and the default "reserved for column alias" terminator rule.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4929
    /// Parses one or more occurrences of `f` separated by commas, using the
    /// parser's configured trailing-comma option.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4941
4942 fn parse_comma_separated_with_trailing_commas<T, F, R>(
4947 &mut self,
4948 mut f: F,
4949 trailing_commas: bool,
4950 is_reserved_keyword: R,
4951 ) -> Result<Vec<T>, ParserError>
4952 where
4953 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4954 R: Fn(&Keyword, &mut Parser) -> bool,
4955 {
4956 let mut values = vec![];
4957 loop {
4958 values.push(f(self)?);
4959 if self.is_parse_comma_separated_end_with_trailing_commas(
4960 trailing_commas,
4961 &is_reserved_keyword,
4962 ) {
4963 break;
4964 }
4965 }
4966 Ok(values)
4967 }
4968
4969 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4971 where
4972 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4973 {
4974 let mut values = vec![];
4975 loop {
4976 values.push(f(self)?);
4977 if !self.consume_token(&Token::Period) {
4978 break;
4979 }
4980 }
4981 Ok(values)
4982 }
4983
4984 pub fn parse_keyword_separated<T, F>(
4986 &mut self,
4987 keyword: Keyword,
4988 mut f: F,
4989 ) -> Result<Vec<T>, ParserError>
4990 where
4991 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4992 {
4993 let mut values = vec![];
4994 loop {
4995 values.push(f(self)?);
4996 if !self.parse_keyword(keyword) {
4997 break;
4998 }
4999 }
5000 Ok(values)
5001 }
5002
5003 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
5005 where
5006 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
5007 {
5008 self.expect_token(&Token::LParen)?;
5009 let res = f(self)?;
5010 self.expect_token(&Token::RParen)?;
5011 Ok(res)
5012 }
5013
    /// Parses ZERO or more occurrences of `f` separated by commas, stopping
    /// before `end_token`. Unlike `parse_comma_separated`, an empty list is
    /// permitted; `end_token` itself is NOT consumed.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // Immediate terminator: the list is empty.
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        // `,)`-style: a lone trailing comma before the terminator is also an
        // empty list when trailing commas are allowed.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5035
    /// Parses semicolon-terminated statements until EOF or one of the
    /// (unquoted) `terminal_keywords` is seen; the terminator itself is not
    /// consumed. Every parsed statement must be followed by a `;`.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                // Only bare (unquoted) words count as terminators — a quoted
                // identifier that happens to spell a keyword does not.
                Token::Word(w)
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) =>
                {
                    break;
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5060
    /// Default list-terminator rule: a keyword ends a comma-separated list
    /// when the dialect does NOT allow it as a column alias.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5067
5068 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
5072 where
5073 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5074 {
5075 match self.try_parse(f) {
5076 Ok(t) => Ok(Some(t)),
5077 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
5078 _ => Ok(None),
5079 }
5080 }
5081
5082 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
5084 where
5085 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5086 {
5087 let index = self.index;
5088 match f(self) {
5089 Ok(t) => Ok(t),
5090 Err(e) => {
5091 self.index = index;
5093 Err(e)
5094 }
5095 }
5096 }
5097
    /// Parses an optional `ALL` / `DISTINCT` / `DISTINCT ON (...)` quantifier.
    /// Rejects contradictory `ALL DISTINCT` / `DISTINCT ALL` sequences.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember the location up front so errors point at the quantifier.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            // Defensive: parse_one_of_keywords only yields the two keywords above.
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only DISTINCT may carry an ON (...) clause.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            // Empty `ON ()`: un-consume the `)` so the expect below re-checks it.
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5136
    /// Parses a `CREATE ...` statement, assuming the `CREATE` keyword has
    /// already been consumed.
    ///
    /// First consumes the optional statement-wide modifiers (`OR REPLACE`,
    /// `OR ALTER`, `LOCAL`/`GLOBAL`, `TRANSIENT`, `TEMP`/`TEMPORARY`,
    /// DuckDB's `PERSISTENT`, and view parameters), then dispatches on the
    /// object-type keyword. Branch order matters: each `parse_keyword(s)`
    /// call consumes tokens on success, and multi-word forms (e.g.
    /// `SNAPSHOT TABLE`, `SECURE VIEW`) are peeked before their one-word
    /// prefixes are tried.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Collapse LOCAL/GLOBAL into a tri-state flag: Some(true) = GLOBAL,
        // Some(false) = LOCAL, None = neither was given.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is a DuckDB-only secret modifier; other dialects never
        // consume the keyword here.
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            // `CREATE USER MAPPING` (foreign-data) vs plain `CREATE USER`.
            if self.parse_keyword(Keyword::MAPPING) {
                self.parse_create_user_mapping().map(Into::into)
            } else {
                self.parse_create_user(or_replace).map(Into::into)
            }
        } else if self.parse_keyword(Keyword::AGGREGATE) {
            self.parse_create_aggregate(or_replace).map(Into::into)
        } else if self.peek_keyword(Keyword::TRUSTED)
            || self.peek_keyword(Keyword::PROCEDURAL)
            || self.peek_keyword(Keyword::LANGUAGE)
        {
            // CREATE [TRUSTED] [PROCEDURAL] LANGUAGE ...
            let trusted = self.parse_keyword(Keyword::TRUSTED);
            let procedural = self.parse_keyword(Keyword::PROCEDURAL);
            if self.parse_keyword(Keyword::LANGUAGE) {
                self.parse_create_language(or_replace, trusted, procedural)
                    .map(Into::into)
            } else {
                self.expected_ref(
                    "LANGUAGE after TRUSTED or PROCEDURAL",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keyword(Keyword::TRANSFORM) {
            self.parse_create_transform(or_replace).map(Into::into)
        } else if or_replace {
            // OR REPLACE is only valid for the object kinds handled above.
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::CAST) {
            self.parse_create_cast().map(Into::into)
        } else if self.parse_keyword(Keyword::CONVERSION) {
            self.parse_create_conversion(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CONVERSION]) {
            self.parse_create_conversion(true).map(Into::into)
        } else if self.parse_keyword(Keyword::RULE) {
            self.parse_create_rule().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // CREATE OPERATOR [FAMILY | CLASS] ...
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else if self.parse_keyword(Keyword::FOREIGN) {
            // CREATE FOREIGN DATA WRAPPER ... | CREATE FOREIGN TABLE ...
            if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
                self.parse_create_foreign_data_wrapper().map(Into::into)
            } else if self.parse_keyword(Keyword::TABLE) {
                self.parse_create_foreign_table().map(Into::into)
            } else {
                self.expected_ref(
                    "DATA WRAPPER or TABLE after CREATE FOREIGN",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
            self.parse_create_text_search()
        } else if self.parse_keyword(Keyword::PUBLICATION) {
            self.parse_create_publication().map(Into::into)
        } else if self.parse_keyword(Keyword::SUBSCRIPTION) {
            self.parse_create_subscription().map(Into::into)
        } else if self.parse_keyword(Keyword::STATISTICS) {
            self.parse_create_statistics().map(Into::into)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::METHOD]) {
            self.parse_create_access_method().map(Into::into)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::TRIGGER]) {
            self.parse_create_event_trigger().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLESPACE) {
            self.parse_create_tablespace().map(Into::into)
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5289
5290 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5291 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5292 let name = self.parse_identifier()?;
5293 let options = self
5294 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5295 .options;
5296 let with_tags = self.parse_keyword(Keyword::WITH);
5297 let tags = if self.parse_keyword(Keyword::TAG) {
5298 self.parse_key_value_options(true, &[])?.options
5299 } else {
5300 vec![]
5301 };
5302 Ok(CreateUser {
5303 or_replace,
5304 if_not_exists,
5305 name,
5306 options: KeyValueOptions {
5307 options,
5308 delimiter: KeyValueOptionsDelimiter::Space,
5309 },
5310 with_tags,
5311 tags: KeyValueOptions {
5312 options: tags,
5313 delimiter: KeyValueOptionsDelimiter::Comma,
5314 },
5315 })
5316 }
5317
5318 pub fn parse_create_secret(
5320 &mut self,
5321 or_replace: bool,
5322 temporary: bool,
5323 persistent: bool,
5324 ) -> Result<Statement, ParserError> {
5325 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5326
5327 let mut storage_specifier = None;
5328 let mut name = None;
5329 if self.peek_token_ref().token != Token::LParen {
5330 if self.parse_keyword(Keyword::IN) {
5331 storage_specifier = self.parse_identifier().ok()
5332 } else {
5333 name = self.parse_identifier().ok();
5334 }
5335
5336 if storage_specifier.is_none()
5338 && self.peek_token_ref().token != Token::LParen
5339 && self.parse_keyword(Keyword::IN)
5340 {
5341 storage_specifier = self.parse_identifier().ok();
5342 }
5343 }
5344
5345 self.expect_token(&Token::LParen)?;
5346 self.expect_keyword_is(Keyword::TYPE)?;
5347 let secret_type = self.parse_identifier()?;
5348
5349 let mut options = Vec::new();
5350 if self.consume_token(&Token::Comma) {
5351 options.append(&mut self.parse_comma_separated(|p| {
5352 let key = p.parse_identifier()?;
5353 let value = p.parse_identifier()?;
5354 Ok(SecretOption { key, value })
5355 })?);
5356 }
5357 self.expect_token(&Token::RParen)?;
5358
5359 let temp = match (temporary, persistent) {
5360 (true, false) => Some(true),
5361 (false, true) => Some(false),
5362 (false, false) => None,
5363 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
5364 };
5365
5366 Ok(Statement::CreateSecret {
5367 or_replace,
5368 temporary: temp,
5369 if_not_exists,
5370 name,
5371 storage_specifier,
5372 secret_type,
5373 options,
5374 })
5375 }
5376
5377 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5379 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5380 if self.parse_keyword(Keyword::TABLE) {
5381 let table_name = self.parse_object_name(false)?;
5382 if self.peek_token_ref().token != Token::EOF {
5383 if let Token::Word(word) = &self.peek_token_ref().token {
5384 if word.keyword == Keyword::OPTIONS {
5385 options = self.parse_options(Keyword::OPTIONS)?
5386 }
5387 };
5388
5389 if self.peek_token_ref().token != Token::EOF {
5390 let (a, q) = self.parse_as_query()?;
5391 has_as = a;
5392 query = Some(q);
5393 }
5394
5395 Ok(Statement::Cache {
5396 table_flag,
5397 table_name,
5398 has_as,
5399 options,
5400 query,
5401 })
5402 } else {
5403 Ok(Statement::Cache {
5404 table_flag,
5405 table_name,
5406 has_as,
5407 options,
5408 query,
5409 })
5410 }
5411 } else {
5412 table_flag = Some(self.parse_object_name(false)?);
5413 if self.parse_keyword(Keyword::TABLE) {
5414 let table_name = self.parse_object_name(false)?;
5415 if self.peek_token_ref().token != Token::EOF {
5416 if let Token::Word(word) = &self.peek_token_ref().token {
5417 if word.keyword == Keyword::OPTIONS {
5418 options = self.parse_options(Keyword::OPTIONS)?
5419 }
5420 };
5421
5422 if self.peek_token_ref().token != Token::EOF {
5423 let (a, q) = self.parse_as_query()?;
5424 has_as = a;
5425 query = Some(q);
5426 }
5427
5428 Ok(Statement::Cache {
5429 table_flag,
5430 table_name,
5431 has_as,
5432 options,
5433 query,
5434 })
5435 } else {
5436 Ok(Statement::Cache {
5437 table_flag,
5438 table_name,
5439 has_as,
5440 options,
5441 query,
5442 })
5443 }
5444 } else {
5445 if self.peek_token_ref().token == Token::EOF {
5446 self.prev_token();
5447 }
5448 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5449 }
5450 }
5451 }
5452
5453 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5455 match &self.peek_token_ref().token {
5456 Token::Word(word) => match word.keyword {
5457 Keyword::AS => {
5458 self.next_token();
5459 Ok((true, self.parse_query()?))
5460 }
5461 _ => Ok((false, self.parse_query()?)),
5462 },
5463 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5464 }
5465 }
5466
5467 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5469 self.expect_keyword_is(Keyword::TABLE)?;
5470 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5471 let table_name = self.parse_object_name(false)?;
5472 Ok(Statement::UNCache {
5473 table_name,
5474 if_exists,
5475 })
5476 }
5477
5478 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5480 self.expect_keyword_is(Keyword::TABLE)?;
5481 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5482 let table_name = self.parse_object_name(false)?;
5483 self.expect_keyword_is(Keyword::USING)?;
5484 let module_name = self.parse_identifier()?;
5485 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5490 Ok(Statement::CreateVirtualTable {
5491 name: table_name,
5492 if_not_exists,
5493 module_name,
5494 module_args,
5495 })
5496 }
5497
5498 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5500 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5501
5502 let schema_name = self.parse_schema_name()?;
5503
5504 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5505 Some(self.parse_expr()?)
5506 } else {
5507 None
5508 };
5509
5510 let with = if self.peek_keyword(Keyword::WITH) {
5511 Some(self.parse_options(Keyword::WITH)?)
5512 } else {
5513 None
5514 };
5515
5516 let options = if self.peek_keyword(Keyword::OPTIONS) {
5517 Some(self.parse_options(Keyword::OPTIONS)?)
5518 } else {
5519 None
5520 };
5521
5522 let clone = if self.parse_keyword(Keyword::CLONE) {
5523 Some(self.parse_object_name(false)?)
5524 } else {
5525 None
5526 };
5527
5528 Ok(Statement::CreateSchema {
5529 schema_name,
5530 if_not_exists,
5531 with,
5532 options,
5533 default_collate_spec,
5534 clone,
5535 })
5536 }
5537
5538 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5539 if self.parse_keyword(Keyword::AUTHORIZATION) {
5540 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5541 } else {
5542 let name = self.parse_object_name(false)?;
5543
5544 if self.parse_keyword(Keyword::AUTHORIZATION) {
5545 Ok(SchemaName::NamedAuthorization(
5546 name,
5547 self.parse_identifier()?,
5548 ))
5549 } else {
5550 Ok(SchemaName::Simple(name))
5551 }
5552 }
5553 }
5554
    /// Parses a `CREATE DATABASE` statement after the `DATABASE` keyword:
    /// `[IF NOT EXISTS] <name>` followed by optional Hive-style
    /// `LOCATION`/`MANAGEDLOCATION` clauses, an optional `CLONE`, and
    /// optional MySQL-style `[DEFAULT] CHARACTER SET|CHARSET` / `COLLATE`
    /// clauses. Fields not expressible in this generic form are emitted as
    /// `None`/`false` defaults.
    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let db_name = self.parse_object_name(false)?;
        let mut location = None;
        let mut managed_location = None;
        // LOCATION and MANAGEDLOCATION may appear in any order; the last
        // occurrence of each wins.
        loop {
            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
                Some(Keyword::MANAGEDLOCATION) => {
                    managed_location = Some(self.parse_literal_string()?)
                }
                _ => break,
            }
        }
        let clone = if self.parse_keyword(Keyword::CLONE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let mut default_charset = None;
        let mut default_collation = None;
        // MySQL allows `[DEFAULT] CHARACTER SET ...` and `[DEFAULT]
        // COLLATE ...` in either order, each with an optional `=`.
        loop {
            let has_default = self.parse_keyword(Keyword::DEFAULT);
            // NOTE(review): `&&` binds tighter than `||`, so the
            // `default_charset.is_none()` guard only covers the
            // `CHARACTER SET` spelling — a repeated `CHARSET` clause
            // silently overwrites the earlier value. Confirm whether
            // that asymmetry is intentional.
            if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
                || self.parse_keyword(Keyword::CHARSET)
            {
                let _ = self.consume_token(&Token::Eq);
                default_charset = Some(self.parse_identifier()?.value);
            } else if self.parse_keyword(Keyword::COLLATE) {
                let _ = self.consume_token(&Token::Eq);
                default_collation = Some(self.parse_identifier()?.value);
            } else if has_default {
                // DEFAULT was consumed but no clause followed: give the
                // keyword back before leaving the loop.
                self.prev_token();
                break;
            } else {
                break;
            }
        }

        Ok(Statement::CreateDatabase {
            db_name,
            if_not_exists: ine,
            location,
            managed_location,
            or_replace: false,
            transient: false,
            clone,
            data_retention_time_in_days: None,
            max_data_extension_time_in_days: None,
            external_volume: None,
            catalog: None,
            replace_invalid_characters: None,
            default_ddl_collation: None,
            storage_serialization_policy: None,
            comment: None,
            default_charset,
            default_collation,
            catalog_sync: None,
            catalog_sync_namespace_mode: None,
            catalog_sync_namespace_flatten_delimiter: None,
            with_tags: None,
            with_contacts: None,
        })
    }
5629
5630 pub fn parse_optional_create_function_using(
5632 &mut self,
5633 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5634 if !self.parse_keyword(Keyword::USING) {
5635 return Ok(None);
5636 };
5637 let keyword =
5638 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5639
5640 let uri = self.parse_literal_string()?;
5641
5642 match keyword {
5643 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5644 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5645 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5646 _ => self.expected(
5647 "JAR, FILE or ARCHIVE, got {:?}",
5648 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5649 ),
5650 }
5651 }
5652
5653 pub fn parse_create_function(
5655 &mut self,
5656 or_alter: bool,
5657 or_replace: bool,
5658 temporary: bool,
5659 ) -> Result<Statement, ParserError> {
5660 if dialect_of!(self is HiveDialect) {
5661 self.parse_hive_create_function(or_replace, temporary)
5662 .map(Into::into)
5663 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5664 self.parse_postgres_create_function(or_replace, temporary)
5665 .map(Into::into)
5666 } else if dialect_of!(self is DuckDbDialect) {
5667 self.parse_create_macro(or_replace, temporary)
5668 } else if dialect_of!(self is BigQueryDialect) {
5669 self.parse_bigquery_create_function(or_replace, temporary)
5670 .map(Into::into)
5671 } else if dialect_of!(self is MsSqlDialect) {
5672 self.parse_mssql_create_function(or_alter, or_replace, temporary)
5673 .map(Into::into)
5674 } else {
5675 self.prev_token();
5676 self.expected_ref("an object type after CREATE", self.peek_token_ref())
5677 }
5678 }
5679
    /// Parses a PostgreSQL-style `CREATE FUNCTION` body after the `FUNCTION`
    /// keyword: the name, a mandatory (possibly empty) parenthesized argument
    /// list, an optional `RETURNS` clause, and then an unordered sequence of
    /// function attributes (`AS`, `LANGUAGE`, volatility, null-call behavior,
    /// `PARALLEL`, `SECURITY`, `SET`, `RETURN`). Each single-valued attribute
    /// may appear at most once; `SET` may repeat.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // `()` is a valid empty argument list.
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Accumulator for the attribute clauses, which Postgres allows in
        // any order after the signature.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a clause that was already seen with a descriptive error.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // SET <param> { FROM CURRENT | { = | TO } { DEFAULT | <values> } }
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // RETURN <expr> is an alternative body form; it conflicts
                // with a previously seen AS body.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No recognized clause: the attribute list is finished.
                break;
            }
        }

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5834
5835 fn parse_hive_create_function(
5839 &mut self,
5840 or_replace: bool,
5841 temporary: bool,
5842 ) -> Result<CreateFunction, ParserError> {
5843 let name = self.parse_object_name(false)?;
5844 self.expect_keyword_is(Keyword::AS)?;
5845
5846 let body = self.parse_create_function_body_string()?;
5847 let using = self.parse_optional_create_function_using()?;
5848
5849 Ok(CreateFunction {
5850 or_alter: false,
5851 or_replace,
5852 temporary,
5853 name,
5854 function_body: Some(body),
5855 using,
5856 if_not_exists: false,
5857 args: None,
5858 return_type: None,
5859 behavior: None,
5860 called_on_null: None,
5861 parallel: None,
5862 security: None,
5863 set_params: vec![],
5864 language: None,
5865 determinism_specifier: None,
5866 options: None,
5867 remote_connection: None,
5868 })
5869 }
5870
    /// Parses a BigQuery `CREATE FUNCTION` statement after the `FUNCTION`
    /// keyword: `[IF NOT EXISTS] <name>(<params>) [RETURNS <type>]
    /// [[NOT] DETERMINISTIC] [LANGUAGE <lang>] [REMOTE WITH CONNECTION <conn>]
    /// [OPTIONS (...)] [AS <expr> [OPTIONS (...)]]`.
    ///
    /// BigQuery allows OPTIONS either before or after the AS body; which side
    /// it appeared on is preserved via the `AsBeforeOptions`/`AsAfterOptions`
    /// body variants. Remote functions have no AS body at all.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS may precede the AS body; try it here first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions have no AS body; everything else requires one.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS seen yet — it may follow the body instead.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5950
    /// Parses an MsSql `CREATE [OR ALTER] FUNCTION` statement after the
    /// `FUNCTION` keyword: name and parameters, a mandatory `RETURNS` clause
    /// (scalar type, or a named `TABLE (...)` for table-valued functions),
    /// an optional `AS`, and a body that is either a `BEGIN ... END` block
    /// or a `RETURN` of a parenthesized subquery / bare SELECT.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Speculatively parse `RETURNS <var> TABLE (...)` — a table-valued
        // function with a named result table. `maybe_parse` rewinds on
        // failure so the scalar-type path below starts fresh.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            p.expect_keyword_is(Keyword::TABLE)?;
            // Step back so parse_data_type sees the TABLE keyword itself.
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // AS is optional before the function body in T-SQL.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token_ref().token == Token::LParen {
                // RETURN (<subquery or expression>)
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                // RETURN SELECT ... (inline table-valued function)
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
6041
6042 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
6043 if self.parse_keyword(Keyword::SETOF) {
6044 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
6045 } else {
6046 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
6047 }
6048 }
6049
6050 fn parse_create_function_name_and_params(
6051 &mut self,
6052 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
6053 let name = self.parse_object_name(false)?;
6054 let parse_function_param =
6055 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6056 let name = parser.parse_identifier()?;
6057 let data_type = parser.parse_data_type()?;
6058 let default_expr = if parser.consume_token(&Token::Eq) {
6059 Some(parser.parse_expr()?)
6060 } else {
6061 None
6062 };
6063
6064 Ok(OperateFunctionArg {
6065 mode: None,
6066 name: Some(name),
6067 data_type,
6068 default_expr,
6069 })
6070 };
6071 self.expect_token(&Token::LParen)?;
6072 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6073 self.expect_token(&Token::RParen)?;
6074 Ok((name, args))
6075 }
6076
    /// Parses one argument of a Postgres-style function signature:
    /// `[IN|OUT|INOUT|VARIADIC] [name] type [DEFAULT|= expr]`.
    ///
    /// Because the argument name is optional, the parser first reads one
    /// data type and then speculatively tries to read a second: if that
    /// succeeds, the first parse actually consumed the argument NAME and the
    /// second is the real type.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Index of the just-consumed token, recorded so it can be
        // reinterpreted as the argument name if a second type follows.
        let data_type_idx = self.get_current_index();

        // DEFAULT introduces a default value, never a type, so the
        // speculative second-type parse must not swallow it.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // If a second data type parses, the first token was the name.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // Only a bare word can serve as an argument name.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6137
    /// Parses one argument of a `CREATE AGGREGATE` signature:
    /// `[IN] [name] type`.
    ///
    /// Aggregate signatures are more restrictive than plain function
    /// signatures: only the `IN` mode is allowed, and `DEFAULT`/`=` default
    /// values are rejected. The optional-name disambiguation uses the same
    /// two-data-type lookahead as `parse_function_arg`.
    fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else {
            // OUT/INOUT/VARIADIC are valid for functions but not for
            // aggregates — report them explicitly rather than misparse.
            if self
                .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
                .is_some()
            {
                return self.expected_ref(
                    "IN or argument type in aggregate signature",
                    self.peek_token_ref(),
                );
            }
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        // Index of the just-consumed token; reinterpreted as the argument
        // name if a second type follows.
        let data_type_idx = self.get_current_index();

        // A second type can only start where a type is syntactically
        // possible: not at DEFAULT, ORDER (BY), a comma, or the closing paren.
        fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT)
                || parser.peek_keyword(Keyword::ORDER)
                || parser.peek_token_ref().token == Token::Comma
                || parser.peek_token_ref().token == Token::RParen
            {
                parser_err!(
                    "The current token cannot start an aggregate argument type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // If a second data type parses, the first token was the name.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
            let token = self.token_at(data_type_idx);
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Aggregate arguments cannot carry default values.
        if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
            return self.expected_ref(
                "',' or ')' or ORDER BY after aggregate argument type",
                self.peek_token_ref(),
            );
        }

        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr: None,
        })
    }
6200
    /// Parses a `DROP TRIGGER` statement, assuming `DROP TRIGGER` has
    /// already been consumed.
    ///
    /// Syntax: `DROP TRIGGER [IF EXISTS] name [ON table] [CASCADE | RESTRICT]`.
    /// On dialects without trigger support the parser backs up one token and
    /// reports "an object type after DROP" at the `TRIGGER` keyword.
    pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
        {
            self.prev_token();
            return self.expected_ref("an object type after DROP", self.peek_token_ref());
        }
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let trigger_name = self.parse_object_name(false)?;
        // The `ON table` clause is optional.
        let table_name = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
            Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
            Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
            // Defensive: `parse_one_of_keywords` should only return keywords
            // from the list it was given.
            Some(unexpected_keyword) => return Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
            )),
            None => None,
        };
        Ok(DropTrigger {
            if_exists,
            trigger_name,
            table_name,
            option,
        })
    }
6234
6235 pub fn parse_create_trigger(
6237 &mut self,
6238 temporary: bool,
6239 or_alter: bool,
6240 or_replace: bool,
6241 is_constraint: bool,
6242 ) -> Result<CreateTrigger, ParserError> {
6243 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6244 {
6245 self.prev_token();
6246 return self.expected_ref("an object type after CREATE", self.peek_token_ref());
6247 }
6248
6249 let name = self.parse_object_name(false)?;
6250 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
6251
6252 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
6253 self.expect_keyword_is(Keyword::ON)?;
6254 let table_name = self.parse_object_name(false)?;
6255
6256 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
6257 self.parse_object_name(true).ok()
6258 } else {
6259 None
6260 };
6261
6262 let characteristics = self.parse_constraint_characteristics()?;
6263
6264 let mut referencing = vec![];
6265 if self.parse_keyword(Keyword::REFERENCING) {
6266 while let Some(refer) = self.parse_trigger_referencing()? {
6267 referencing.push(refer);
6268 }
6269 }
6270
6271 let trigger_object = if self.parse_keyword(Keyword::FOR) {
6272 let include_each = self.parse_keyword(Keyword::EACH);
6273 let trigger_object =
6274 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
6275 Keyword::ROW => TriggerObject::Row,
6276 Keyword::STATEMENT => TriggerObject::Statement,
6277 unexpected_keyword => return Err(ParserError::ParserError(
6278 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
6279 )),
6280 };
6281
6282 Some(if include_each {
6283 TriggerObjectKind::ForEach(trigger_object)
6284 } else {
6285 TriggerObjectKind::For(trigger_object)
6286 })
6287 } else {
6288 let _ = self.parse_keyword(Keyword::FOR);
6289
6290 None
6291 };
6292
6293 let condition = self
6294 .parse_keyword(Keyword::WHEN)
6295 .then(|| self.parse_expr())
6296 .transpose()?;
6297
6298 let mut exec_body = None;
6299 let mut statements = None;
6300 if self.parse_keyword(Keyword::EXECUTE) {
6301 exec_body = Some(self.parse_trigger_exec_body()?);
6302 } else {
6303 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6304 }
6305
6306 Ok(CreateTrigger {
6307 or_alter,
6308 temporary,
6309 or_replace,
6310 is_constraint,
6311 name,
6312 period,
6313 period_before_table: true,
6314 events,
6315 table_name,
6316 referenced_table_name,
6317 referencing,
6318 trigger_object,
6319 condition,
6320 exec_body,
6321 statements_as: false,
6322 statements,
6323 characteristics,
6324 })
6325 }
6326
6327 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6329 Ok(
6330 match self.expect_one_of_keywords(&[
6331 Keyword::FOR,
6332 Keyword::BEFORE,
6333 Keyword::AFTER,
6334 Keyword::INSTEAD,
6335 ])? {
6336 Keyword::FOR => TriggerPeriod::For,
6337 Keyword::BEFORE => TriggerPeriod::Before,
6338 Keyword::AFTER => TriggerPeriod::After,
6339 Keyword::INSTEAD => self
6340 .expect_keyword_is(Keyword::OF)
6341 .map(|_| TriggerPeriod::InsteadOf)?,
6342 unexpected_keyword => return Err(ParserError::ParserError(
6343 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6344 )),
6345 },
6346 )
6347 }
6348
6349 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6351 Ok(
6352 match self.expect_one_of_keywords(&[
6353 Keyword::INSERT,
6354 Keyword::UPDATE,
6355 Keyword::DELETE,
6356 Keyword::TRUNCATE,
6357 ])? {
6358 Keyword::INSERT => TriggerEvent::Insert,
6359 Keyword::UPDATE => {
6360 if self.parse_keyword(Keyword::OF) {
6361 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6362 TriggerEvent::Update(cols)
6363 } else {
6364 TriggerEvent::Update(vec![])
6365 }
6366 }
6367 Keyword::DELETE => TriggerEvent::Delete,
6368 Keyword::TRUNCATE => TriggerEvent::Truncate,
6369 unexpected_keyword => return Err(ParserError::ParserError(
6370 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6371 )),
6372 },
6373 )
6374 }
6375
    /// Parses one `REFERENCING` clause entry, i.e. `OLD TABLE [AS] name` or
    /// `NEW TABLE [AS] name`; returns `Ok(None)` when no entry follows.
    ///
    /// NOTE(review): if `OLD`/`NEW` appears without a following `TABLE`, that
    /// keyword has already been consumed by the time `Ok(None)` is returned —
    /// confirm callers tolerate the resulting token position.
    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::OldTable
            }
            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::NewTable
            }
            _ => {
                return Ok(None);
            }
        };

        // The `AS` before the transition relation name is optional.
        let is_as = self.parse_keyword(Keyword::AS);
        let transition_relation_name = self.parse_object_name(false)?;
        Ok(Some(TriggerReferencing {
            refer_type,
            is_as,
            transition_relation_name,
        }))
    }
6398
6399 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6406 let exec_type = match self
6407 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6408 {
6409 Keyword::FUNCTION => TriggerExecBodyType::Function,
6410 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6411 unexpected_keyword => {
6412 return Err(ParserError::ParserError(format!(
6413 "Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"
6414 )))
6415 }
6416 };
6417
6418 let func_name = self.parse_object_name(false)?;
6419
6420 let args = if self.consume_token(&Token::LParen) {
6421 if self.consume_token(&Token::RParen) {
6422 Some(vec![])
6423 } else {
6424 let exprs = self.parse_comma_separated(Parser::parse_expr)?;
6425 self.expect_token(&Token::RParen)?;
6426 Some(exprs)
6427 }
6428 } else {
6429 None
6430 };
6431
6432 Ok(TriggerExecBody {
6433 exec_type,
6434 func_name,
6435 args,
6436 })
6437 }
6438
    /// Parses a `CREATE [OR REPLACE] [TEMPORARY] MACRO` statement (DuckDB
    /// syntax), assuming everything up to and including `MACRO` has already
    /// been consumed. On other dialects the parser backs up one token and
    /// reports "an object type after CREATE".
    pub fn parse_create_macro(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is DuckDbDialect | GenericDialect) {
            let name = self.parse_object_name(false)?;
            self.expect_token(&Token::LParen)?;
            // Peek for an empty argument list: consume `)` to look at it,
            // then step back so the `expect_token` below sees it again.
            let args = if self.consume_token(&Token::RParen) {
                self.prev_token();
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
            };

            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::AS)?;

            // `AS TABLE <query>` defines a table macro; otherwise the body
            // is a single expression.
            Ok(Statement::CreateMacro {
                or_replace,
                temporary,
                name,
                args,
                definition: if self.parse_keyword(Keyword::TABLE) {
                    MacroDefinition::Table(self.parse_query()?)
                } else {
                    MacroDefinition::Expr(self.parse_expr()?)
                },
            })
        } else {
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
6474
6475 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6476 let name = self.parse_identifier()?;
6477
6478 let default_expr =
6479 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6480 Some(self.parse_expr()?)
6481 } else {
6482 None
6483 };
6484 Ok(MacroArg { name, default_expr })
6485 }
6486
6487 pub fn parse_create_external_table(
6489 &mut self,
6490 or_replace: bool,
6491 ) -> Result<CreateTable, ParserError> {
6492 self.expect_keyword_is(Keyword::TABLE)?;
6493 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6494 let table_name = self.parse_object_name(false)?;
6495 let (columns, constraints) = self.parse_columns()?;
6496
6497 let hive_distribution = self.parse_hive_distribution()?;
6498 let hive_formats = self.parse_hive_formats()?;
6499
6500 let file_format = if let Some(ref hf) = hive_formats {
6501 if let Some(ref ff) = hf.storage {
6502 match ff {
6503 HiveIOFormat::FileFormat { format } => Some(*format),
6504 _ => None,
6505 }
6506 } else {
6507 None
6508 }
6509 } else {
6510 None
6511 };
6512 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6513 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6514 let table_options = if !table_properties.is_empty() {
6515 CreateTableOptions::TableProperties(table_properties)
6516 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6517 CreateTableOptions::Options(options)
6518 } else {
6519 CreateTableOptions::None
6520 };
6521 Ok(CreateTableBuilder::new(table_name)
6522 .columns(columns)
6523 .constraints(constraints)
6524 .hive_distribution(hive_distribution)
6525 .hive_formats(hive_formats)
6526 .table_options(table_options)
6527 .or_replace(or_replace)
6528 .if_not_exists(if_not_exists)
6529 .external(true)
6530 .file_format(file_format)
6531 .location(location)
6532 .build())
6533 }
6534
6535 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6539 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6540 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6541 let table_name = self.parse_object_name(true)?;
6542
6543 self.expect_keyword_is(Keyword::CLONE)?;
6544 let clone = Some(self.parse_object_name(true)?);
6545
6546 let version =
6547 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6548 {
6549 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6550 } else {
6551 None
6552 };
6553
6554 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6555 CreateTableOptions::Options(options)
6556 } else {
6557 CreateTableOptions::None
6558 };
6559
6560 Ok(CreateTableBuilder::new(table_name)
6561 .snapshot(true)
6562 .if_not_exists(if_not_exists)
6563 .clone_clause(clone)
6564 .version(version)
6565 .table_options(table_options)
6566 .build())
6567 }
6568
6569 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6571 let next_token = self.next_token();
6572 match &next_token.token {
6573 Token::Word(w) => match w.keyword {
6574 Keyword::AVRO => Ok(FileFormat::AVRO),
6575 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6576 Keyword::ORC => Ok(FileFormat::ORC),
6577 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6578 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6579 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6580 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6581 _ => self.expected("fileformat", next_token),
6582 },
6583 _ => self.expected("fileformat", next_token),
6584 }
6585 }
6586
6587 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6588 if self.consume_token(&Token::Eq) {
6589 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6590 } else {
6591 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6592 }
6593 }
6594
6595 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6597 let next_token = self.next_token();
6598 match &next_token.token {
6599 Token::Word(w) => match w.keyword {
6600 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6601 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6602 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6603 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6604 _ => self.expected("fileformat", next_token),
6605 },
6606 _ => self.expected("fileformat", next_token),
6607 }
6608 }
6609
    /// Parses a `CREATE ... VIEW` statement, with the keywords up to (but not
    /// including) `VIEW` already consumed and summarized in the flags.
    ///
    /// Handles several dialect-specific extensions: BigQuery unquoted-hyphen
    /// names and `OPTIONS (...)`, ClickHouse `TO <table>`, Redshift
    /// `WITH NO SCHEMA BINDING`, and materialized-view `WITH [NO] DATA`.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` may appear before or after the view name; which
        // position was used is recorded so the statement can round-trip.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // A non-empty `OPTIONS (...)` list replaces any `WITH (...)` options
        // captured above.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // `WITH NO SCHEMA BINDING` trails the defining query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        // `WITH [NO] DATA` is only attempted for materialized views.
        let with_data = if materialized && self.parse_keyword(Keyword::WITH) {
            if self.parse_keyword(Keyword::NO) {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(false)
            } else {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(true)
            }
        } else {
            None
        };

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
            with_data,
        })
    }
6719
    /// Parses MySQL-style view parameters (`ALGORITHM = ...`, `DEFINER = ...`,
    /// `SQL SECURITY ...`). Returns `Ok(None)` when none of them are present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Defensive path: `expect_one_of_keywords` only returns
                    // listed keywords, so this should be unreachable.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Defensive path, same reasoning as the ALGORITHM match.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only produce a params struct when at least one field was parsed.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6777
    /// Parses a `CREATE ROLE` statement, assuming `CREATE ROLE` has already
    /// been consumed.
    ///
    /// The accepted role options depend on the dialect: MSSQL accepts only
    /// `AUTHORIZATION`, PostgreSQL accepts the full option list built below,
    /// and other dialects accept none. Each option may appear at most once;
    /// a duplicate produces a parser error located at the repeated keyword.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Optional noise keyword `WITH` before the option list.
        let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Accumulators for each role option; `None`/empty means "not seen".
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        // Consume options until no recognized option keyword follows. `loc`
        // is the location of the option keyword just consumed, used to point
        // duplicate-option errors at the offending token.
        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN` must be followed by either `ROLE` or `GROUP`.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
7002
7003 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
7005 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
7006 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
7007 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
7008 Some(Keyword::SESSION_USER) => Owner::SessionUser,
7009 Some(unexpected_keyword) => return Err(ParserError::ParserError(
7010 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
7011 )),
7012 None => {
7013 match self.parse_identifier() {
7014 Ok(ident) => Owner::Ident(ident),
7015 Err(e) => {
7016 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
7017 }
7018 }
7019 }
7020 };
7021 Ok(owner)
7022 }
7023
7024 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
7026 let name = self.parse_object_name(false)?;
7027 self.expect_keyword_is(Keyword::AS)?;
7028 let data_type = self.parse_data_type()?;
7029 let collation = if self.parse_keyword(Keyword::COLLATE) {
7030 Some(self.parse_identifier()?)
7031 } else {
7032 None
7033 };
7034 let default = if self.parse_keyword(Keyword::DEFAULT) {
7035 Some(self.parse_expr()?)
7036 } else {
7037 None
7038 };
7039 let mut constraints = Vec::new();
7040 while let Some(constraint) = self.parse_optional_table_constraint()? {
7041 constraints.push(constraint);
7042 }
7043
7044 Ok(CreateDomain {
7045 name,
7046 data_type,
7047 collation,
7048 default,
7049 constraints,
7050 })
7051 }
7052
    /// Parses a `CREATE POLICY` statement, assuming `CREATE POLICY` has
    /// already been consumed.
    ///
    /// Syntax: `CREATE POLICY name ON table [AS {PERMISSIVE | RESTRICTIVE}]
    /// [FOR {ALL | SELECT | INSERT | UPDATE | DELETE}] [TO target, ...]
    /// [USING (expr)] [WITH CHECK (expr)]`.
    pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                // Defensive: `expect_one_of_keywords` only yields the listed
                // keywords.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
                )),
            })
        } else {
            None
        };

        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                // Defensive, same reasoning as the policy-type match above.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
                )),
            })
        } else {
            None
        };

        // `TO` accepts a comma-separated list of owner-style targets.
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // `USING (expr)` — the expression must be parenthesized.
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // `WITH CHECK (expr)` — likewise parenthesized.
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
7137
7138 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7148 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7149 let name = self.parse_identifier()?;
7150
7151 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7152 Some(self.parse_literal_string()?)
7153 } else {
7154 None
7155 };
7156
7157 let url = if self.parse_keyword(Keyword::URL) {
7158 Some(self.parse_literal_string()?)
7159 } else {
7160 None
7161 };
7162
7163 let comment = self.parse_optional_inline_comment()?;
7164
7165 let with_dcproperties =
7166 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7167 properties if !properties.is_empty() => Some(properties),
7168 _ => None,
7169 };
7170
7171 Ok(CreateConnector {
7172 name,
7173 if_not_exists,
7174 connector_type,
7175 url,
7176 comment,
7177 with_dcproperties,
7178 })
7179 }
7180
7181 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7187 let mut parts = vec![];
7188 loop {
7189 parts.push(ObjectNamePart::Identifier(Ident::new(
7190 self.next_token().to_string(),
7191 )));
7192 if !self.consume_token(&Token::Period) {
7193 break;
7194 }
7195 }
7196 Ok(ObjectName(parts))
7197 }
7198
    /// Parses a `CREATE OPERATOR name ( option = value, ... )` statement,
    /// with `CREATE OPERATOR` already consumed.
    ///
    /// Each option may appear at most once — the `match` guards below reject
    /// repeats, which fall through to the duplicate/unexpected-keyword error.
    /// A `FUNCTION` (or `PROCEDURE`) option is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Flag options: valid only if not already recorded.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE share one slot; remember which
                // keyword spelled it.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or
                // the `OPERATOR(name)` wrapper form.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when an option is repeated (its guard failed).
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            // Options are comma-separated; no comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION/PROCEDURE is the only mandatory option.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7320
7321 pub fn parse_create_aggregate(
7325 &mut self,
7326 or_replace: bool,
7327 ) -> Result<CreateAggregate, ParserError> {
7328 let name = self.parse_object_name(false)?;
7329
7330 self.expect_token(&Token::LParen)?;
7332 let args = if self.consume_token(&Token::Mul) {
7333 vec![]
7335 } else if self.consume_token(&Token::RParen) {
7336 self.prev_token();
7337 vec![]
7338 } else {
7339 self.parse_comma_separated(|p| p.parse_data_type())?
7340 };
7341 self.expect_token(&Token::RParen)?;
7342
7343 self.expect_token(&Token::LParen)?;
7345 let mut options: Vec<CreateAggregateOption> = Vec::new();
7346 loop {
7347 let token = self.next_token();
7348 match &token.token {
7349 Token::RParen => break,
7350 Token::Comma => continue,
7351 Token::Word(word) => {
7352 let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
7353 options.push(option);
7354 }
7355 other => {
7356 return Err(ParserError::ParserError(format!(
7357 "Unexpected token in CREATE AGGREGATE options: {other:?}"
7358 )));
7359 }
7360 }
7361 }
7362
7363 Ok(CreateAggregate {
7364 or_replace,
7365 name,
7366 args,
7367 options,
7368 })
7369 }
7370
    /// Parses a single option from the parenthesized option list of
    /// `CREATE AGGREGATE`, e.g. `SFUNC = func` or the bare flag
    /// `HYPOTHETICAL`.
    ///
    /// `key` is the option name, already upper-cased by the caller. Most
    /// options expect a `=` followed by a value; `FINALFUNC_EXTRA`,
    /// `MFINALFUNC_EXTRA` and `HYPOTHETICAL` are flags with no value.
    /// An unrecognized key yields a `ParserError` without consuming any
    /// further tokens.
    fn parse_create_aggregate_option(
        &mut self,
        key: &str,
    ) -> Result<CreateAggregateOption, ParserError> {
        match key {
            // State-transition function plus state value type/size.
            "SFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sfunc(self.parse_object_name(false)?))
            }
            "STYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
            }
            "SSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Sspace(size))
            }
            // Final-function options.
            "FINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Finalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` follows.
            "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
            "FINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::FinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            // Partial-aggregation support functions.
            "COMBINEFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Combinefunc(
                    self.parse_object_name(false)?,
                ))
            }
            "SERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Serialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "DESERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Deserialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "INITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
            }
            // Moving-aggregate (`M*`) counterparts of the options above.
            "MSFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Msfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MINVFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minvfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MSTYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
            }
            "MSSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Msspace(size))
            }
            "MFINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mfinalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` follows.
            "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
            "MFINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::MfinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "MINITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
            }
            // Sort operator option.
            "SORTOP" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sortop(
                    self.parse_object_name(false)?,
                ))
            }
            // Parallel-safety marking: SAFE | RESTRICTED | UNSAFE.
            "PARALLEL" => {
                self.expect_token(&Token::Eq)?;
                let parallel = match self.expect_one_of_keywords(&[
                    Keyword::SAFE,
                    Keyword::RESTRICTED,
                    Keyword::UNSAFE,
                ])? {
                    Keyword::SAFE => FunctionParallel::Safe,
                    Keyword::RESTRICTED => FunctionParallel::Restricted,
                    Keyword::UNSAFE => FunctionParallel::Unsafe,
                    // `expect_one_of_keywords` only returns keywords from the
                    // list above, so this arm is unreachable in practice.
                    other => {
                        return Err(ParserError::ParserError(format!(
                            "Internal parser error: unexpected keyword `{other}` for PARALLEL"
                        )))
                    }
                };
                Ok(CreateAggregateOption::Parallel(parallel))
            }
            // Bare flag.
            "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
            other => Err(ParserError::ParserError(format!(
                "Unknown CREATE AGGREGATE option: {other}"
            ))),
        }
    }
7492
7493 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7494 let token = self.next_token();
7495 match &token.token {
7496 Token::Word(word) => match word.value.to_uppercase().as_str() {
7497 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7498 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7499 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7500 other => Err(ParserError::ParserError(format!(
7501 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7502 ))),
7503 },
7504 other => Err(ParserError::ParserError(format!(
7505 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7506 ))),
7507 }
7508 }
7509
7510 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7514 let name = self.parse_object_name(false)?;
7515 self.expect_keyword(Keyword::USING)?;
7516 let using = self.parse_identifier()?;
7517
7518 Ok(CreateOperatorFamily { name, using })
7519 }
7520
7521 pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
7525 let name = self.parse_object_name(false)?;
7526 let default = self.parse_keyword(Keyword::DEFAULT);
7527 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
7528 let for_type = self.parse_data_type()?;
7529 self.expect_keyword(Keyword::USING)?;
7530 let using = self.parse_identifier()?;
7531
7532 let family = if self.parse_keyword(Keyword::FAMILY) {
7533 Some(self.parse_object_name(false)?)
7534 } else {
7535 None
7536 };
7537
7538 self.expect_keyword(Keyword::AS)?;
7539
7540 let mut items = vec![];
7541 loop {
7542 if self.parse_keyword(Keyword::OPERATOR) {
7543 let strategy_number = self.parse_literal_uint()?;
7544 let operator_name = self.parse_operator_name()?;
7545
7546 let op_types = if self.consume_token(&Token::LParen) {
7548 let left = self.parse_data_type()?;
7549 self.expect_token(&Token::Comma)?;
7550 let right = self.parse_data_type()?;
7551 self.expect_token(&Token::RParen)?;
7552 Some(OperatorArgTypes { left, right })
7553 } else {
7554 None
7555 };
7556
7557 let purpose = if self.parse_keyword(Keyword::FOR) {
7559 if self.parse_keyword(Keyword::SEARCH) {
7560 Some(OperatorPurpose::ForSearch)
7561 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7562 let sort_family = self.parse_object_name(false)?;
7563 Some(OperatorPurpose::ForOrderBy { sort_family })
7564 } else {
7565 return self
7566 .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
7567 }
7568 } else {
7569 None
7570 };
7571
7572 items.push(OperatorClassItem::Operator {
7573 strategy_number,
7574 operator_name,
7575 op_types,
7576 purpose,
7577 });
7578 } else if self.parse_keyword(Keyword::FUNCTION) {
7579 let support_number = self.parse_literal_uint()?;
7580
7581 let op_types = if self.consume_token(&Token::LParen)
7583 && self.peek_token_ref().token != Token::RParen
7584 {
7585 let mut types = vec![];
7586 loop {
7587 types.push(self.parse_data_type()?);
7588 if !self.consume_token(&Token::Comma) {
7589 break;
7590 }
7591 }
7592 self.expect_token(&Token::RParen)?;
7593 Some(types)
7594 } else if self.consume_token(&Token::LParen) {
7595 self.expect_token(&Token::RParen)?;
7596 Some(vec![])
7597 } else {
7598 None
7599 };
7600
7601 let function_name = self.parse_object_name(false)?;
7602
7603 let argument_types = if self.consume_token(&Token::LParen) {
7605 let mut types = vec![];
7606 loop {
7607 if self.peek_token_ref().token == Token::RParen {
7608 break;
7609 }
7610 types.push(self.parse_data_type()?);
7611 if !self.consume_token(&Token::Comma) {
7612 break;
7613 }
7614 }
7615 self.expect_token(&Token::RParen)?;
7616 types
7617 } else {
7618 vec![]
7619 };
7620
7621 items.push(OperatorClassItem::Function {
7622 support_number,
7623 op_types,
7624 function_name,
7625 argument_types,
7626 });
7627 } else if self.parse_keyword(Keyword::STORAGE) {
7628 let storage_type = self.parse_data_type()?;
7629 items.push(OperatorClassItem::Storage { storage_type });
7630 } else {
7631 break;
7632 }
7633
7634 if !self.consume_token(&Token::Comma) {
7636 break;
7637 }
7638 }
7639
7640 Ok(CreateOperatorClass {
7641 name,
7642 default,
7643 for_type,
7644 using,
7645 family,
7646 items,
7647 })
7648 }
7649
    /// Parses a `DROP` statement (the `DROP` keyword was already consumed).
    ///
    /// Object kinds that share the common
    /// `DROP <kind> [IF EXISTS] name[, ...] [CASCADE | RESTRICT] [PURGE] [ON table]`
    /// tail are handled inline; kinds with their own grammar (FUNCTION,
    /// POLICY, CONNECTOR, DOMAIN, PROCEDURE, SECRET, TRIGGER, EXTENSION and
    /// OPERATOR [FAMILY | CLASS]) delegate to dedicated helpers and return
    /// early.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // Dialect-specific modifiers that may precede the object kind.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::COLLATION) {
            ObjectType::Collation
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            // Reached only when the single-keyword VIEW branch above did not
            // match (i.e. the next token is MATERIALIZED).
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            // From here on: object kinds with dedicated grammars.
            return self.parse_drop_function().map(Into::into);
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy().map(Into::into);
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain().map(Into::into);
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger().map(Into::into);
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // DROP OPERATOR FAMILY / DROP OPERATOR CLASS / DROP OPERATOR.
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected_ref(
                "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token_ref(),
            );
        };
        // Common tail shared by the simple object kinds above.
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Capture the location before consuming the behavior keywords so
        // the error below points at where they appeared.
        let loc = self.peek_token_ref().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        // CASCADE and RESTRICT are mutually exclusive.
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        // DROP ROLE accepts none of the behavior modifiers.
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // Optional trailing `ON <table>` clause.
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
7749
7750 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7751 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7752 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7753 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7754 _ => None,
7755 }
7756 }
7757
7758 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7763 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7764 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7765 let drop_behavior = self.parse_optional_drop_behavior();
7766 Ok(DropFunction {
7767 if_exists,
7768 func_desc,
7769 drop_behavior,
7770 })
7771 }
7772
7773 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7779 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7780 let name = self.parse_identifier()?;
7781 self.expect_keyword_is(Keyword::ON)?;
7782 let table_name = self.parse_object_name(false)?;
7783 let drop_behavior = self.parse_optional_drop_behavior();
7784 Ok(DropPolicy {
7785 if_exists,
7786 name,
7787 table_name,
7788 drop_behavior,
7789 })
7790 }
7791 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7797 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7798 let name = self.parse_identifier()?;
7799 Ok(Statement::DropConnector { if_exists, name })
7800 }
7801
7802 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7806 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7807 let name = self.parse_object_name(false)?;
7808 let drop_behavior = self.parse_optional_drop_behavior();
7809 Ok(DropDomain {
7810 if_exists,
7811 name,
7812 drop_behavior,
7813 })
7814 }
7815
7816 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7821 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7822 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7823 let drop_behavior = self.parse_optional_drop_behavior();
7824 Ok(Statement::DropProcedure {
7825 if_exists,
7826 proc_desc,
7827 drop_behavior,
7828 })
7829 }
7830
7831 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7832 let name = self.parse_object_name(false)?;
7833
7834 let args = if self.consume_token(&Token::LParen) {
7835 if self.consume_token(&Token::RParen) {
7836 Some(vec![])
7837 } else {
7838 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7839 self.expect_token(&Token::RParen)?;
7840 Some(args)
7841 }
7842 } else {
7843 None
7844 };
7845
7846 Ok(FunctionDesc { name, args })
7847 }
7848
7849 fn parse_drop_secret(
7851 &mut self,
7852 temporary: bool,
7853 persistent: bool,
7854 ) -> Result<Statement, ParserError> {
7855 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7856 let name = self.parse_identifier()?;
7857 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7858 self.parse_identifier().ok()
7859 } else {
7860 None
7861 };
7862 let temp = match (temporary, persistent) {
7863 (true, false) => Some(true),
7864 (false, true) => Some(false),
7865 (false, false) => None,
7866 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
7867 };
7868
7869 Ok(Statement::DropSecret {
7870 if_exists,
7871 temporary: temp,
7872 name,
7873 storage_specifier,
7874 })
7875 }
7876
    /// Parses a `DECLARE` statement.
    ///
    /// BigQuery, Snowflake and MsSql have their own `DECLARE` grammars and
    /// are dispatched to dedicated methods; every other dialect falls
    /// through to the cursor declaration form:
    /// `DECLARE name [BINARY] [ASENSITIVE | INSENSITIVE] [[NO] SCROLL]
    /// CURSOR [WITH | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // Optional cursor modifiers preceding the CURSOR keyword.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // `parse_one_of_keywords` only returns WITH or WITHOUT
                    // here, so this arm is unreachable in practice.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7951
7952 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7960 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7961
7962 let data_type = match &self.peek_token_ref().token {
7963 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7964 _ => Some(self.parse_data_type()?),
7965 };
7966
7967 let expr = if data_type.is_some() {
7968 if self.parse_keyword(Keyword::DEFAULT) {
7969 Some(self.parse_expr()?)
7970 } else {
7971 None
7972 }
7973 } else {
7974 self.expect_keyword_is(Keyword::DEFAULT)?;
7977 Some(self.parse_expr()?)
7978 };
7979
7980 Ok(Statement::Declare {
7981 stmts: vec![Declare {
7982 names,
7983 data_type,
7984 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7985 declare_type: None,
7986 binary: None,
7987 sensitive: None,
7988 scroll: None,
7989 hold: None,
7990 for_query: None,
7991 }],
7992 })
7993 }
7994
    /// Parses a Snowflake `DECLARE` block, which may contain several
    /// semicolon-separated declarations. Each declaration is one of:
    /// - `name CURSOR FOR <query | expr>`
    /// - `name RESULTSET [initializer]`
    /// - `name EXCEPTION [(...)]`
    /// - `name [data_type] [initializer]` (plain variable)
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // A cursor is bound either to a SELECT query or to an
                    // arbitrary expression.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // RESULTSET may optionally have an initializer; a
                    // following semicolon means there is none.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // EXCEPTION may carry a parenthesized payload.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: either an initializer with no type, or
                    // a data type optionally followed by its own
                    // initializer, or nothing at all.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                // Lookahead heuristic: a following non-keyword word starts
                // another declaration; anything else means the DECLARE block
                // has ended and the semicolon belongs to the surrounding
                // statement, so push it back.
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8110
8111 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8123 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8124
8125 Ok(Statement::Declare { stmts })
8126 }
8127
    /// Parses a single declaration within an MsSql `DECLARE` statement:
    /// `@name [AS] data_type [= expr]` or `name CURSOR [FOR query]`.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        // Variable names must start with `@`; the exception is a cursor
        // declaration, whose name is a plain identifier followed by CURSOR.
        let name = {
            let ident = self.parse_identifier()?;
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): this raises TokenizerError even though the
                // problem is detected during parsing — presumably it should
                // be ParserError::ParserError; confirm before changing,
                // since callers may match on the variant.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // `CURSOR` marks a cursor declaration; `AS` optionally precedes the
        // data type of a plain variable.
        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // Either `FOR <query>` (cursor binding) or an optional `= expr`
        // initializer — never both.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
8191
8192 pub fn parse_snowflake_variable_declaration_expression(
8200 &mut self,
8201 ) -> Result<Option<DeclareAssignment>, ParserError> {
8202 Ok(match &self.peek_token_ref().token {
8203 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8204 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8206 }
8207 Token::Assignment => {
8208 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8210 self.parse_expr()?,
8211 )))
8212 }
8213 _ => None,
8214 })
8215 }
8216
8217 pub fn parse_mssql_variable_declaration_expression(
8224 &mut self,
8225 ) -> Result<Option<DeclareAssignment>, ParserError> {
8226 Ok(match &self.peek_token_ref().token {
8227 Token::Eq => {
8228 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8230 self.parse_expr()?,
8231 )))
8232 }
8233 _ => None,
8234 })
8235 }
8236
8237 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
8239 let direction = if self.parse_keyword(Keyword::NEXT) {
8240 FetchDirection::Next
8241 } else if self.parse_keyword(Keyword::PRIOR) {
8242 FetchDirection::Prior
8243 } else if self.parse_keyword(Keyword::FIRST) {
8244 FetchDirection::First
8245 } else if self.parse_keyword(Keyword::LAST) {
8246 FetchDirection::Last
8247 } else if self.parse_keyword(Keyword::ABSOLUTE) {
8248 FetchDirection::Absolute {
8249 limit: self.parse_number_value()?,
8250 }
8251 } else if self.parse_keyword(Keyword::RELATIVE) {
8252 FetchDirection::Relative {
8253 limit: self.parse_number_value()?,
8254 }
8255 } else if self.parse_keyword(Keyword::FORWARD) {
8256 if self.parse_keyword(Keyword::ALL) {
8257 FetchDirection::ForwardAll
8258 } else {
8259 FetchDirection::Forward {
8260 limit: Some(self.parse_number_value()?),
8262 }
8263 }
8264 } else if self.parse_keyword(Keyword::BACKWARD) {
8265 if self.parse_keyword(Keyword::ALL) {
8266 FetchDirection::BackwardAll
8267 } else {
8268 FetchDirection::Backward {
8269 limit: Some(self.parse_number_value()?),
8271 }
8272 }
8273 } else if self.parse_keyword(Keyword::ALL) {
8274 FetchDirection::All
8275 } else {
8276 FetchDirection::Count {
8277 limit: self.parse_number_value()?,
8278 }
8279 };
8280
8281 let position = if self.peek_keyword(Keyword::FROM) {
8282 self.expect_keyword(Keyword::FROM)?;
8283 FetchPosition::From
8284 } else if self.peek_keyword(Keyword::IN) {
8285 self.expect_keyword(Keyword::IN)?;
8286 FetchPosition::In
8287 } else {
8288 return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
8289 };
8290
8291 let name = self.parse_identifier()?;
8292
8293 let into = if self.parse_keyword(Keyword::INTO) {
8294 Some(self.parse_object_name(false)?)
8295 } else {
8296 None
8297 };
8298
8299 Ok(Statement::Fetch {
8300 name,
8301 direction,
8302 position,
8303 into,
8304 })
8305 }
8306
8307 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8309 let object_type = if self.parse_keyword(Keyword::ALL) {
8310 DiscardObject::ALL
8311 } else if self.parse_keyword(Keyword::PLANS) {
8312 DiscardObject::PLANS
8313 } else if self.parse_keyword(Keyword::SEQUENCES) {
8314 DiscardObject::SEQUENCES
8315 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8316 DiscardObject::TEMP
8317 } else {
8318 return self.expected_ref(
8319 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8320 self.peek_token_ref(),
8321 );
8322 };
8323 Ok(Statement::Discard { object_type })
8324 }
8325
    /// Parses the tail of `CREATE [UNIQUE] INDEX [CONCURRENTLY]
    /// [IF NOT EXISTS] [name] ON table [USING method] (columns) ...`;
    /// the `INDEX` keyword was consumed by the caller and `unique` records
    /// whether `UNIQUE` preceded it.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional: when ON appears immediately (and no
        // IF NOT EXISTS was given) the index is anonymous.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // `USING method` may appear before ON ...
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // ... or after the table name; a later occurrence wins over an
        // earlier one.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Optional `INCLUDE (col, ...)` covering columns.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // Optional `NULLS [NOT] DISTINCT`.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Optional `WITH (param, ...)`, only where the dialect supports it.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate: `WHERE expr`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing ALGORITHM/LOCK clauses are parsed as ALTER TABLE
        // operations and collected separately.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8417
8418 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8420 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8421 let name = self.parse_identifier()?;
8422
8423 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8424 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8425 Some(self.parse_identifier()?)
8426 } else {
8427 None
8428 };
8429
8430 let version = if self.parse_keyword(Keyword::VERSION) {
8431 Some(self.parse_identifier()?)
8432 } else {
8433 None
8434 };
8435
8436 let cascade = self.parse_keyword(Keyword::CASCADE);
8437
8438 (schema, version, cascade)
8439 } else {
8440 (None, None, false)
8441 };
8442
8443 Ok(CreateExtension {
8444 name,
8445 if_not_exists,
8446 schema,
8447 version,
8448 cascade,
8449 })
8450 }
8451
8452 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8454 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8455 let name = self.parse_object_name(false)?;
8456
8457 let definition = if self.parse_keyword(Keyword::FROM) {
8458 CreateCollationDefinition::From(self.parse_object_name(false)?)
8459 } else if self.consume_token(&Token::LParen) {
8460 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8461 self.expect_token(&Token::RParen)?;
8462 CreateCollationDefinition::Options(options)
8463 } else {
8464 return self.expected_ref(
8465 "FROM or parenthesized option list after CREATE COLLATION name",
8466 self.peek_token_ref(),
8467 );
8468 };
8469
8470 Ok(CreateCollation {
8471 if_not_exists,
8472 name,
8473 definition,
8474 })
8475 }
8476
8477 pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
8479 if self.parse_keyword(Keyword::CONFIGURATION) {
8480 let name = self.parse_object_name(false)?;
8481 self.expect_token(&Token::LParen)?;
8482 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8483 self.expect_token(&Token::RParen)?;
8484 Ok(Statement::CreateTextSearchConfiguration(
8485 CreateTextSearchConfiguration { name, options },
8486 ))
8487 } else if self.parse_keyword(Keyword::DICTIONARY) {
8488 let name = self.parse_object_name(false)?;
8489 self.expect_token(&Token::LParen)?;
8490 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8491 self.expect_token(&Token::RParen)?;
8492 Ok(Statement::CreateTextSearchDictionary(
8493 CreateTextSearchDictionary { name, options },
8494 ))
8495 } else if self.parse_keyword(Keyword::PARSER) {
8496 let name = self.parse_object_name(false)?;
8497 self.expect_token(&Token::LParen)?;
8498 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8499 self.expect_token(&Token::RParen)?;
8500 Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
8501 name,
8502 options,
8503 }))
8504 } else if self.parse_keyword(Keyword::TEMPLATE) {
8505 let name = self.parse_object_name(false)?;
8506 self.expect_token(&Token::LParen)?;
8507 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8508 self.expect_token(&Token::RParen)?;
8509 Ok(Statement::CreateTextSearchTemplate(
8510 CreateTextSearchTemplate { name, options },
8511 ))
8512 } else {
8513 self.expected_ref(
8514 "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
8515 self.peek_token_ref(),
8516 )
8517 }
8518 }
8519
8520 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8522 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8523 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8524 let cascade_or_restrict =
8525 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8526 Ok(Statement::DropExtension(DropExtension {
8527 names,
8528 if_exists,
8529 cascade_or_restrict: cascade_or_restrict
8530 .map(|k| match k {
8531 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8532 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8533 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8534 })
8535 .transpose()?,
8536 }))
8537 }
8538
8539 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8542 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8543 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8544 let drop_behavior = self.parse_optional_drop_behavior();
8545 Ok(Statement::DropOperator(DropOperator {
8546 if_exists,
8547 operators,
8548 drop_behavior,
8549 }))
8550 }
8551
8552 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8555 let name = self.parse_operator_name()?;
8556 self.expect_token(&Token::LParen)?;
8557 let left_type = self.parse_operator_arg_type_or_none()?;
8558 self.expect_token(&Token::Comma)?;
8559 let right_type = self.parse_data_type()?;
8560 self.expect_token(&Token::RParen)?;
8561
8562 Ok(DropOperatorSignature {
8563 name,
8564 left_type,
8565 right_type,
8566 })
8567 }
8568
8569 fn parse_operator_arg_type_or_none(&mut self) -> Result<Option<DataType>, ParserError> {
8572 if self.parse_keyword(Keyword::NONE) {
8573 Ok(None)
8574 } else {
8575 Ok(Some(self.parse_data_type()?))
8576 }
8577 }
8578
8579 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8583 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8584 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8585 self.expect_keyword(Keyword::USING)?;
8586 let using = self.parse_identifier()?;
8587 let drop_behavior = self.parse_optional_drop_behavior();
8588 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8589 if_exists,
8590 names,
8591 using,
8592 drop_behavior,
8593 }))
8594 }
8595
8596 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8600 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8601 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8602 self.expect_keyword(Keyword::USING)?;
8603 let using = self.parse_identifier()?;
8604 let drop_behavior = self.parse_optional_drop_behavior();
8605 Ok(Statement::DropOperatorClass(DropOperatorClass {
8606 if_exists,
8607 names,
8608 using,
8609 drop_behavior,
8610 }))
8611 }
8612
8613 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8617 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8618 self.expect_token(&Token::LParen)?;
8619 let columns =
8620 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8621 self.expect_token(&Token::RParen)?;
8622 Ok(HiveDistributionStyle::PARTITIONED { columns })
8623 } else {
8624 Ok(HiveDistributionStyle::NONE)
8625 }
8626 }
8627
8628 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8632 let token = self.next_token();
8633 match &token.token {
8634 Token::Word(w) => match w.keyword {
8635 Keyword::AUTO => Ok(DistStyle::Auto),
8636 Keyword::EVEN => Ok(DistStyle::Even),
8637 Keyword::KEY => Ok(DistStyle::Key),
8638 Keyword::ALL => Ok(DistStyle::All),
8639 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8640 },
8641 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8642 }
8643 }
8644
    /// Parse Hive storage/format clauses that may follow a `CREATE TABLE`:
    /// `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'`, and
    /// `WITH SERDEPROPERTIES (...)`, in any order. Returns `None` when no
    /// such clause is present; the `HiveFormat` is created lazily on the
    /// first matching clause.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    // ROW FORMAT SERDE '...' | ROW FORMAT DELIMITED ...
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // STORED AS INPUTFORMAT <expr> OUTPUTFORMAT <expr>
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // STORED AS <file format keyword>
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Put WITH back so the options parser can match the full
                    // `WITH SERDEPROPERTIES (...)` sequence itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // A WITH that isn't SERDEPROPERTIES: stop here so a
                        // later clause (e.g. `WITH (...)`) can consume it.
                        break;
                    }
                }
                None => break,
                // Defensive: only the four listed keywords are expected here.
                _ => break,
            }
        }

        Ok(hive_format)
    }
8700
    /// Parse a Hive `ROW FORMAT` clause; the leading `ROW` keyword has
    /// already been consumed by the caller.
    ///
    /// Two forms: `ROW FORMAT SERDE '<class>'`, or `ROW FORMAT DELIMITED`
    /// followed by any number of delimiter sub-clauses (fields, collection
    /// items, map keys, lines, null representation).
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED (or neither keyword matched): collect delimiter
                // sub-clauses until nothing more is recognized.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        // FIELDS TERMINATED BY <char> [ESCAPED BY <char>]
                        Some(Keyword::FIELDS)
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::FieldsTerminatedBy,
                                char: self.parse_identifier()?,
                            });

                            if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsEscapedBy,
                                    char: self.parse_identifier()?,
                                });
                            }
                        }
                        // COLLECTION ITEMS TERMINATED BY <char>
                        Some(Keyword::COLLECTION)
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        // MAP KEYS TERMINATED BY <char>
                        Some(Keyword::MAP)
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        // LINES TERMINATED BY <char>
                        Some(Keyword::LINES)
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::LinesTerminatedBy,
                                char: self.parse_identifier()?,
                            });
                        }
                        // NULL DEFINED AS <char>
                        Some(Keyword::NULL)
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) =>
                        {
                            row_delimiters.push(HiveRowDelimiter {
                                delimiter: HiveDelimiter::NullDefinedAs,
                                char: self.parse_identifier()?,
                            });
                        }
                        _ => {
                            // Nothing recognized: stop collecting.
                            // NOTE(review): if a keyword matched but its guard
                            // (e.g. `TERMINATED BY`) did not, that keyword has
                            // already been consumed when we break here —
                            // confirm this is the intended behavior.
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8787
8788 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8789 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8790 Ok(Some(self.parse_identifier()?))
8791 } else {
8792 Ok(None)
8793 }
8794 }
8795
    /// Parse the body of a `CREATE TABLE` statement, after
    /// `CREATE [OR REPLACE] [TEMPORARY] [GLOBAL|LOCAL] [TRANSIENT] TABLE`
    /// has been consumed; the already-consumed modifiers are passed in.
    ///
    /// Handles clauses from several dialects (Hive, BigQuery, ClickHouse,
    /// SQLite, PostgreSQL, ...). The clauses are parsed in a fixed order, so
    /// reordering the sections below would change accepted input.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<CreateTable, ParserError> {
        // BigQuery permits unquoted hyphenated names (e.g. project-id.dataset).
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // `CREATE TABLE <name> PARTITION OF <parent>` (PostgreSQL-style).
        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        // `ON CLUSTER <name>` (ClickHouse-style).
        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // `CLONE <source>`; a failed name parse is deliberately swallowed
        // (`.ok()`), leaving `clone` as `None`.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Parenthesized column definitions and table constraints (optional).
        let (columns, constraints) = self.parse_columns()?;
        // Hive: `COMMENT '<text>'` directly after the column list (no `=`).
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // A PARTITION OF table must specify its bounds: `FOR VALUES ...` or
        // `DEFAULT`; anything else is an error.
        let for_values = if partition_of.is_some() {
            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
                Some(self.parse_partition_for_values()?)
            } else {
                return self.expected_ref(
                    "FOR VALUES or DEFAULT after PARTITION OF",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        // SQLite: `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive-specific clauses: PARTITIONED BY / CLUSTERED BY / formats.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        // PARTITION BY / CLUSTER BY / WITH / TBLPROPERTIES / OPTIONS / plain
        // key-value options, bundled into one configuration struct.
        let create_table_config = self.parse_optional_create_table_config()?;

        // Table-level `PRIMARY KEY <expr>` (ClickHouse-style).
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // `ORDER BY`: either a parenthesized (possibly empty) expression list
        // or a single unparenthesized expression.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token_ref().token != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        // `ON COMMIT { DELETE ROWS | PRESERVE ROWS | DROP }`.
        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite: `STRICT` tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // `BACKUP { YES | NO }` (Redshift-style); stored as `Some(bool)`.
        let backup = if self.parse_keyword(Keyword::BACKUP) {
            let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
            Some(keyword == Keyword::YES)
        } else {
            None
        };

        // Redshift-style distribution/sort clauses.
        let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
            Some(self.parse_dist_style()?)
        } else {
            None
        };
        let distkey = if self.parse_keyword(Keyword::DISTKEY) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };
        let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        // `AS <query>`, or a bare `SELECT ...` in dialects that support
        // `CREATE TABLE ... SELECT`.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            // Un-consume SELECT so the full query parser sees it.
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        // Assemble everything via the builder; field order here is cosmetic.
        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .backup(backup)
            .diststyle(diststyle)
            .distkey(distkey)
            .sortkey(sortkey)
            .build())
    }
8980
    /// Parse an optional `LIKE` clause for `CREATE TABLE`.
    ///
    /// Two shapes are supported:
    /// - parenthesized `(LIKE <table> [{INCLUDING | EXCLUDING} DEFAULTS])`,
    ///   when the dialect's `supports_create_table_like_parenthesized()` is
    ///   true, and
    /// - plain `LIKE <table>` or `ILIKE <table>`.
    ///
    /// Returns `Ok(None)` when no LIKE clause is present.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // Not a LIKE clause: put the '(' back — it most likely starts
                // the column definition list.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
9018
9019 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
9020 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
9021 Ok(OnCommit::DeleteRows)
9022 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
9023 Ok(OnCommit::PreserveRows)
9024 } else if self.parse_keywords(&[Keyword::DROP]) {
9025 Ok(OnCommit::Drop)
9026 } else {
9027 parser_err!(
9028 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
9029 self.peek_token_ref()
9030 )
9031 }
9032 }
9033
9034 fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
9040 if self.parse_keyword(Keyword::DEFAULT) {
9041 return Ok(ForValues::Default);
9042 }
9043
9044 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9045
9046 if self.parse_keyword(Keyword::IN) {
9047 self.expect_token(&Token::LParen)?;
9049 if self.peek_token_ref().token == Token::RParen {
9050 return self.expected_ref("at least one value", self.peek_token_ref());
9051 }
9052 let values = self.parse_comma_separated(Parser::parse_expr)?;
9053 self.expect_token(&Token::RParen)?;
9054 Ok(ForValues::In(values))
9055 } else if self.parse_keyword(Keyword::FROM) {
9056 self.expect_token(&Token::LParen)?;
9058 if self.peek_token_ref().token == Token::RParen {
9059 return self.expected_ref("at least one value", self.peek_token_ref());
9060 }
9061 let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
9062 self.expect_token(&Token::RParen)?;
9063 self.expect_keyword(Keyword::TO)?;
9064 self.expect_token(&Token::LParen)?;
9065 if self.peek_token_ref().token == Token::RParen {
9066 return self.expected_ref("at least one value", self.peek_token_ref());
9067 }
9068 let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
9069 self.expect_token(&Token::RParen)?;
9070 Ok(ForValues::From { from, to })
9071 } else if self.parse_keyword(Keyword::WITH) {
9072 self.expect_token(&Token::LParen)?;
9074 self.expect_keyword(Keyword::MODULUS)?;
9075 let modulus = self.parse_literal_uint()?;
9076 self.expect_token(&Token::Comma)?;
9077 self.expect_keyword(Keyword::REMAINDER)?;
9078 let remainder = self.parse_literal_uint()?;
9079 self.expect_token(&Token::RParen)?;
9080 Ok(ForValues::With { modulus, remainder })
9081 } else {
9082 self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
9083 }
9084 }
9085
9086 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9088 if self.parse_keyword(Keyword::MINVALUE) {
9089 Ok(PartitionBoundValue::MinValue)
9090 } else if self.parse_keyword(Keyword::MAXVALUE) {
9091 Ok(PartitionBoundValue::MaxValue)
9092 } else {
9093 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9094 }
9095 }
9096
    /// Parse trailing `CREATE TABLE` configuration clauses: `INHERITS (...)`,
    /// `WITH (...)`, `TBLPROPERTIES (...)`, `PARTITION BY`, `CLUSTER BY`,
    /// `OPTIONS (...)`, and "plain" key/value options.
    ///
    /// NOTE(review): only one `table_options` representation is kept — a
    /// later matching clause (e.g. TBLPROPERTIES) silently replaces an
    /// earlier `WITH (...)` result. Confirm this is intended.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // `INHERITS (parent [, ...])` (PostgreSQL-style).
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // `WITH (...)` options; empty means the clause was absent.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // `TBLPROPERTIES (...)` (Hive-style); overwrites any WITH result.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        // `PARTITION BY <expr>` for dialects that support it.
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            // `CLUSTER BY expr [, ...]` — note: no parentheses (BigQuery).
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // `OPTIONS (...)` (BigQuery); peeked first so nothing is consumed
            // unless the keyword is actually OPTIONS.
            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fallback: plain `KEY [=] value` options (e.g. ENGINE, COMMENT),
        // only when nothing else matched and the dialect is not Hive.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
9161
    /// Parse one "plain" table option as found after the column list in
    /// `CREATE TABLE` (mostly MySQL-style), e.g. `ENGINE = InnoDB`,
    /// `COMMENT 'x'`, `DEFAULT CHARSET = utf8mb4`.
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start any known
    /// option, so the caller can stop scanning.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // `START TRANSACTION` recorded verbatim as a bare identifier option.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // `COMMENT [=] '<text>'` — whether '=' appeared is preserved so the
        // statement round-trips verbatim.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // `ENGINE [=] <name> [(<params>)]`.
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // `TABLESPACE [=] <name> [STORAGE [=] {DISK | MEMORY}]`; the name may
        // be a bare word or a single-quoted string.
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // `UNION [=] (tbl [, ...])`.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Remaining options are simple `KEY [=] value` pairs. Multi-word keys
        // are tried before their single-word prefixes (e.g. DEFAULT CHARSET
        // before CHARSET) so the longest match wins.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Nothing recognized: not a plain option.
            return Ok(None);
        };

        // The '=' between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // Prefer a literal value; fall back to a bare identifier.
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9354
9355 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9357 let mut options = Vec::new();
9358
9359 while let Some(option) = self.parse_plain_option()? {
9360 options.push(option);
9361 let _ = self.consume_token(&Token::Comma);
9364 }
9365
9366 Ok(options)
9367 }
9368
9369 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9371 let comment = if self.parse_keyword(Keyword::COMMENT) {
9372 let has_eq = self.consume_token(&Token::Eq);
9373 let comment = self.parse_comment_value()?;
9374 Some(if has_eq {
9375 CommentDef::WithEq(comment)
9376 } else {
9377 CommentDef::WithoutEq(comment)
9378 })
9379 } else {
9380 None
9381 };
9382 Ok(comment)
9383 }
9384
9385 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9387 let next_token = self.next_token();
9388 let value = match next_token.token {
9389 Token::SingleQuotedString(str) => str,
9390 Token::DollarQuotedString(str) => str.value,
9391 _ => self.expected("string literal", next_token)?,
9392 };
9393 Ok(value)
9394 }
9395
    /// Parse an optional parenthesized procedure parameter list.
    ///
    /// Returns `Some(vec![])` both when there is no `(` at all and for an
    /// empty list `()` — callers always receive `Some`.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // A parameter must start with a word (its mode keyword or name).
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // ')' ends the list; a trailing comma before it is accepted.
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9421
    /// Parse a parenthesized list of column definitions and/or table-level
    /// constraints for `CREATE TABLE`.
    ///
    /// Returns empty vectors when there is no `(` at all or the list is `()`.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Try a table constraint first; otherwise a word must begin a
            // column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Stop at ')': either there was no trailing comma, or the dialect
            // or parser options allow a trailing comma before ')'.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9462
9463 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9465 let mode = if self.parse_keyword(Keyword::IN) {
9466 Some(ArgMode::In)
9467 } else if self.parse_keyword(Keyword::OUT) {
9468 Some(ArgMode::Out)
9469 } else if self.parse_keyword(Keyword::INOUT) {
9470 Some(ArgMode::InOut)
9471 } else {
9472 None
9473 };
9474 let name = self.parse_identifier()?;
9475 let data_type = self.parse_data_type()?;
9476 let default = if self.consume_token(&Token::Eq) {
9477 Some(self.parse_expr()?)
9478 } else {
9479 None
9480 };
9481
9482 Ok(ProcedureParam {
9483 name,
9484 data_type,
9485 mode,
9486 default,
9487 })
9488 }
9489
    /// Parse a column definition with a mandatory data type; thin wrapper
    /// over `parse_column_def_inner(false)`.
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9494
    /// Parse a column definition: `name [data_type] [CONSTRAINT <name>]
    /// <options>...`.
    ///
    /// When `optional_data_type` is true (used for Hive `PARTITIONED BY`
    /// columns) a missing type becomes `DataType::Unspecified`; SQLite
    /// columns may also legally omit the type (see
    /// `is_column_type_sqlite_unspecified`).
    fn parse_column_def_inner(
        &mut self,
        optional_data_type: bool,
    ) -> Result<ColumnDef, ParserError> {
        let col_name = self.parse_identifier()?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else if optional_data_type {
            self.maybe_parse(|parser| parser.parse_data_type())?
                .unwrap_or(DataType::Unspecified)
        } else {
            self.parse_data_type()?
        };
        let mut options = vec![];
        // Collect column options until none match.
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // Named column constraint: `CONSTRAINT <name> <option>`; a
                // name without a following option is an error.
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected_ref(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token_ref(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                break;
            };
        }
        Ok(ColumnDef {
            name: col_name,
            data_type,
            options,
        })
    }
9532
9533 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9534 if dialect_of!(self is SQLiteDialect) {
9535 match &self.peek_token_ref().token {
9536 Token::Word(word) => matches!(
9537 word.keyword,
9538 Keyword::CONSTRAINT
9539 | Keyword::PRIMARY
9540 | Keyword::NOT
9541 | Keyword::UNIQUE
9542 | Keyword::CHECK
9543 | Keyword::DEFAULT
9544 | Keyword::COLLATE
9545 | Keyword::REFERENCES
9546 | Keyword::GENERATED
9547 | Keyword::AS
9548 ),
9549 _ => true, }
9551 } else {
9552 false
9553 }
9554 }
9555
    /// Parse one optional column option (e.g. `NOT NULL`, `DEFAULT <expr>`).
    ///
    /// The dialect hook runs first: `Some(result)` means the dialect handled
    /// the option itself (possibly with an error); `None` falls through to
    /// the generic parser, which runs with the parser state set to
    /// `ColumnDefinition` so nested parsers know they are inside a column
    /// definition.
    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if let Some(option) = self.dialect.parse_column_option(self)? {
            return option;
        }

        self.with_state(
            ColumnDefinition,
            |parser| -> Result<Option<ColumnOption>, ParserError> {
                parser.parse_optional_column_option_inner()
            },
        )
    }
9569
    /// Parses a single column option using the generic (dialect-independent)
    /// grammar. Returns `Ok(None)` when the upcoming tokens do not start any
    /// recognized column option.
    ///
    /// Callers should go through `parse_optional_column_option`, which first
    /// offers the option to the active dialect and sets the parser state
    /// before delegating here. Branch order is significant: each successful
    /// `parse_keyword(s)` consumes tokens.
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            // ClickHouse `MATERIALIZED <expr>` computed column.
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            // ClickHouse `ALIAS <expr>` computed-on-read column.
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // ClickHouse `EPHEMERAL [<expr>]`: the default expression is
            // optional, so peek for an immediate `,` or `)` ending the column.
            if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            // Inline `PRIMARY KEY` on a column: represented as a constraint
            // with no explicit name or column list.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            // Inline `UNIQUE [KEY]`; the trailing `KEY` is only consumed for
            // dialects that support the MySQL-style `UNIQUE KEY` spelling.
            let index_type_display =
                if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
                    KeyOrIndexDisplay::Key
                } else {
                    KeyOrIndexDisplay::None
                };
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
            // A bare `KEY` column option is mapped to a primary-key constraint
            // for dialects that support it.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            // Inline foreign-key reference:
            // `REFERENCES <table> [(cols)] [MATCH ...] [ON DELETE ...] [ON UPDATE ...]`.
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            // MATCH / ON DELETE / ON UPDATE may appear in any order, each at
            // most once; the loop stops at the first unrecognized token.
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None, index_name: None, columns: vec![], foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The check expression is parsed in the normal state so that
            // column-definition-specific rules don't apply inside it.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;

            // Optional `[NOT] ENFORCED` suffix.
            let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                Some(true)
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                Some(false)
            } else {
                None
            };

            Ok(Some(
                CheckConstraint {
                    name: None, expr: Box::new(expr),
                    enforced,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): the keyword is consumed before the dialect check
            // (left operand of && runs first), so a non-MySQL dialect loses
            // the AUTO_INCREMENT token even though the branch is not taken —
            // confirm this short-circuit order is intended.
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // MySQL `ON UPDATE <expr>` (e.g. `ON UPDATE CURRENT_TIMESTAMP`).
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // BigQuery `OPTIONS(...)`: back up one token so `parse_options`
            // can consume the OPTIONS keyword itself.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // MSSQL `IDENTITY[(seed, increment)]`.
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            // SQLite `ON CONFLICT <resolution>` column clause.
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
9805
9806 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9807 let name = self.parse_object_name(false)?;
9808 self.expect_token(&Token::Eq)?;
9809 let value = self.parse_literal_string()?;
9810
9811 Ok(Tag::new(name, value))
9812 }
9813
9814 fn parse_optional_column_option_generated(
9815 &mut self,
9816 ) -> Result<Option<ColumnOption>, ParserError> {
9817 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
9818 let mut sequence_options = vec![];
9819 if self.expect_token(&Token::LParen).is_ok() {
9820 sequence_options = self.parse_create_sequence_options()?;
9821 self.expect_token(&Token::RParen)?;
9822 }
9823 Ok(Some(ColumnOption::Generated {
9824 generated_as: GeneratedAs::Always,
9825 sequence_options: Some(sequence_options),
9826 generation_expr: None,
9827 generation_expr_mode: None,
9828 generated_keyword: true,
9829 }))
9830 } else if self.parse_keywords(&[
9831 Keyword::BY,
9832 Keyword::DEFAULT,
9833 Keyword::AS,
9834 Keyword::IDENTITY,
9835 ]) {
9836 let mut sequence_options = vec![];
9837 if self.expect_token(&Token::LParen).is_ok() {
9838 sequence_options = self.parse_create_sequence_options()?;
9839 self.expect_token(&Token::RParen)?;
9840 }
9841 Ok(Some(ColumnOption::Generated {
9842 generated_as: GeneratedAs::ByDefault,
9843 sequence_options: Some(sequence_options),
9844 generation_expr: None,
9845 generation_expr_mode: None,
9846 generated_keyword: true,
9847 }))
9848 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
9849 if self.expect_token(&Token::LParen).is_ok() {
9850 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9851 self.expect_token(&Token::RParen)?;
9852 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9853 Ok((
9854 GeneratedAs::ExpStored,
9855 Some(GeneratedExpressionMode::Stored),
9856 ))
9857 } else if dialect_of!(self is PostgreSqlDialect) {
9858 self.expected_ref("STORED", self.peek_token_ref())
9860 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9861 Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
9862 } else {
9863 Ok((GeneratedAs::Always, None))
9864 }?;
9865
9866 Ok(Some(ColumnOption::Generated {
9867 generated_as: gen_as,
9868 sequence_options: None,
9869 generation_expr: Some(expr),
9870 generation_expr_mode: expr_mode,
9871 generated_keyword: true,
9872 }))
9873 } else {
9874 Ok(None)
9875 }
9876 } else {
9877 Ok(None)
9878 }
9879 }
9880
9881 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9882 self.expect_token(&Token::LParen)?;
9884 let expr = self.parse_expr()?;
9885 self.expect_token(&Token::RParen)?;
9886
9887 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9888 (
9889 GeneratedAs::ExpStored,
9890 Some(GeneratedExpressionMode::Stored),
9891 )
9892 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9893 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9894 } else {
9895 (GeneratedAs::Always, None)
9896 };
9897
9898 Ok(Some(ColumnOption::Generated {
9899 generated_as: gen_as,
9900 sequence_options: None,
9901 generation_expr: Some(expr),
9902 generation_expr_mode: expr_mode,
9903 generated_keyword: false,
9904 }))
9905 }
9906
9907 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9909 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9910 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9911 {
9912 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9913
9914 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9915 self.expect_token(&Token::LParen)?;
9916 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9917 self.expect_token(&Token::RParen)?;
9918 Some(sorted_by_columns)
9919 } else {
9920 None
9921 };
9922
9923 self.expect_keyword_is(Keyword::INTO)?;
9924 let num_buckets = self.parse_number_value()?.value;
9925 self.expect_keyword_is(Keyword::BUCKETS)?;
9926 Some(ClusteredBy {
9927 columns,
9928 sorted_by,
9929 num_buckets,
9930 })
9931 } else {
9932 None
9933 };
9934 Ok(clustered_by)
9935 }
9936
9937 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9941 if self.parse_keyword(Keyword::RESTRICT) {
9942 Ok(ReferentialAction::Restrict)
9943 } else if self.parse_keyword(Keyword::CASCADE) {
9944 Ok(ReferentialAction::Cascade)
9945 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9946 Ok(ReferentialAction::SetNull)
9947 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9948 Ok(ReferentialAction::NoAction)
9949 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9950 Ok(ReferentialAction::SetDefault)
9951 } else {
9952 self.expected_ref(
9953 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9954 self.peek_token_ref(),
9955 )
9956 }
9957 }
9958
9959 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9961 if self.parse_keyword(Keyword::FULL) {
9962 Ok(ConstraintReferenceMatchKind::Full)
9963 } else if self.parse_keyword(Keyword::PARTIAL) {
9964 Ok(ConstraintReferenceMatchKind::Partial)
9965 } else if self.parse_keyword(Keyword::SIMPLE) {
9966 Ok(ConstraintReferenceMatchKind::Simple)
9967 } else {
9968 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9969 }
9970 }
9971
9972 fn parse_constraint_using_index(
9975 &mut self,
9976 name: Option<Ident>,
9977 ) -> Result<ConstraintUsingIndex, ParserError> {
9978 let index_name = self.parse_identifier()?;
9979 let characteristics = self.parse_constraint_characteristics()?;
9980 Ok(ConstraintUsingIndex {
9981 name,
9982 index_name,
9983 characteristics,
9984 })
9985 }
9986
    /// Parses zero or more constraint characteristics in any order:
    /// `[NOT] DEFERRABLE`, `INITIALLY {DEFERRED | IMMEDIATE}`, `[NOT] ENFORCED`.
    ///
    /// Each characteristic is accepted at most once; the loop ends at the
    /// first token that matches none of them. Returns `Ok(None)` when no
    /// characteristic was present at all.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // `NOT DEFERRABLE` is tried before bare `DEFERRABLE` so the NOT is
            // not left behind; `parse_keywords` backtracks on a partial match.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    // `INITIALLY` must be followed by one of the two keywords.
                    self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
10024
    /// Parses an optional table-level constraint in a `CREATE TABLE` /
    /// `ALTER TABLE ... ADD` statement, e.g. `[CONSTRAINT <name>] UNIQUE (...)`.
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start a table
    /// constraint (the lookahead token is pushed back in that case).
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT [<name>]` prefix. Some dialects allow the
        // CONSTRAINT keyword without a name when a constraint keyword follows.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // Unnamed FULLTEXT/SPATIAL only introduce a table constraint in
        // MySQL-compatible dialects; bail out early everywhere else.
        if name.is_none()
            && self
                .peek_one_of_keywords(&[Keyword::FULLTEXT, Keyword::SPATIAL])
                .is_some()
            && !dialect_of!(self is GenericDialect | MySqlDialect)
        {
            return Ok(None);
        }

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // MySQL `UNIQUE USING INDEX <index_name>` variant.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // Optional `KEY`/`INDEX` display word (MySQL-style only).
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                // Postgres `NULLS [NOT] DISTINCT`.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                // MySQL `PRIMARY KEY USING INDEX <index_name>` variant.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order, each
                // at most once; stop at the first unrecognized token.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // Optional `[NOT] ENFORCED` suffix.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                // MySQL plain `{INDEX | KEY} [name] [USING ...] (cols) ...`
                // table definition (only valid without a CONSTRAINT prefix).
                let display_as_key = w.keyword == Keyword::KEY;

                // A following `USING` starts the index-type clause, not a name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // MySQL forbids a constraint name on FULLTEXT/SPATIAL indexes.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // Postgres `EXCLUDE [USING <method>] (<elem> WITH <op>, ...)
                // [INCLUDE (...)] [WHERE (...)]`.
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements = self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // A CONSTRAINT name was given, so a constraint body is required.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint: push the token back and report "none".
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10313
10314 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10315 let expr = self.parse_expr()?;
10316 self.expect_keyword_is(Keyword::WITH)?;
10317 let operator_token = self.next_token();
10318 let operator = operator_token.token.to_string();
10319 Ok(ExclusionElement { expr, operator })
10320 }
10321
10322 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10323 Ok(if self.parse_keyword(Keyword::NULLS) {
10324 let not = self.parse_keyword(Keyword::NOT);
10325 self.expect_keyword_is(Keyword::DISTINCT)?;
10326 if not {
10327 NullsDistinctOption::NotDistinct
10328 } else {
10329 NullsDistinctOption::Distinct
10330 }
10331 } else {
10332 NullsDistinctOption::None
10333 })
10334 }
10335
10336 pub fn maybe_parse_options(
10338 &mut self,
10339 keyword: Keyword,
10340 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10341 if let Token::Word(word) = &self.peek_token_ref().token {
10342 if word.keyword == keyword {
10343 return Ok(Some(self.parse_options(keyword)?));
10344 }
10345 };
10346 Ok(None)
10347 }
10348
10349 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10351 if self.parse_keyword(keyword) {
10352 self.expect_token(&Token::LParen)?;
10353 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10354 self.expect_token(&Token::RParen)?;
10355 Ok(options)
10356 } else {
10357 Ok(vec![])
10358 }
10359 }
10360
10361 pub fn parse_options_with_keywords(
10363 &mut self,
10364 keywords: &[Keyword],
10365 ) -> Result<Vec<SqlOption>, ParserError> {
10366 if self.parse_keywords(keywords) {
10367 self.expect_token(&Token::LParen)?;
10368 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10369 self.expect_token(&Token::RParen)?;
10370 Ok(options)
10371 } else {
10372 Ok(vec![])
10373 }
10374 }
10375
10376 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10378 Ok(if self.parse_keyword(Keyword::BTREE) {
10379 IndexType::BTree
10380 } else if self.parse_keyword(Keyword::HASH) {
10381 IndexType::Hash
10382 } else if self.parse_keyword(Keyword::GIN) {
10383 IndexType::GIN
10384 } else if self.parse_keyword(Keyword::GIST) {
10385 IndexType::GiST
10386 } else if self.parse_keyword(Keyword::SPGIST) {
10387 IndexType::SPGiST
10388 } else if self.parse_keyword(Keyword::BRIN) {
10389 IndexType::BRIN
10390 } else if self.parse_keyword(Keyword::BLOOM) {
10391 IndexType::Bloom
10392 } else {
10393 IndexType::Custom(self.parse_identifier()?)
10394 })
10395 }
10396
10397 pub fn parse_optional_using_then_index_type(
10404 &mut self,
10405 ) -> Result<Option<IndexType>, ParserError> {
10406 if self.parse_keyword(Keyword::USING) {
10407 Ok(Some(self.parse_index_type()?))
10408 } else {
10409 Ok(None)
10410 }
10411 }
10412
10413 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10417 self.maybe_parse(|parser| parser.parse_identifier())
10418 }
10419
10420 #[must_use]
10421 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10423 if self.parse_keyword(Keyword::KEY) {
10424 KeyOrIndexDisplay::Key
10425 } else if self.parse_keyword(Keyword::INDEX) {
10426 KeyOrIndexDisplay::Index
10427 } else {
10428 KeyOrIndexDisplay::None
10429 }
10430 }
10431
10432 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10434 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10435 Ok(Some(IndexOption::Using(index_type)))
10436 } else if self.parse_keyword(Keyword::COMMENT) {
10437 let s = self.parse_literal_string()?;
10438 Ok(Some(IndexOption::Comment(s)))
10439 } else {
10440 Ok(None)
10441 }
10442 }
10443
10444 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10446 let mut options = Vec::new();
10447
10448 loop {
10449 match self.parse_optional_index_option()? {
10450 Some(index_option) => options.push(index_option),
10451 None => return Ok(options),
10452 }
10453 }
10454 }
10455
10456 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10458 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10459
10460 match &self.peek_token_ref().token {
10461 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10462 Ok(SqlOption::Ident(self.parse_identifier()?))
10463 }
10464 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10465 self.parse_option_partition()
10466 }
10467 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10468 self.parse_option_clustered()
10469 }
10470 _ => {
10471 let name = self.parse_identifier()?;
10472 self.expect_token(&Token::Eq)?;
10473 let value = self.parse_expr()?;
10474
10475 Ok(SqlOption::KeyValue { key: name, value })
10476 }
10477 }
10478 }
10479
    /// Parses the MSSQL `CLUSTERED ...` table option. The longest keyword
    /// sequence is tried first — `parse_keywords` backtracks on a partial
    /// match — in this order:
    /// 1. `CLUSTERED COLUMNSTORE INDEX ORDER ( <cols> )`
    /// 2. `CLUSTERED COLUMNSTORE INDEX`
    /// 3. `CLUSTERED INDEX ( <col> [ASC | DESC], ... )`
    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
        if self.parse_keywords(&[
            Keyword::CLUSTERED,
            Keyword::COLUMNSTORE,
            Keyword::INDEX,
            Keyword::ORDER,
        ]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndexOrder(
                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                ),
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndex,
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
            self.expect_token(&Token::LParen)?;

            // Each entry is a column name with an optional ASC/DESC marker.
            let columns = self.parse_comma_separated(|p| {
                let name = p.parse_identifier()?;
                let asc = p.parse_asc_desc();

                Ok(ClusteredIndex { name, asc })
            })?;

            self.expect_token(&Token::RParen)?;

            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
        } else {
            Err(ParserError::ParserError(
                "invalid CLUSTERED sequence".to_string(),
            ))
        }
    }
10516
10517 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10519 self.expect_keyword_is(Keyword::PARTITION)?;
10520 self.expect_token(&Token::LParen)?;
10521 let column_name = self.parse_identifier()?;
10522
10523 self.expect_keyword_is(Keyword::RANGE)?;
10524 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10525 Some(PartitionRangeDirection::Left)
10526 } else if self.parse_keyword(Keyword::RIGHT) {
10527 Some(PartitionRangeDirection::Right)
10528 } else {
10529 None
10530 };
10531
10532 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10533 self.expect_token(&Token::LParen)?;
10534
10535 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10536
10537 self.expect_token(&Token::RParen)?;
10538 self.expect_token(&Token::RParen)?;
10539
10540 Ok(SqlOption::Partition {
10541 column_name,
10542 range_direction,
10543 for_values,
10544 })
10545 }
10546
10547 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10549 self.expect_token(&Token::LParen)?;
10550 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10551 self.expect_token(&Token::RParen)?;
10552 Ok(Partition::Partitions(partitions))
10553 }
10554
10555 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10557 self.expect_token(&Token::LParen)?;
10558 self.expect_keyword_is(Keyword::SELECT)?;
10559 let projection = self.parse_projection()?;
10560 let group_by = self.parse_optional_group_by()?;
10561 let order_by = self.parse_optional_order_by()?;
10562 self.expect_token(&Token::RParen)?;
10563 Ok(ProjectionSelect {
10564 projection,
10565 group_by,
10566 order_by,
10567 })
10568 }
10569 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10571 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10572 let name = self.parse_identifier()?;
10573 let query = self.parse_projection_select()?;
10574 Ok(AlterTableOperation::AddProjection {
10575 if_not_exists,
10576 name,
10577 select: query,
10578 })
10579 }
10580
10581 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10585 self.expect_keyword_is(Keyword::ALTER)?;
10586 self.expect_keyword_is(Keyword::SORTKEY)?;
10587 self.expect_token(&Token::LParen)?;
10588 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10589 self.expect_token(&Token::RParen)?;
10590 Ok(AlterTableOperation::AlterSortKey { columns })
10591 }
10592
10593 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10595 let operation = if self.parse_keyword(Keyword::ADD) {
10596 if let Some(constraint) = self.parse_optional_table_constraint()? {
10597 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10598 AlterTableOperation::AddConstraint {
10599 constraint,
10600 not_valid,
10601 }
10602 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10603 && self.parse_keyword(Keyword::PROJECTION)
10604 {
10605 return self.parse_alter_table_add_projection();
10606 } else {
10607 let if_not_exists =
10608 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10609 let mut new_partitions = vec![];
10610 loop {
10611 if self.parse_keyword(Keyword::PARTITION) {
10612 new_partitions.push(self.parse_partition()?);
10613 } else {
10614 break;
10615 }
10616 }
10617 if !new_partitions.is_empty() {
10618 AlterTableOperation::AddPartitions {
10619 if_not_exists,
10620 new_partitions,
10621 }
10622 } else {
10623 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10624
10625 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10626 {
10627 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10628 || if_not_exists
10629 } else {
10630 false
10631 };
10632
10633 let column_def = self.parse_column_def()?;
10634
10635 let column_position = self.parse_column_position()?;
10636
10637 AlterTableOperation::AddColumn {
10638 column_keyword,
10639 if_not_exists,
10640 column_def,
10641 column_position,
10642 }
10643 }
10644 }
10645 } else if self.parse_keyword(Keyword::RENAME) {
10646 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10647 let old_name = self.parse_identifier()?;
10648 self.expect_keyword_is(Keyword::TO)?;
10649 let new_name = self.parse_identifier()?;
10650 AlterTableOperation::RenameConstraint { old_name, new_name }
10651 } else if self.parse_keyword(Keyword::TO) {
10652 let table_name = self.parse_object_name(false)?;
10653 AlterTableOperation::RenameTable {
10654 table_name: RenameTableNameKind::To(table_name),
10655 }
10656 } else if self.parse_keyword(Keyword::AS) {
10657 let table_name = self.parse_object_name(false)?;
10658 AlterTableOperation::RenameTable {
10659 table_name: RenameTableNameKind::As(table_name),
10660 }
10661 } else {
10662 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10664 self.expect_keyword_is(Keyword::TO)?;
10665 let new_column_name = self.parse_identifier()?;
10666 AlterTableOperation::RenameColumn {
10667 old_column_name,
10668 new_column_name,
10669 }
10670 }
10671 } else if self.parse_keyword(Keyword::DISABLE) {
10672 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10673 AlterTableOperation::DisableRowLevelSecurity {}
10674 } else if self.parse_keyword(Keyword::RULE) {
10675 let name = self.parse_identifier()?;
10676 AlterTableOperation::DisableRule { name }
10677 } else if self.parse_keyword(Keyword::TRIGGER) {
10678 let name = self.parse_identifier()?;
10679 AlterTableOperation::DisableTrigger { name }
10680 } else {
10681 return self.expected_ref(
10682 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10683 self.peek_token_ref(),
10684 );
10685 }
10686 } else if self.parse_keyword(Keyword::ENABLE) {
10687 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10688 let name = self.parse_identifier()?;
10689 AlterTableOperation::EnableAlwaysRule { name }
10690 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10691 let name = self.parse_identifier()?;
10692 AlterTableOperation::EnableAlwaysTrigger { name }
10693 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10694 AlterTableOperation::EnableRowLevelSecurity {}
10695 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10696 let name = self.parse_identifier()?;
10697 AlterTableOperation::EnableReplicaRule { name }
10698 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10699 let name = self.parse_identifier()?;
10700 AlterTableOperation::EnableReplicaTrigger { name }
10701 } else if self.parse_keyword(Keyword::RULE) {
10702 let name = self.parse_identifier()?;
10703 AlterTableOperation::EnableRule { name }
10704 } else if self.parse_keyword(Keyword::TRIGGER) {
10705 let name = self.parse_identifier()?;
10706 AlterTableOperation::EnableTrigger { name }
10707 } else {
10708 return self.expected_ref(
10709 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10710 self.peek_token_ref(),
10711 );
10712 }
10713 } else if self.parse_keywords(&[
10714 Keyword::FORCE,
10715 Keyword::ROW,
10716 Keyword::LEVEL,
10717 Keyword::SECURITY,
10718 ]) {
10719 AlterTableOperation::ForceRowLevelSecurity
10720 } else if self.parse_keywords(&[
10721 Keyword::NO,
10722 Keyword::FORCE,
10723 Keyword::ROW,
10724 Keyword::LEVEL,
10725 Keyword::SECURITY,
10726 ]) {
10727 AlterTableOperation::NoForceRowLevelSecurity
10728 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10729 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10730 {
10731 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10732 let name = self.parse_identifier()?;
10733 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10734 Some(self.parse_identifier()?)
10735 } else {
10736 None
10737 };
10738 AlterTableOperation::ClearProjection {
10739 if_exists,
10740 name,
10741 partition,
10742 }
10743 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10744 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10745 {
10746 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10747 let name = self.parse_identifier()?;
10748 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10749 Some(self.parse_identifier()?)
10750 } else {
10751 None
10752 };
10753 AlterTableOperation::MaterializeProjection {
10754 if_exists,
10755 name,
10756 partition,
10757 }
10758 } else if self.parse_keyword(Keyword::DROP) {
10759 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10760 self.expect_token(&Token::LParen)?;
10761 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10762 self.expect_token(&Token::RParen)?;
10763 AlterTableOperation::DropPartitions {
10764 partitions,
10765 if_exists: true,
10766 }
10767 } else if self.parse_keyword(Keyword::PARTITION) {
10768 self.expect_token(&Token::LParen)?;
10769 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10770 self.expect_token(&Token::RParen)?;
10771 AlterTableOperation::DropPartitions {
10772 partitions,
10773 if_exists: false,
10774 }
10775 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10776 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10777 let name = self.parse_identifier()?;
10778 let drop_behavior = self.parse_optional_drop_behavior();
10779 AlterTableOperation::DropConstraint {
10780 if_exists,
10781 name,
10782 drop_behavior,
10783 }
10784 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10785 let drop_behavior = self.parse_optional_drop_behavior();
10786 AlterTableOperation::DropPrimaryKey { drop_behavior }
10787 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10788 let name = self.parse_identifier()?;
10789 let drop_behavior = self.parse_optional_drop_behavior();
10790 AlterTableOperation::DropForeignKey {
10791 name,
10792 drop_behavior,
10793 }
10794 } else if self.parse_keyword(Keyword::INDEX) {
10795 let name = self.parse_identifier()?;
10796 AlterTableOperation::DropIndex { name }
10797 } else if self.parse_keyword(Keyword::PROJECTION)
10798 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10799 {
10800 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10801 let name = self.parse_identifier()?;
10802 AlterTableOperation::DropProjection { if_exists, name }
10803 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10804 AlterTableOperation::DropClusteringKey
10805 } else {
10806 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10808 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10809 self.parse_comma_separated(Parser::parse_identifier)?
10810 } else {
10811 vec![self.parse_identifier()?]
10812 };
10813 let drop_behavior = self.parse_optional_drop_behavior();
10814 AlterTableOperation::DropColumn {
10815 has_column_keyword,
10816 column_names,
10817 if_exists,
10818 drop_behavior,
10819 }
10820 }
10821 } else if self.parse_keyword(Keyword::PARTITION) {
10822 self.expect_token(&Token::LParen)?;
10823 let before = self.parse_comma_separated(Parser::parse_expr)?;
10824 self.expect_token(&Token::RParen)?;
10825 self.expect_keyword_is(Keyword::RENAME)?;
10826 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10827 self.expect_token(&Token::LParen)?;
10828 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10829 self.expect_token(&Token::RParen)?;
10830 AlterTableOperation::RenamePartitions {
10831 old_partitions: before,
10832 new_partitions: renames,
10833 }
10834 } else if self.parse_keyword(Keyword::CHANGE) {
10835 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10837 let new_name = self.parse_identifier()?;
10838 let data_type = self.parse_data_type()?;
10839 let mut options = vec![];
10840 while let Some(option) = self.parse_optional_column_option()? {
10841 options.push(option);
10842 }
10843
10844 let column_position = self.parse_column_position()?;
10845
10846 AlterTableOperation::ChangeColumn {
10847 old_name,
10848 new_name,
10849 data_type,
10850 options,
10851 column_position,
10852 }
10853 } else if self.parse_keyword(Keyword::MODIFY) {
10854 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10856 let data_type = self.parse_data_type()?;
10857 let mut options = vec![];
10858 while let Some(option) = self.parse_optional_column_option()? {
10859 options.push(option);
10860 }
10861
10862 let column_position = self.parse_column_position()?;
10863
10864 AlterTableOperation::ModifyColumn {
10865 col_name,
10866 data_type,
10867 options,
10868 column_position,
10869 }
10870 } else if self.parse_keyword(Keyword::ALTER) {
10871 if self.peek_keyword(Keyword::SORTKEY) {
10872 self.prev_token();
10873 return self.parse_alter_sort_key();
10874 }
10875
10876 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10878 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10879
10880 let op: AlterColumnOperation = if self.parse_keywords(&[
10881 Keyword::SET,
10882 Keyword::NOT,
10883 Keyword::NULL,
10884 ]) {
10885 AlterColumnOperation::SetNotNull {}
10886 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10887 AlterColumnOperation::DropNotNull {}
10888 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10889 AlterColumnOperation::SetDefault {
10890 value: self.parse_expr()?,
10891 }
10892 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10893 AlterColumnOperation::DropDefault {}
10894 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10895 self.parse_set_data_type(true)?
10896 } else if self.parse_keyword(Keyword::TYPE) {
10897 self.parse_set_data_type(false)?
10898 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10899 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10900 Some(GeneratedAs::Always)
10901 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10902 Some(GeneratedAs::ByDefault)
10903 } else {
10904 None
10905 };
10906
10907 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10908
10909 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10910
10911 if self.peek_token_ref().token == Token::LParen {
10912 self.expect_token(&Token::LParen)?;
10913 sequence_options = Some(self.parse_create_sequence_options()?);
10914 self.expect_token(&Token::RParen)?;
10915 }
10916
10917 AlterColumnOperation::AddGenerated {
10918 generated_as,
10919 sequence_options,
10920 }
10921 } else {
10922 let message = if is_postgresql {
10923 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10924 } else {
10925 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10926 };
10927
10928 return self.expected_ref(message, self.peek_token_ref());
10929 };
10930 AlterTableOperation::AlterColumn { column_name, op }
10931 } else if self.parse_keyword(Keyword::SWAP) {
10932 self.expect_keyword_is(Keyword::WITH)?;
10933 let table_name = self.parse_object_name(false)?;
10934 AlterTableOperation::SwapWith { table_name }
10935 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10936 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10937 {
10938 let new_owner = self.parse_owner()?;
10939 AlterTableOperation::OwnerTo { new_owner }
10940 } else if dialect_of!(self is PostgreSqlDialect)
10941 && self.parse_keywords(&[Keyword::ATTACH, Keyword::PARTITION])
10942 {
10943 let partition_name = self.parse_object_name(false)?;
10944 let partition_bound = self.parse_partition_for_values()?;
10945 AlterTableOperation::AttachPartitionOf {
10946 partition_name,
10947 partition_bound,
10948 }
10949 } else if dialect_of!(self is PostgreSqlDialect)
10950 && self.parse_keywords(&[Keyword::DETACH, Keyword::PARTITION])
10951 {
10952 let partition_name = self.parse_object_name(false)?;
10953 let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
10954 let finalize = self.parse_keyword(Keyword::FINALIZE);
10955 AlterTableOperation::DetachPartitionOf {
10956 partition_name,
10957 concurrently,
10958 finalize,
10959 }
10960 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10961 && self.parse_keyword(Keyword::ATTACH)
10962 {
10963 AlterTableOperation::AttachPartition {
10964 partition: self.parse_part_or_partition()?,
10965 }
10966 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10967 && self.parse_keyword(Keyword::DETACH)
10968 {
10969 AlterTableOperation::DetachPartition {
10970 partition: self.parse_part_or_partition()?,
10971 }
10972 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10973 && self.parse_keyword(Keyword::FREEZE)
10974 {
10975 let partition = self.parse_part_or_partition()?;
10976 let with_name = if self.parse_keyword(Keyword::WITH) {
10977 self.expect_keyword_is(Keyword::NAME)?;
10978 Some(self.parse_identifier()?)
10979 } else {
10980 None
10981 };
10982 AlterTableOperation::FreezePartition {
10983 partition,
10984 with_name,
10985 }
10986 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10987 && self.parse_keyword(Keyword::UNFREEZE)
10988 {
10989 let partition = self.parse_part_or_partition()?;
10990 let with_name = if self.parse_keyword(Keyword::WITH) {
10991 self.expect_keyword_is(Keyword::NAME)?;
10992 Some(self.parse_identifier()?)
10993 } else {
10994 None
10995 };
10996 AlterTableOperation::UnfreezePartition {
10997 partition,
10998 with_name,
10999 }
11000 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
11001 self.expect_token(&Token::LParen)?;
11002 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
11003 self.expect_token(&Token::RParen)?;
11004 AlterTableOperation::ClusterBy { exprs }
11005 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
11006 AlterTableOperation::SuspendRecluster
11007 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
11008 AlterTableOperation::ResumeRecluster
11009 } else if self.parse_keyword(Keyword::LOCK) {
11010 let equals = self.consume_token(&Token::Eq);
11011 let lock = match self.parse_one_of_keywords(&[
11012 Keyword::DEFAULT,
11013 Keyword::EXCLUSIVE,
11014 Keyword::NONE,
11015 Keyword::SHARED,
11016 ]) {
11017 Some(Keyword::DEFAULT) => AlterTableLock::Default,
11018 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
11019 Some(Keyword::NONE) => AlterTableLock::None,
11020 Some(Keyword::SHARED) => AlterTableLock::Shared,
11021 _ => self.expected_ref(
11022 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
11023 self.peek_token_ref(),
11024 )?,
11025 };
11026 AlterTableOperation::Lock { equals, lock }
11027 } else if self.parse_keyword(Keyword::ALGORITHM) {
11028 let equals = self.consume_token(&Token::Eq);
11029 let algorithm = match self.parse_one_of_keywords(&[
11030 Keyword::DEFAULT,
11031 Keyword::INSTANT,
11032 Keyword::INPLACE,
11033 Keyword::COPY,
11034 ]) {
11035 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
11036 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
11037 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
11038 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
11039 _ => self.expected_ref(
11040 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
11041 self.peek_token_ref(),
11042 )?,
11043 };
11044 AlterTableOperation::Algorithm { equals, algorithm }
11045 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
11046 let equals = self.consume_token(&Token::Eq);
11047 let value = self.parse_number_value()?;
11048 AlterTableOperation::AutoIncrement { equals, value }
11049 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
11050 let identity = if self.parse_keyword(Keyword::NOTHING) {
11051 ReplicaIdentity::Nothing
11052 } else if self.parse_keyword(Keyword::FULL) {
11053 ReplicaIdentity::Full
11054 } else if self.parse_keyword(Keyword::DEFAULT) {
11055 ReplicaIdentity::Default
11056 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
11057 ReplicaIdentity::Index(self.parse_identifier()?)
11058 } else {
11059 return self.expected_ref(
11060 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
11061 self.peek_token_ref(),
11062 );
11063 };
11064
11065 AlterTableOperation::ReplicaIdentity { identity }
11066 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
11067 let name = self.parse_identifier()?;
11068 AlterTableOperation::ValidateConstraint { name }
11069 } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
11070 let tablespace_name = self.parse_identifier()?;
11071 AlterTableOperation::SetTablespace { tablespace_name }
11072 } else {
11073 let mut options =
11074 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
11075 if !options.is_empty() {
11076 AlterTableOperation::SetTblProperties {
11077 table_properties: options,
11078 }
11079 } else {
11080 options = self.parse_options(Keyword::SET)?;
11081 if !options.is_empty() {
11082 AlterTableOperation::SetOptionsParens { options }
11083 } else {
11084 return self.expected_ref(
11085 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
11086 self.peek_token_ref(),
11087 );
11088 }
11089 }
11090 };
11091 Ok(operation)
11092 }
11093
11094 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
11095 let data_type = self.parse_data_type()?;
11096 let using = if self.dialect.supports_alter_column_type_using()
11097 && self.parse_keyword(Keyword::USING)
11098 {
11099 Some(self.parse_expr()?)
11100 } else {
11101 None
11102 };
11103 Ok(AlterColumnOperation::SetDataType {
11104 data_type,
11105 using,
11106 had_set,
11107 })
11108 }
11109
11110 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
11111 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
11112 match keyword {
11113 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
11114 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
11115 unexpected_keyword => Err(ParserError::ParserError(
11117 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
11118 )),
11119 }
11120 }
11121
    /// Parses the remainder of an `ALTER ...` statement (the `ALTER` keyword has
    /// already been consumed by the caller), dispatching on the object-type
    /// keyword that follows to the dedicated sub-parser.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
            Keyword::DOMAIN,
            Keyword::TRIGGER,
            Keyword::EXTENSION,
            Keyword::PROCEDURE,
            Keyword::DEFAULT,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // Back up over `SCHEMA` and the preceding `ALTER` so the
                // sub-parser can re-read the statement from its start.
                // NOTE(review): assumes `parse_alter_schema` expects both
                // keywords to still be pending — confirm against its definition.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // `ALTER ICEBERG TABLE ...` is handled as an ALTER TABLE with
                // the iceberg flag set.
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::DEFAULT => self.parse_alter_default_privileges().map(Into::into),
            Keyword::INDEX => {
                // `ALTER INDEX <name> { RENAME TO <new> | SET TABLESPACE <ts> }`
                let index_name = self.parse_object_name(false)?;
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
                    let tablespace_name = self.parse_identifier()?;
                    AlterIndexOperation::SetTablespace { tablespace_name }
                } else {
                    return self.expected_ref(
                        "RENAME or SET TABLESPACE after ALTER INDEX",
                        self.peek_token_ref(),
                    );
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            // FUNCTION / AGGREGATE / PROCEDURE share one grammar, distinguished
            // by the kind passed to `parse_alter_function`.
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::PROCEDURE => self.parse_alter_function(AlterFunctionKind::Procedure),
            Keyword::OPERATOR => {
                // OPERATOR FAMILY / OPERATOR CLASS / bare OPERATOR.
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            Keyword::DOMAIN => self.parse_alter_domain(),
            Keyword::TRIGGER => self.parse_alter_trigger(),
            Keyword::EXTENSION => self.parse_alter_extension(),
            // `expect_one_of_keywords` guarantees one of the keywords above;
            // this arm only keeps the match exhaustive.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR, DOMAIN, TRIGGER, EXTENSION, PROCEDURE, DEFAULT}}, got {unexpected_keyword:?}"),
            )),
        }
    }
11209
11210 fn parse_alter_aggregate_signature(
11211 &mut self,
11212 ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
11213 let name = self.parse_object_name(false)?;
11214 self.expect_token(&Token::LParen)?;
11215
11216 if self.consume_token(&Token::Mul) {
11217 self.expect_token(&Token::RParen)?;
11218 return Ok((
11219 FunctionDesc {
11220 name,
11221 args: Some(vec![]),
11222 },
11223 true,
11224 None,
11225 ));
11226 }
11227
11228 let args =
11229 if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
11230 vec![]
11231 } else {
11232 self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
11233 };
11234
11235 let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11236 Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
11237 } else {
11238 None
11239 };
11240
11241 self.expect_token(&Token::RParen)?;
11242 Ok((
11243 FunctionDesc {
11244 name,
11245 args: Some(args),
11246 },
11247 false,
11248 aggregate_order_by,
11249 ))
11250 }
11251
    /// Attempts to parse a single `ALTER FUNCTION` action (null-input behavior,
    /// volatility, leakproof-ness, security, parallel mode, cost/rows/support,
    /// or a `SET`/`RESET` of a configuration parameter).
    ///
    /// Returns `Ok(None)` when the upcoming tokens start no known action, so the
    /// caller can collect actions in a loop until the list ends. Branch order
    /// matters: multi-word prefixes are tried before their shorter relatives
    /// (e.g. `EXTERNAL SECURITY` before `SECURITY`, `NOT LEAKPROOF` before
    /// `LEAKPROOF`).
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // The only action starting with NOT is `NOT LEAKPROOF`.
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // `EXTERNAL SECURITY { DEFINER | INVOKER }` — records external: true.
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            // Bare `SECURITY { DEFINER | INVOKER }` — same shape, external: false.
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // `SET <param> { FROM CURRENT | { = | TO } { DEFAULT | <values> } }`
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // Either `=` or `TO` must separate the parameter from its value.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // `RESET { ALL | <param> }`
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // No recognized action; the token stream is left untouched.
            None
        };

        Ok(action)
    }
11361
11362 fn parse_alter_function_actions(
11363 &mut self,
11364 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11365 let mut actions = vec![];
11366 while let Some(action) = self.parse_alter_function_action()? {
11367 actions.push(action);
11368 }
11369 if actions.is_empty() {
11370 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11371 }
11372 let restrict = self.parse_keyword(Keyword::RESTRICT);
11373 Ok((actions, restrict))
11374 }
11375
11376 pub fn parse_alter_function(
11378 &mut self,
11379 kind: AlterFunctionKind,
11380 ) -> Result<Statement, ParserError> {
11381 let (function, aggregate_star, aggregate_order_by) = match kind {
11382 AlterFunctionKind::Function | AlterFunctionKind::Procedure => {
11383 (self.parse_function_desc()?, false, None)
11384 }
11385 AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
11386 };
11387
11388 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11389 let new_name = self.parse_identifier()?;
11390 AlterFunctionOperation::RenameTo { new_name }
11391 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11392 AlterFunctionOperation::OwnerTo(self.parse_owner()?)
11393 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11394 AlterFunctionOperation::SetSchema {
11395 schema_name: self.parse_object_name(false)?,
11396 }
11397 } else if matches!(
11398 kind,
11399 AlterFunctionKind::Function | AlterFunctionKind::Procedure
11400 ) && self.parse_keyword(Keyword::NO)
11401 {
11402 if !self.parse_keyword(Keyword::DEPENDS) {
11403 return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
11404 }
11405 self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
11406 AlterFunctionOperation::DependsOnExtension {
11407 no: true,
11408 extension_name: self.parse_object_name(false)?,
11409 }
11410 } else if matches!(
11411 kind,
11412 AlterFunctionKind::Function | AlterFunctionKind::Procedure
11413 ) && self.parse_keyword(Keyword::DEPENDS)
11414 {
11415 self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
11416 AlterFunctionOperation::DependsOnExtension {
11417 no: false,
11418 extension_name: self.parse_object_name(false)?,
11419 }
11420 } else if matches!(
11421 kind,
11422 AlterFunctionKind::Function | AlterFunctionKind::Procedure
11423 ) {
11424 let (actions, restrict) = self.parse_alter_function_actions()?;
11425 AlterFunctionOperation::Actions { actions, restrict }
11426 } else {
11427 return self.expected_ref(
11428 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
11429 self.peek_token_ref(),
11430 );
11431 };
11432
11433 Ok(Statement::AlterFunction(AlterFunction {
11434 kind,
11435 function,
11436 aggregate_order_by,
11437 aggregate_star,
11438 operation,
11439 }))
11440 }
11441
    /// Parses the tail of an `ALTER DOMAIN <name> ...` statement, producing an
    /// `AlterDomain` statement with one of the supported operations.
    pub fn parse_alter_domain(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD only accepts a constraint, optionally marked NOT VALID.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterDomainOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else {
                return self.expected_ref("constraint after ADD", self.peek_token_ref());
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::CONSTRAINT]) {
            // `DROP CONSTRAINT [IF EXISTS] <name> [CASCADE | RESTRICT]`
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterDomainOperation::DropConstraint {
                if_exists,
                name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
            AlterDomainOperation::DropDefault
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::CONSTRAINT]) {
            let old_name = self.parse_identifier()?;
            self.expect_keyword_is(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameConstraint { old_name, new_name }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterDomainOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterDomainOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
            AlterDomainOperation::SetDefault {
                default: self.parse_expr()?,
            }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterDomainOperation::ValidateConstraint { name }
        } else {
            return self.expected_ref(
                "ADD, DROP, RENAME, OWNER TO, SET, VALIDATE after ALTER DOMAIN",
                self.peek_token_ref(),
            );
        };

        Ok(AlterDomain { name, operation }.into())
    }
11497
11498 pub fn parse_alter_trigger(&mut self) -> Result<Statement, ParserError> {
11500 let name = self.parse_identifier()?;
11501 self.expect_keyword_is(Keyword::ON)?;
11502 let table_name = self.parse_object_name(false)?;
11503
11504 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11505 let new_name = self.parse_identifier()?;
11506 AlterTriggerOperation::RenameTo { new_name }
11507 } else {
11508 return self.expected_ref(
11509 "RENAME TO after ALTER TRIGGER ... ON ...",
11510 self.peek_token_ref(),
11511 );
11512 };
11513
11514 Ok(AlterTrigger {
11515 name,
11516 table_name,
11517 operation,
11518 }
11519 .into())
11520 }
11521
11522 pub fn parse_alter_extension(&mut self) -> Result<Statement, ParserError> {
11524 let name = self.parse_identifier()?;
11525
11526 let operation = if self.parse_keyword(Keyword::UPDATE) {
11527 let version = if self.parse_keyword(Keyword::TO) {
11528 Some(self.parse_identifier()?)
11529 } else {
11530 None
11531 };
11532 AlterExtensionOperation::UpdateTo { version }
11533 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11534 AlterExtensionOperation::SetSchema {
11535 schema_name: self.parse_object_name(false)?,
11536 }
11537 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11538 AlterExtensionOperation::OwnerTo(self.parse_owner()?)
11539 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11540 let new_name = self.parse_identifier()?;
11541 AlterExtensionOperation::RenameTo { new_name }
11542 } else {
11543 return self.expected_ref(
11544 "UPDATE, SET SCHEMA, OWNER TO, or RENAME TO after ALTER EXTENSION",
11545 self.peek_token_ref(),
11546 );
11547 };
11548
11549 Ok(AlterExtension { name, operation }.into())
11550 }
11551
    /// Parses an `ALTER TABLE` statement body (the `ALTER TABLE` keywords
    /// have already been consumed by the caller).
    ///
    /// `iceberg`: when true, the statement is recorded with
    /// `AlterTableType::Iceberg` in the resulting AST; the parsing itself
    /// is otherwise identical.
    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        // Optional ONLY modifier before the table name.
        let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
        let on_cluster = self.parse_optional_on_cluster()?;
        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;

        // Optional trailing `[SET] LOCATION <ident>` clause; `has_set`
        // records whether the SET keyword was present so the statement can
        // round-trip verbatim.
        let mut location = None;
        if self.parse_keyword(Keyword::LOCATION) {
            location = Some(HiveSetLocation {
                has_set: false,
                location: self.parse_identifier()?,
            });
        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
            location = Some(HiveSetLocation {
                has_set: true,
                location: self.parse_identifier()?,
            });
        }

        // Capture the statement's end token: the upcoming `;` if one
        // follows, otherwise the last token already consumed.
        let end_token = if self.peek_token_ref().token == Token::SemiColon {
            self.peek_token_ref().clone()
        } else {
            self.get_current_token().clone()
        };

        Ok(AlterTable {
            name: table_name,
            if_exists,
            only,
            operations,
            location,
            on_cluster,
            table_type: if iceberg {
                Some(AlterTableType::Iceberg)
            } else {
                None
            },
            end_token: AttachedToken(end_token),
        }
        .into())
    }
11595
11596 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11598 let name = self.parse_object_name(false)?;
11599 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11600
11601 let with_options = self.parse_options(Keyword::WITH)?;
11602
11603 self.expect_keyword_is(Keyword::AS)?;
11604 let query = self.parse_query()?;
11605
11606 Ok(Statement::AlterView {
11607 name,
11608 columns,
11609 query,
11610 with_options,
11611 })
11612 }
11613
    /// Parses an `ALTER TYPE <name>` statement body (the `ALTER TYPE`
    /// keywords have already been consumed by the caller), dispatching on
    /// the operation keywords that follow the type name.
    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            // ALTER TYPE <name> RENAME TO <new_name>
            let new_name = self.parse_identifier()?;
            AlterTypeOperation::Rename(AlterTypeRename { new_name })
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
            // ALTER TYPE <name> RENAME VALUE <existing> TO <new>
            let existing_enum_value = self.parse_identifier()?;
            self.expect_keyword(Keyword::TO)?;
            let new_enum_value = self.parse_identifier()?;
            AlterTypeOperation::RenameValue(AlterTypeRenameValue {
                from: existing_enum_value,
                to: new_enum_value,
            })
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::ATTRIBUTE]) {
            // ALTER TYPE <name> RENAME ATTRIBUTE <old> TO <new> [drop behavior]
            let old_name = self.parse_identifier()?;
            self.expect_keyword(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::RenameAttribute {
                old_name,
                new_name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
            // ALTER TYPE <name> ADD VALUE [IF NOT EXISTS] <value>
            //     [BEFORE <value> | AFTER <value>]
            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
            let new_enum_value = self.parse_identifier()?;
            let position = if self.parse_keyword(Keyword::BEFORE) {
                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
            } else if self.parse_keyword(Keyword::AFTER) {
                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
            } else {
                None
            };
            AlterTypeOperation::AddValue(AlterTypeAddValue {
                if_not_exists,
                value: new_enum_value,
                position,
            })
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::ATTRIBUTE]) {
            // ALTER TYPE <name> ADD ATTRIBUTE <attr> <type> [COLLATE <collation>]
            //     [drop behavior]
            let attr_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::AddAttribute {
                name: attr_name,
                data_type,
                collation,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::ATTRIBUTE]) {
            // ALTER TYPE <name> DROP ATTRIBUTE [IF EXISTS] <attr> [drop behavior]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let attr_name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::DropAttribute {
                if_exists,
                name: attr_name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::ALTER, Keyword::ATTRIBUTE]) {
            // ALTER TYPE <name> ALTER ATTRIBUTE <attr> [SET DATA] TYPE <type>
            //     [COLLATE <collation>] [drop behavior]
            let attr_name = self.parse_identifier()?;
            // `SET DATA` is optional filler before the mandatory TYPE keyword.
            let _ = self.parse_keywords(&[Keyword::SET, Keyword::DATA]);
            self.expect_keyword(Keyword::TYPE)?;
            let data_type = self.parse_data_type()?;
            let collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterTypeOperation::AlterAttribute {
                name: attr_name,
                data_type,
                collation,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // ALTER TYPE <name> OWNER TO <owner>
            let new_owner = self.parse_owner()?;
            AlterTypeOperation::OwnerTo { new_owner }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            // ALTER TYPE <name> SET SCHEMA <schema>
            let new_schema = self.parse_object_name(false)?;
            AlterTypeOperation::SetSchema { new_schema }
        } else {
            return self.expected_ref(
                "{RENAME TO | RENAME VALUE | RENAME ATTRIBUTE | ADD VALUE | \
                ADD ATTRIBUTE | DROP ATTRIBUTE | ALTER ATTRIBUTE | OWNER TO | SET SCHEMA}",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterType(AlterType { name, operation }))
    }
11714
11715 pub fn parse_alter_default_privileges(
11722 &mut self,
11723 ) -> Result<AlterDefaultPrivileges, ParserError> {
11724 self.expect_keyword(Keyword::PRIVILEGES)?;
11725
11726 let for_roles = if self.parse_keyword(Keyword::FOR) {
11727 self.expect_one_of_keywords(&[Keyword::ROLE, Keyword::USER])?;
11729 self.parse_comma_separated(Parser::parse_identifier)?
11730 } else {
11731 Vec::new()
11732 };
11733
11734 let in_schemas = if self.parse_keywords(&[Keyword::IN, Keyword::SCHEMA]) {
11735 self.parse_comma_separated(Parser::parse_identifier)?
11736 } else {
11737 Vec::new()
11738 };
11739
11740 let action = self.parse_alter_default_privileges_action()?;
11741
11742 Ok(AlterDefaultPrivileges {
11743 for_roles,
11744 in_schemas,
11745 action,
11746 })
11747 }
11748
11749 fn parse_alter_default_privileges_action(
11750 &mut self,
11751 ) -> Result<AlterDefaultPrivilegesAction, ParserError> {
11752 let kw = self.expect_one_of_keywords(&[Keyword::GRANT, Keyword::REVOKE])?;
11753 match kw {
11754 Keyword::GRANT => {
11755 let privileges = self.parse_alter_default_privileges_privileges()?;
11756 self.expect_keyword(Keyword::ON)?;
11757 let object_type = self.parse_alter_default_privileges_object_type()?;
11758 self.expect_keyword(Keyword::TO)?;
11759 let grantees = self.parse_grantees()?;
11760 let with_grant_option =
11761 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
11762 Ok(AlterDefaultPrivilegesAction::Grant {
11763 privileges,
11764 object_type,
11765 grantees,
11766 with_grant_option,
11767 })
11768 }
11769 Keyword::REVOKE => {
11770 let grant_option_for =
11771 self.parse_keywords(&[Keyword::GRANT, Keyword::OPTION, Keyword::FOR]);
11772 let privileges = self.parse_alter_default_privileges_privileges()?;
11773 self.expect_keyword(Keyword::ON)?;
11774 let object_type = self.parse_alter_default_privileges_object_type()?;
11775 self.expect_keyword(Keyword::FROM)?;
11776 let grantees = self.parse_grantees()?;
11777 let cascade = self.parse_cascade_option();
11778 Ok(AlterDefaultPrivilegesAction::Revoke {
11779 grant_option_for,
11780 privileges,
11781 object_type,
11782 grantees,
11783 cascade,
11784 })
11785 }
11786 unexpected_keyword => Err(ParserError::ParserError(format!(
11787 "Internal parser error: expected GRANT or REVOKE, got {unexpected_keyword:?}"
11788 ))),
11789 }
11790 }
11791
11792 fn parse_alter_default_privileges_privileges(&mut self) -> Result<Privileges, ParserError> {
11793 if self.parse_keyword(Keyword::ALL) {
11794 Ok(Privileges::All {
11795 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
11796 })
11797 } else {
11798 Ok(Privileges::Actions(self.parse_actions_list()?))
11799 }
11800 }
11801
11802 fn parse_alter_default_privileges_object_type(
11803 &mut self,
11804 ) -> Result<AlterDefaultPrivilegesObjectType, ParserError> {
11805 let kw = self.expect_one_of_keywords(&[
11806 Keyword::TABLES,
11807 Keyword::SEQUENCES,
11808 Keyword::FUNCTIONS,
11809 Keyword::ROUTINES,
11810 Keyword::TYPES,
11811 Keyword::SCHEMAS,
11812 ])?;
11813 match kw {
11814 Keyword::TABLES => Ok(AlterDefaultPrivilegesObjectType::Tables),
11815 Keyword::SEQUENCES => Ok(AlterDefaultPrivilegesObjectType::Sequences),
11816 Keyword::FUNCTIONS => Ok(AlterDefaultPrivilegesObjectType::Functions),
11817 Keyword::ROUTINES => Ok(AlterDefaultPrivilegesObjectType::Routines),
11818 Keyword::TYPES => Ok(AlterDefaultPrivilegesObjectType::Types),
11819 Keyword::SCHEMAS => Ok(AlterDefaultPrivilegesObjectType::Schemas),
11820 unexpected_keyword => Err(ParserError::ParserError(format!(
11821 "Internal parser error: expected one of {{TABLES, SEQUENCES, FUNCTIONS, ROUTINES, TYPES, SCHEMAS}}, got {unexpected_keyword:?}"
11822 ))),
11823 }
11824 }
11825
11826 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11830 let name = self.parse_object_name(false)?;
11831 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11832 AlterCollationOperation::RenameTo {
11833 new_name: self.parse_identifier()?,
11834 }
11835 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11836 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11837 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11838 AlterCollationOperation::SetSchema {
11839 schema_name: self.parse_object_name(false)?,
11840 }
11841 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11842 AlterCollationOperation::RefreshVersion
11843 } else {
11844 return self.expected_ref(
11845 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11846 self.peek_token_ref(),
11847 );
11848 };
11849
11850 Ok(AlterCollation { name, operation })
11851 }
11852
11853 pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
11857 let name = self.parse_operator_name()?;
11858
11859 self.expect_token(&Token::LParen)?;
11861
11862 let left_type = if self.parse_keyword(Keyword::NONE) {
11863 None
11864 } else {
11865 Some(self.parse_data_type()?)
11866 };
11867
11868 self.expect_token(&Token::Comma)?;
11869 let right_type = self.parse_data_type()?;
11870 self.expect_token(&Token::RParen)?;
11871
11872 let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11874 let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
11875 Owner::CurrentRole
11876 } else if self.parse_keyword(Keyword::CURRENT_USER) {
11877 Owner::CurrentUser
11878 } else if self.parse_keyword(Keyword::SESSION_USER) {
11879 Owner::SessionUser
11880 } else {
11881 Owner::Ident(self.parse_identifier()?)
11882 };
11883 AlterOperatorOperation::OwnerTo(owner)
11884 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11885 let schema_name = self.parse_object_name(false)?;
11886 AlterOperatorOperation::SetSchema { schema_name }
11887 } else if self.parse_keyword(Keyword::SET) {
11888 self.expect_token(&Token::LParen)?;
11889
11890 let mut options = Vec::new();
11891 loop {
11892 let keyword = self.expect_one_of_keywords(&[
11893 Keyword::RESTRICT,
11894 Keyword::JOIN,
11895 Keyword::COMMUTATOR,
11896 Keyword::NEGATOR,
11897 Keyword::HASHES,
11898 Keyword::MERGES,
11899 ])?;
11900
11901 match keyword {
11902 Keyword::RESTRICT => {
11903 self.expect_token(&Token::Eq)?;
11904 let proc_name = if self.parse_keyword(Keyword::NONE) {
11905 None
11906 } else {
11907 Some(self.parse_object_name(false)?)
11908 };
11909 options.push(OperatorOption::Restrict(proc_name));
11910 }
11911 Keyword::JOIN => {
11912 self.expect_token(&Token::Eq)?;
11913 let proc_name = if self.parse_keyword(Keyword::NONE) {
11914 None
11915 } else {
11916 Some(self.parse_object_name(false)?)
11917 };
11918 options.push(OperatorOption::Join(proc_name));
11919 }
11920 Keyword::COMMUTATOR => {
11921 self.expect_token(&Token::Eq)?;
11922 let op_name = self.parse_operator_name()?;
11923 options.push(OperatorOption::Commutator(op_name));
11924 }
11925 Keyword::NEGATOR => {
11926 self.expect_token(&Token::Eq)?;
11927 let op_name = self.parse_operator_name()?;
11928 options.push(OperatorOption::Negator(op_name));
11929 }
11930 Keyword::HASHES => {
11931 options.push(OperatorOption::Hashes);
11932 }
11933 Keyword::MERGES => {
11934 options.push(OperatorOption::Merges);
11935 }
11936 unexpected_keyword => return Err(ParserError::ParserError(
11937 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
11938 )),
11939 }
11940
11941 if !self.consume_token(&Token::Comma) {
11942 break;
11943 }
11944 }
11945
11946 self.expect_token(&Token::RParen)?;
11947 AlterOperatorOperation::Set { options }
11948 } else {
11949 return self.expected_ref(
11950 "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
11951 self.peek_token_ref(),
11952 );
11953 };
11954
11955 Ok(AlterOperator {
11956 name,
11957 left_type,
11958 right_type,
11959 operation,
11960 })
11961 }
11962
11963 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11965 let strategy_number = self.parse_literal_uint()?;
11966 let operator_name = self.parse_operator_name()?;
11967
11968 self.expect_token(&Token::LParen)?;
11970 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11971 self.expect_token(&Token::RParen)?;
11972
11973 let purpose = if self.parse_keyword(Keyword::FOR) {
11975 if self.parse_keyword(Keyword::SEARCH) {
11976 Some(OperatorPurpose::ForSearch)
11977 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11978 let sort_family = self.parse_object_name(false)?;
11979 Some(OperatorPurpose::ForOrderBy { sort_family })
11980 } else {
11981 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11982 }
11983 } else {
11984 None
11985 };
11986
11987 Ok(OperatorFamilyItem::Operator {
11988 strategy_number,
11989 operator_name,
11990 op_types,
11991 purpose,
11992 })
11993 }
11994
11995 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11997 let support_number = self.parse_literal_uint()?;
11998
11999 let op_types =
12001 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
12002 let types = self.parse_comma_separated(Parser::parse_data_type)?;
12003 self.expect_token(&Token::RParen)?;
12004 Some(types)
12005 } else if self.consume_token(&Token::LParen) {
12006 self.expect_token(&Token::RParen)?;
12007 Some(vec![])
12008 } else {
12009 None
12010 };
12011
12012 let function_name = self.parse_object_name(false)?;
12013
12014 let argument_types = if self.consume_token(&Token::LParen) {
12016 if self.peek_token_ref().token == Token::RParen {
12017 self.expect_token(&Token::RParen)?;
12018 vec![]
12019 } else {
12020 let types = self.parse_comma_separated(Parser::parse_data_type)?;
12021 self.expect_token(&Token::RParen)?;
12022 types
12023 }
12024 } else {
12025 vec![]
12026 };
12027
12028 Ok(OperatorFamilyItem::Function {
12029 support_number,
12030 op_types,
12031 function_name,
12032 argument_types,
12033 })
12034 }
12035
12036 fn parse_operator_family_drop_operator(
12038 &mut self,
12039 ) -> Result<OperatorFamilyDropItem, ParserError> {
12040 let strategy_number = self.parse_literal_uint()?;
12041
12042 self.expect_token(&Token::LParen)?;
12044 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
12045 self.expect_token(&Token::RParen)?;
12046
12047 Ok(OperatorFamilyDropItem::Operator {
12048 strategy_number,
12049 op_types,
12050 })
12051 }
12052
12053 fn parse_operator_family_drop_function(
12055 &mut self,
12056 ) -> Result<OperatorFamilyDropItem, ParserError> {
12057 let support_number = self.parse_literal_uint()?;
12058
12059 self.expect_token(&Token::LParen)?;
12061 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
12062 self.expect_token(&Token::RParen)?;
12063
12064 Ok(OperatorFamilyDropItem::Function {
12065 support_number,
12066 op_types,
12067 })
12068 }
12069
12070 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
12072 if self.parse_keyword(Keyword::OPERATOR) {
12073 self.parse_operator_family_add_operator()
12074 } else if self.parse_keyword(Keyword::FUNCTION) {
12075 self.parse_operator_family_add_function()
12076 } else {
12077 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
12078 }
12079 }
12080
12081 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
12083 if self.parse_keyword(Keyword::OPERATOR) {
12084 self.parse_operator_family_drop_operator()
12085 } else if self.parse_keyword(Keyword::FUNCTION) {
12086 self.parse_operator_family_drop_function()
12087 } else {
12088 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
12089 }
12090 }
12091
12092 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
12095 let name = self.parse_object_name(false)?;
12096 self.expect_keyword(Keyword::USING)?;
12097 let using = self.parse_identifier()?;
12098
12099 let operation = if self.parse_keyword(Keyword::ADD) {
12100 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
12101 AlterOperatorFamilyOperation::Add { items }
12102 } else if self.parse_keyword(Keyword::DROP) {
12103 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
12104 AlterOperatorFamilyOperation::Drop { items }
12105 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12106 let new_name = self.parse_object_name(false)?;
12107 AlterOperatorFamilyOperation::RenameTo { new_name }
12108 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12109 let owner = self.parse_owner()?;
12110 AlterOperatorFamilyOperation::OwnerTo(owner)
12111 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
12112 let schema_name = self.parse_object_name(false)?;
12113 AlterOperatorFamilyOperation::SetSchema { schema_name }
12114 } else {
12115 return self.expected_ref(
12116 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
12117 self.peek_token_ref(),
12118 );
12119 };
12120
12121 Ok(AlterOperatorFamily {
12122 name,
12123 using,
12124 operation,
12125 })
12126 }
12127
12128 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
12132 let name = self.parse_object_name(false)?;
12133 self.expect_keyword(Keyword::USING)?;
12134 let using = self.parse_identifier()?;
12135
12136 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12137 let new_name = self.parse_object_name(false)?;
12138 AlterOperatorClassOperation::RenameTo { new_name }
12139 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12140 let owner = self.parse_owner()?;
12141 AlterOperatorClassOperation::OwnerTo(owner)
12142 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
12143 let schema_name = self.parse_object_name(false)?;
12144 AlterOperatorClassOperation::SetSchema { schema_name }
12145 } else {
12146 return self.expected_ref(
12147 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
12148 self.peek_token_ref(),
12149 );
12150 };
12151
12152 Ok(AlterOperatorClass {
12153 name,
12154 using,
12155 operation,
12156 })
12157 }
12158
    /// Parses a complete `ALTER SCHEMA` statement. Unlike the sibling
    /// `parse_alter_*` helpers, this method consumes the leading
    /// `ALTER SCHEMA` keywords itself.
    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
            // Rewind one token so `parse_options` can consume the OPTIONS
            // keyword (and its parenthesized list) itself.
            self.prev_token();
            let options = self.parse_options(Keyword::OPTIONS)?;
            AlterSchemaOperation::SetOptionsParens { options }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
            let collate = self.parse_expr()?;
            AlterSchemaOperation::SetDefaultCollate { collate }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            // An OPTIONS(...) list after the replica name is optional.
            let options = if self.peek_keyword(Keyword::OPTIONS) {
                Some(self.parse_options(Keyword::OPTIONS)?)
            } else {
                None
            };
            AlterSchemaOperation::AddReplica { replica, options }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            AlterSchemaOperation::DropReplica { replica }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterSchemaOperation::Rename { name: new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterSchemaOperation::OwnerTo { owner }
        } else {
            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
        };
        // The AST supports multiple operations; this parser currently
        // produces exactly one per statement.
        Ok(Statement::AlterSchema(AlterSchema {
            name,
            if_exists,
            operations: vec![operation],
        }))
    }
12199
12200 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
12203 let object_name = self.parse_object_name(false)?;
12204 if self.peek_token_ref().token == Token::LParen {
12205 match self.parse_function(object_name)? {
12206 Expr::Function(f) => Ok(Statement::Call(f)),
12207 other => parser_err!(
12208 format!("Expected a simple procedure call but found: {other}"),
12209 self.peek_token_ref().span.start
12210 ),
12211 }
12212 } else {
12213 Ok(Statement::Call(Function {
12214 name: object_name,
12215 uses_odbc_syntax: false,
12216 parameters: FunctionArguments::None,
12217 args: FunctionArguments::None,
12218 over: None,
12219 filter: None,
12220 null_treatment: None,
12221 within_group: vec![],
12222 }))
12223 }
12224 }
12225
    /// Parses a `COPY` statement body (the `COPY` keyword has already been
    /// consumed by the caller).
    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
        // The source is either a parenthesized query or a table name with
        // an optional column list.
        let source;
        if self.consume_token(&Token::LParen) {
            source = CopySource::Query(self.parse_query()?);
            self.expect_token(&Token::RParen)?;
        } else {
            let table_name = self.parse_object_name(false)?;
            let columns = self.parse_parenthesized_column_list(Optional, false)?;
            source = CopySource::Table {
                table_name,
                columns,
            };
        }
        // Direction: FROM loads into the source, TO exports from it.
        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
            Some(Keyword::FROM) => false,
            Some(Keyword::TO) => true,
            _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
        };
        if !to {
            // A query source is only valid with TO; reject `COPY (query) FROM`.
            if let CopySource::Query(_) = source {
                return Err(ParserError::ParserError(
                    "COPY ... FROM does not support query as a source".to_string(),
                ));
            }
        }
        let target = if self.parse_keyword(Keyword::STDIN) {
            CopyTarget::Stdin
        } else if self.parse_keyword(Keyword::STDOUT) {
            CopyTarget::Stdout
        } else if self.parse_keyword(Keyword::PROGRAM) {
            CopyTarget::Program {
                command: self.parse_literal_string()?,
            }
        } else {
            CopyTarget::File {
                filename: self.parse_literal_string()?,
            }
        };
        // Optional WITH keyword before the parenthesized option list.
        let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
        if self.consume_token(&Token::LParen) {
            options = self.parse_comma_separated(Parser::parse_copy_option)?;
            self.expect_token(&Token::RParen)?;
        }
        // Any number of legacy (non-parenthesized) options may follow;
        // `maybe_parse` backtracks when the next tokens are not an option.
        let mut legacy_options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            legacy_options.push(opt);
        }
        // For `COPY ... FROM STDIN`, the inline data follows the `;` and is
        // consumed as tab-separated values.
        let values =
            if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
                self.expect_token(&Token::SemiColon)?;
                self.parse_tsv()
            } else {
                vec![]
            };
        Ok(Statement::Copy {
            source,
            to,
            target,
            options,
            legacy_options,
            values,
        })
    }
12293
12294 fn parse_open(&mut self) -> Result<Statement, ParserError> {
12296 self.expect_keyword(Keyword::OPEN)?;
12297 Ok(Statement::Open(OpenStatement {
12298 cursor_name: self.parse_identifier()?,
12299 }))
12300 }
12301
12302 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
12304 let cursor = if self.parse_keyword(Keyword::ALL) {
12305 CloseCursor::All
12306 } else {
12307 let name = self.parse_identifier()?;
12308
12309 CloseCursor::Specific { name }
12310 };
12311
12312 Ok(Statement::Close { cursor })
12313 }
12314
12315 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
12316 let ret = match self.parse_one_of_keywords(&[
12317 Keyword::FORMAT,
12318 Keyword::FREEZE,
12319 Keyword::DELIMITER,
12320 Keyword::NULL,
12321 Keyword::HEADER,
12322 Keyword::QUOTE,
12323 Keyword::ESCAPE,
12324 Keyword::FORCE_QUOTE,
12325 Keyword::FORCE_NOT_NULL,
12326 Keyword::FORCE_NULL,
12327 Keyword::ENCODING,
12328 ]) {
12329 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
12330 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
12331 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
12332 Some(Keyword::FALSE)
12333 )),
12334 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
12335 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
12336 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
12337 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
12338 Some(Keyword::FALSE)
12339 )),
12340 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
12341 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
12342 Some(Keyword::FORCE_QUOTE) => {
12343 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
12344 }
12345 Some(Keyword::FORCE_NOT_NULL) => {
12346 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
12347 }
12348 Some(Keyword::FORCE_NULL) => {
12349 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
12350 }
12351 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
12352 _ => self.expected_ref("option", self.peek_token_ref())?,
12353 };
12354 Ok(ret)
12355 }
12356
12357 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
12358 if self.parse_keyword(Keyword::FORMAT) {
12360 let _ = self.parse_keyword(Keyword::AS);
12361 }
12362
12363 let ret = match self.parse_one_of_keywords(&[
12364 Keyword::ACCEPTANYDATE,
12365 Keyword::ACCEPTINVCHARS,
12366 Keyword::ADDQUOTES,
12367 Keyword::ALLOWOVERWRITE,
12368 Keyword::BINARY,
12369 Keyword::BLANKSASNULL,
12370 Keyword::BZIP2,
12371 Keyword::CLEANPATH,
12372 Keyword::COMPUPDATE,
12373 Keyword::CREDENTIALS,
12374 Keyword::CSV,
12375 Keyword::DATEFORMAT,
12376 Keyword::DELIMITER,
12377 Keyword::EMPTYASNULL,
12378 Keyword::ENCRYPTED,
12379 Keyword::ESCAPE,
12380 Keyword::EXTENSION,
12381 Keyword::FIXEDWIDTH,
12382 Keyword::GZIP,
12383 Keyword::HEADER,
12384 Keyword::IAM_ROLE,
12385 Keyword::IGNOREHEADER,
12386 Keyword::JSON,
12387 Keyword::MANIFEST,
12388 Keyword::MAXFILESIZE,
12389 Keyword::NULL,
12390 Keyword::PARALLEL,
12391 Keyword::PARQUET,
12392 Keyword::PARTITION,
12393 Keyword::REGION,
12394 Keyword::REMOVEQUOTES,
12395 Keyword::ROWGROUPSIZE,
12396 Keyword::STATUPDATE,
12397 Keyword::TIMEFORMAT,
12398 Keyword::TRUNCATECOLUMNS,
12399 Keyword::ZSTD,
12400 ]) {
12401 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
12402 Some(Keyword::ACCEPTINVCHARS) => {
12403 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12405 Some(self.parse_literal_string()?)
12406 } else {
12407 None
12408 };
12409 CopyLegacyOption::AcceptInvChars(ch)
12410 }
12411 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
12412 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
12413 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
12414 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
12415 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
12416 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
12417 Some(Keyword::COMPUPDATE) => {
12418 let preset = self.parse_keyword(Keyword::PRESET);
12419 let enabled = match self.parse_one_of_keywords(&[
12420 Keyword::TRUE,
12421 Keyword::FALSE,
12422 Keyword::ON,
12423 Keyword::OFF,
12424 ]) {
12425 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12426 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12427 _ => None,
12428 };
12429 CopyLegacyOption::CompUpdate { preset, enabled }
12430 }
12431 Some(Keyword::CREDENTIALS) => {
12432 CopyLegacyOption::Credentials(self.parse_literal_string()?)
12433 }
12434 Some(Keyword::CSV) => CopyLegacyOption::Csv({
12435 let mut opts = vec![];
12436 while let Some(opt) =
12437 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
12438 {
12439 opts.push(opt);
12440 }
12441 opts
12442 }),
12443 Some(Keyword::DATEFORMAT) => {
12444 let _ = self.parse_keyword(Keyword::AS);
12445 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12446 Some(self.parse_literal_string()?)
12447 } else {
12448 None
12449 };
12450 CopyLegacyOption::DateFormat(fmt)
12451 }
12452 Some(Keyword::DELIMITER) => {
12453 let _ = self.parse_keyword(Keyword::AS);
12454 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
12455 }
12456 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
12457 Some(Keyword::ENCRYPTED) => {
12458 let auto = self.parse_keyword(Keyword::AUTO);
12459 CopyLegacyOption::Encrypted { auto }
12460 }
12461 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
12462 Some(Keyword::EXTENSION) => {
12463 let ext = self.parse_literal_string()?;
12464 CopyLegacyOption::Extension(ext)
12465 }
12466 Some(Keyword::FIXEDWIDTH) => {
12467 let spec = self.parse_literal_string()?;
12468 CopyLegacyOption::FixedWidth(spec)
12469 }
12470 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
12471 Some(Keyword::HEADER) => CopyLegacyOption::Header,
12472 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
12473 Some(Keyword::IGNOREHEADER) => {
12474 let _ = self.parse_keyword(Keyword::AS);
12475 let num_rows = self.parse_literal_uint()?;
12476 CopyLegacyOption::IgnoreHeader(num_rows)
12477 }
12478 Some(Keyword::JSON) => {
12479 let _ = self.parse_keyword(Keyword::AS);
12480 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12481 Some(self.parse_literal_string()?)
12482 } else {
12483 None
12484 };
12485 CopyLegacyOption::Json(fmt)
12486 }
12487 Some(Keyword::MANIFEST) => {
12488 let verbose = self.parse_keyword(Keyword::VERBOSE);
12489 CopyLegacyOption::Manifest { verbose }
12490 }
12491 Some(Keyword::MAXFILESIZE) => {
12492 let _ = self.parse_keyword(Keyword::AS);
12493 let size = self.parse_number_value()?;
12494 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12495 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12496 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12497 _ => None,
12498 };
12499 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
12500 }
12501 Some(Keyword::NULL) => {
12502 let _ = self.parse_keyword(Keyword::AS);
12503 CopyLegacyOption::Null(self.parse_literal_string()?)
12504 }
12505 Some(Keyword::PARALLEL) => {
12506 let enabled = match self.parse_one_of_keywords(&[
12507 Keyword::TRUE,
12508 Keyword::FALSE,
12509 Keyword::ON,
12510 Keyword::OFF,
12511 ]) {
12512 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12513 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12514 _ => None,
12515 };
12516 CopyLegacyOption::Parallel(enabled)
12517 }
12518 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
12519 Some(Keyword::PARTITION) => {
12520 self.expect_keyword(Keyword::BY)?;
12521 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
12522 let include = self.parse_keyword(Keyword::INCLUDE);
12523 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
12524 }
12525 Some(Keyword::REGION) => {
12526 let _ = self.parse_keyword(Keyword::AS);
12527 let region = self.parse_literal_string()?;
12528 CopyLegacyOption::Region(region)
12529 }
12530 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
12531 Some(Keyword::ROWGROUPSIZE) => {
12532 let _ = self.parse_keyword(Keyword::AS);
12533 let file_size = self.parse_file_size()?;
12534 CopyLegacyOption::RowGroupSize(file_size)
12535 }
12536 Some(Keyword::STATUPDATE) => {
12537 let enabled = match self.parse_one_of_keywords(&[
12538 Keyword::TRUE,
12539 Keyword::FALSE,
12540 Keyword::ON,
12541 Keyword::OFF,
12542 ]) {
12543 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12544 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12545 _ => None,
12546 };
12547 CopyLegacyOption::StatUpdate(enabled)
12548 }
12549 Some(Keyword::TIMEFORMAT) => {
12550 let _ = self.parse_keyword(Keyword::AS);
12551 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12552 Some(self.parse_literal_string()?)
12553 } else {
12554 None
12555 };
12556 CopyLegacyOption::TimeFormat(fmt)
12557 }
12558 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
12559 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
12560 _ => self.expected_ref("option", self.peek_token_ref())?,
12561 };
12562 Ok(ret)
12563 }
12564
12565 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12566 let size = self.parse_number_value()?;
12567 let unit = self.maybe_parse_file_size_unit();
12568 Ok(FileSize { size, unit })
12569 }
12570
12571 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12572 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12573 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12574 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12575 _ => None,
12576 }
12577 }
12578
12579 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12580 if self.parse_keyword(Keyword::DEFAULT) {
12581 Ok(IamRoleKind::Default)
12582 } else {
12583 let arn = self.parse_literal_string()?;
12584 Ok(IamRoleKind::Arn(arn))
12585 }
12586 }
12587
12588 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
12589 let ret = match self.parse_one_of_keywords(&[
12590 Keyword::HEADER,
12591 Keyword::QUOTE,
12592 Keyword::ESCAPE,
12593 Keyword::FORCE,
12594 ]) {
12595 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
12596 Some(Keyword::QUOTE) => {
12597 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
12599 }
12600 Some(Keyword::ESCAPE) => {
12601 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
12603 }
12604 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
12605 CopyLegacyCsvOption::ForceNotNull(
12606 self.parse_comma_separated(|p| p.parse_identifier())?,
12607 )
12608 }
12609 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
12610 CopyLegacyCsvOption::ForceQuote(
12611 self.parse_comma_separated(|p| p.parse_identifier())?,
12612 )
12613 }
12614 _ => self.expected_ref("csv option", self.peek_token_ref())?,
12615 };
12616 Ok(ret)
12617 }
12618
12619 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12620 let s = self.parse_literal_string()?;
12621 if s.len() != 1 {
12622 let loc = self
12623 .tokens
12624 .get(self.index - 1)
12625 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12626 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12627 }
12628 Ok(s.chars().next().unwrap())
12629 }
12630
    /// Parses tab-separated values (as used by `COPY ... FROM STDIN` data).
    /// Thin alias for [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
12636
    /// Reads tab-separated values from the token stream until the `\.`
    /// end-of-data marker or the end of input.
    ///
    /// A tab or newline token terminates the current value; `\N` pushes a
    /// `None` (NULL) entry. Any other token's text is appended to the value
    /// being accumulated.
    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
        let mut values = vec![];
        let mut content = String::new();
        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
            match t {
                Token::Whitespace(Whitespace::Tab) => {
                    // End of one field: emit accumulated text, reset buffer.
                    values.push(Some(core::mem::take(&mut content)));
                }
                Token::Whitespace(Whitespace::Newline) => {
                    // End of a row also terminates the current field.
                    values.push(Some(core::mem::take(&mut content)));
                }
                Token::Backslash => {
                    // `\.` marks the end of the data section.
                    if self.consume_token(&Token::Period) {
                        return values;
                    }
                    // `\N` is the NULL marker.
                    if let Token::Word(w) = self.next_token().token {
                        if w.value == "N" {
                            values.push(None);
                        }
                    }
                }
                _ => {
                    content.push_str(&t.to_string());
                }
            }
        }
        values
    }
12666
    /// Parses a literal value: number, boolean, `NULL`, one of the many
    /// string-literal forms, or a placeholder (`?`-style token, `:name`,
    /// `@name`).
    ///
    /// Returns the value together with the span of the token(s) it came from.
    ///
    /// # Errors
    /// Returns a `ParserError` if the next token is not a recognized value.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper attaching the consumed token's span to a parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are values only in dialects with boolean literals.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a quoted string.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Adjacent single/double-quoted strings may be concatenated,
            // depending on the dialect.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:ident` / `@ident` placeholders: the name must immediately
            // follow the sigil, so no whitespace is skipped here.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // Span covers the sigil through the end of the identifier.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12784
    /// Appends adjacent string literals to `str` when the dialect supports
    /// implicit string-literal concatenation.
    ///
    /// Two modes, depending on the dialect:
    /// - plain adjacency: every following single/double-quoted string is
    ///   appended;
    /// - newline-separated adjacency: a following string is appended only if
    ///   at least one newline was seen since the previous literal.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            // Consume and append every directly following string literal.
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // Track whether a newline has been crossed since the last
            // literal; only then may the next literal be concatenated.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    Token::Whitespace(_) => {
                        // Other whitespace is skipped without satisfying the
                        // newline requirement.
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        // Each concatenation requires a fresh newline.
                        after_newline = false;
                    }
                    _ => break,
                }
            }
        }

        str
    }
12823
12824 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12826 let value_wrapper = self.parse_value()?;
12827 match &value_wrapper.value {
12828 Value::Number(_, _) => Ok(value_wrapper),
12829 Value::Placeholder(_) => Ok(value_wrapper),
12830 _ => {
12831 self.prev_token();
12832 self.expected_ref("literal number", self.peek_token_ref())
12833 }
12834 }
12835 }
12836
12837 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12840 let next_token = self.next_token();
12841 match next_token.token {
12842 Token::Plus => Ok(Expr::UnaryOp {
12843 op: UnaryOperator::Plus,
12844 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12845 }),
12846 Token::Minus => Ok(Expr::UnaryOp {
12847 op: UnaryOperator::Minus,
12848 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12849 }),
12850 _ => {
12851 self.prev_token();
12852 Ok(Expr::Value(self.parse_number_value()?))
12853 }
12854 }
12855 }
12856
12857 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12858 let next_token = self.next_token();
12859 let span = next_token.span;
12860 match next_token.token {
12861 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12862 Value::SingleQuotedString(s.to_string()).with_span(span),
12863 )),
12864 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12865 Value::DoubleQuotedString(s.to_string()).with_span(span),
12866 )),
12867 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12868 Value::HexStringLiteral(s.to_string()).with_span(span),
12869 )),
12870 unexpected => self.expected(
12871 "a string value",
12872 TokenWithSpan {
12873 token: unexpected,
12874 span,
12875 },
12876 ),
12877 }
12878 }
12879
12880 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12882 let next_token = self.next_token();
12883 match next_token.token {
12884 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12885 _ => self.expected("literal int", next_token),
12886 }
12887 }
12888
12889 fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
12892 let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
12893 let peek_token = parser.peek_token();
12894 let span = peek_token.span;
12895 match peek_token.token {
12896 Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
12897 {
12898 parser.next_token();
12899 Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
12900 }
12901 _ => Ok(Expr::Value(
12902 Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
12903 )),
12904 }
12905 };
12906
12907 Ok(CreateFunctionBody::AsBeforeOptions {
12908 body: parse_string_expr(self)?,
12909 link_symbol: if self.consume_token(&Token::Comma) {
12910 Some(parse_string_expr(self)?)
12911 } else {
12912 None
12913 },
12914 })
12915 }
12916
    /// Parses a literal string and returns its contents.
    ///
    /// Accepts a bare non-keyword word, single/double-quoted strings,
    /// Unicode string literals, and — in Postgres/Generic dialects only —
    /// escaped (`E'...'`) and dollar-quoted (`$tag$...$tag$`) strings.
    ///
    /// # Errors
    /// Returns a "literal string" expectation error for any other token.
    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            // An unquoted word that is not a reserved keyword.
            Token::Word(Word {
                value,
                keyword: Keyword::NoKeyword,
                ..
            }) => Ok(value),
            Token::SingleQuotedString(s) => Ok(s),
            Token::DoubleQuotedString(s) => Ok(s),
            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s)
            }
            Token::UnicodeStringLiteral(s) => Ok(s),
            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s.value)
            }
            _ => self.expected("literal string", next_token),
        }
    }
12938
12939 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12941 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12942 Some(Keyword::TRUE) => Ok(true),
12943 Some(Keyword::FALSE) => Ok(false),
12944 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12945 }
12946 }
12947
12948 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
12950 let neg = self.parse_keyword(Keyword::NOT);
12951 let normalized_form = self.maybe_parse(|parser| {
12952 match parser.parse_one_of_keywords(&[
12953 Keyword::NFC,
12954 Keyword::NFD,
12955 Keyword::NFKC,
12956 Keyword::NFKD,
12957 ]) {
12958 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
12959 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
12960 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
12961 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
12962 _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
12963 }
12964 })?;
12965 if self.parse_keyword(Keyword::NORMALIZED) {
12966 return Ok(Expr::IsNormalized {
12967 expr: Box::new(expr),
12968 form: normalized_form,
12969 negated: neg,
12970 });
12971 }
12972 self.expected_ref("unicode normalization form", self.peek_token_ref())
12973 }
12974
12975 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12977 self.expect_token(&Token::LParen)?;
12978 let values = self.parse_comma_separated(|parser| {
12979 let name = parser.parse_literal_string()?;
12980 let e = if parser.consume_token(&Token::Eq) {
12981 let value = parser.parse_number()?;
12982 EnumMember::NamedValue(name, value)
12983 } else {
12984 EnumMember::Name(name)
12985 };
12986 Ok(e)
12987 })?;
12988 self.expect_token(&Token::RParen)?;
12989
12990 Ok(values)
12991 }
12992
12993 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
12995 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
12996 if trailing_bracket.0 {
12997 return parser_err!(
12998 format!("unmatched > after parsing data type {ty}"),
12999 self.peek_token_ref()
13000 );
13001 }
13002
13003 Ok(ty)
13004 }
13005
    /// Parses a data type name plus its optional precision/scale arguments
    /// and dialect-specific modifiers.
    ///
    /// Returns the parsed type together with a flag recording whether a
    /// trailing `>` from a nested `ARRAY<...>`/`STRUCT<...>` has already
    /// been matched, so the caller can report an unmatched `>`.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        // Remembered so errors can point at the type-name token.
        let next_token_index = self.get_current_index();

        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                Keyword::FLOAT => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::FloatUnsigned(precision))
                    } else {
                        Ok(DataType::Float(precision))
                    }
                }
                Keyword::REAL => {
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::RealUnsigned)
                    } else {
                        Ok(DataType::Real)
                    }
                }
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    // `DOUBLE PRECISION` vs plain `DOUBLE(p, s)`.
                    if self.parse_keyword(Keyword::PRECISION) {
                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoublePrecisionUnsigned)
                        } else {
                            Ok(DataType::DoublePrecision)
                        }
                    } else {
                        let precision = self.parse_exact_number_optional_precision_scale()?;

                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoubleUnsigned(precision))
                        } else {
                            Ok(DataType::Double(precision))
                        }
                    }
                }
                // Integer families: optional precision, then an optional
                // UNSIGNED suffix; some dialects also allow a no-op SIGNED
                // suffix which is consumed and discarded.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // Character/string families with optional lengths.
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                // Date/time families.
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // parse_datetime_64 re-reads the keyword, so rewind first.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    // Optional `WITH TIME ZONE` / `WITHOUT TIME ZONE`.
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => {
                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
                }
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    // Optional `WITH TIME ZONE` / `WITHOUT TIME ZONE`.
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => {
                    // Interval field/precision options only in dialects that
                    // support them.
                    if self.dialect.supports_interval_options() {
                        let fields = self.maybe_parse_optional_interval_fields()?;
                        let precision = self.parse_optional_precision()?;
                        Ok(DataType::Interval { fields, precision })
                    } else {
                        Ok(DataType::Interval {
                            fields: None,
                            precision: None,
                        })
                    }
                }
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    // FixedString requires an explicit parenthesized length.
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // Exact numeric families with optional precision/scale.
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecimalUnsigned(precision))
                    } else {
                        Ok(DataType::Decimal(precision))
                    }
                }
                Keyword::DEC => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecUnsigned(precision))
                    } else {
                        Ok(DataType::Dec(precision))
                    }
                }
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                Keyword::ARRAY => {
                    // Three spellings: bare ARRAY, ClickHouse Array(T), or
                    // angle-bracketed ARRAY<T>.
                    if self.dialect.supports_array_typedef_without_element_type() {
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        // Record whether the closing `>` consumed here also
                        // closed an outer generic.
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
                {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                Keyword::SETOF => {
                    // `SETOF <type>`: recursive on the element type.
                    let inner = self.parse_data_type()?;
                    Ok(DataType::SetOf(Box::new(inner)))
                }
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                Keyword::TABLE => {
                    // `TABLE (...)` carries explicit columns; bare `TABLE`
                    // does not.
                    if self.peek_token_ref().token == Token::LParen {
                        let columns = self.parse_returns_table_columns()?;
                        Ok(DataType::Table(Some(columns)))
                    } else {
                        Ok(DataType::Table(None))
                    }
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsVector)
                }
                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsQuery)
                }
                _ => {
                    // Unrecognized keyword: treat it as a custom (user-defined)
                    // type name, with optional modifiers.
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Postfix `[]` / `[n]` array notation, possibly repeated for nested
        // arrays, in dialects that support it.
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
13434
    /// Parses one column definition inside a `RETURNS TABLE (...)` clause;
    /// currently identical to an ordinary column definition.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
13438
13439 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13440 self.expect_token(&Token::LParen)?;
13441 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13442 self.expect_token(&Token::RParen)?;
13443 Ok(columns)
13444 }
13445
13446 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13448 self.expect_token(&Token::LParen)?;
13449 let mut values = Vec::new();
13450 loop {
13451 let next_token = self.next_token();
13452 match next_token.token {
13453 Token::SingleQuotedString(value) => values.push(value),
13454 _ => self.expected("a string", next_token)?,
13455 }
13456 let next_token = self.next_token();
13457 match next_token.token {
13458 Token::Comma => (),
13459 Token::RParen => break,
13460 _ => self.expected(", or }", next_token)?,
13461 }
13462 }
13463 Ok(values)
13464 }
13465
13466 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13468 let ident = self.parse_identifier()?;
13469 self.expect_keyword_is(Keyword::AS)?;
13470 let alias = self.parse_identifier()?;
13471 Ok(IdentWithAlias { ident, alias })
13472 }
13473
13474 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13476 let ident = self.parse_identifier()?;
13477 let _after_as = self.parse_keyword(Keyword::AS);
13478 let alias = self.parse_identifier()?;
13479 Ok(IdentWithAlias { ident, alias })
13480 }
13481
13482 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13484 self.parse_comma_separated(|parser| {
13485 parser.expect_token(&Token::LParen)?;
13486 let query = parser.parse_query()?;
13487 parser.expect_token(&Token::RParen)?;
13488 Ok(*query)
13489 })
13490 }
13491
    /// Parses the set quantifier for a pipe operator that requires an
    /// explicit `DISTINCT` (or `DISTINCT BY NAME`) modifier; any other
    /// quantifier is an error. `operator_name` is used only in the message.
    fn parse_distinct_required_set_quantifier(
        &mut self,
        operator_name: &str,
    ) -> Result<SetQuantifier, ParserError> {
        // NOTE(review): the quantifier is parsed in an `Intersect` context
        // regardless of `operator_name` — presumably the accepted quantifiers
        // are identical for all callers; confirm against call sites.
        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
        match quantifier {
            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
            _ => Err(ParserError::ParserError(format!(
                "{operator_name} pipe operator requires DISTINCT modifier",
            ))),
        }
    }
13505
13506 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13508 if self.parse_keyword(Keyword::AS) {
13509 Ok(Some(self.parse_identifier()?))
13510 } else {
13511 self.maybe_parse(|parser| parser.parse_identifier())
13513 }
13514 }
13515
13516 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13518 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13519 parser.dialect.is_select_item_alias(explicit, kw, parser)
13520 }
13521 self.parse_optional_alias_inner(None, validator)
13522 }
13523
13524 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
13528 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13529 parser.dialect.is_table_factor_alias(explicit, kw, parser)
13530 }
13531 let explicit = self.peek_keyword(Keyword::AS);
13532 match self.parse_optional_alias_inner(None, validator)? {
13533 Some(name) => {
13534 let columns = self.parse_table_alias_column_defs()?;
13535 Ok(Some(TableAlias {
13536 explicit,
13537 name,
13538 columns,
13539 }))
13540 }
13541 None => Ok(None),
13542 }
13543 }
13544
    /// Parses MySQL-style table index hints, e.g.
    /// `USE INDEX (i1), IGNORE KEY FOR ORDER BY (i2)`, returning one entry
    /// per hint clause (possibly none).
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        // Each hint begins with USE, IGNORE, or FORCE; loop until none match.
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                _ => {
                    // Unreachable: parse_one_of_keywords only returns one of
                    // the keywords listed above.
                    return self.expected_ref(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token_ref(),
                    )
                }
            };
            // `INDEX` and `KEY` are synonyms; one of them is required.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self
                        .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
                }
            };
            // Optional `FOR JOIN | ORDER BY | GROUP BY` scope clause.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected_ref(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token_ref(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // Parenthesized index-name list; a bare `()` yields an empty list.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token_ref().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
13603
13604 pub fn parse_optional_alias(
13608 &mut self,
13609 reserved_kwds: &[Keyword],
13610 ) -> Result<Option<Ident>, ParserError> {
13611 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13612 false
13613 }
13614 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13615 }
13616
    /// Shared implementation for optional-alias parsing.
    ///
    /// An alias may be introduced by `AS` or appear bare. A word token is
    /// accepted when either (a) `reserved_kwds` is provided and the word is
    /// not reserved (an explicit `AS` overrides the reservation check), or
    /// (b) the dialect-supplied `validator` approves the keyword. Quoted
    /// strings are always accepted. When nothing matches, the token is pushed
    /// back and `None` is returned — unless an explicit `AS` was consumed, in
    /// which case a missing alias is an error.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Reserved-keyword path: accept the word unless it is reserved.
            Token::Word(w)
                if reserved_kwds.is_some()
                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Dialect-validator path.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Quoted strings are always valid aliases.
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                // Not an alias: push the token back and report no alias.
                self.prev_token();
                Ok(None) }
        }
    }
13661
    /// Parses an optional `GROUP BY` clause, including `GROUP BY ALL` and
    /// dialect-specific `WITH ROLLUP/CUBE/TOTALS` and `GROUPING SETS`
    /// modifiers. Returns `None` when `GROUP BY` is absent.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` carries no expression list (`None`).
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Any number of `WITH ROLLUP|CUBE|TOTALS` modifiers may follow.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        _ => {
                            // Unreachable: expect_one_of_keywords only returns
                            // one of the keywords listed above.
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            // `GROUPING SETS ((...), ...)`: each element is either a
            // parenthesized tuple or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13718
    /// Parses an optional `ORDER BY` clause. Supports `ORDER BY ALL` and
    /// `INTERPOLATE` where the dialect allows them; returns `None` when
    /// `ORDER BY` is absent.
    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            let order_by =
                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
                    // `ORDER BY ALL [ASC|DESC] [NULLS FIRST|LAST]`-style form.
                    let order_by_options = self.parse_order_by_options()?;
                    OrderBy {
                        kind: OrderByKind::All(order_by_options),
                        interpolate: None,
                    }
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    let interpolate = if self.dialect.supports_interpolate() {
                        self.parse_interpolations()?
                    } else {
                        None
                    };
                    OrderBy {
                        kind: OrderByKind::Expressions(exprs),
                        interpolate,
                    }
                };
            Ok(Some(order_by))
        } else {
            Ok(None)
        }
    }
13746
    /// Parses an optional limit clause in any accepted shape: `LIMIT n`,
    /// `LIMIT n OFFSET m`, `OFFSET m LIMIT n`, MySQL-style `LIMIT m, n`, and
    /// `LIMIT n BY exprs`. Returns `None` when neither `LIMIT` nor `OFFSET`
    /// appears.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // Some dialects allow `OFFSET` before `LIMIT`.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL `LIMIT <offset>, <limit>` — only when no OFFSET was
            // already consumed and a comma follows the first expression.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                // `expr.is_some()` was checked above, so this never errors.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // ClickHouse-style `LIMIT n BY expr, ...`.
            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // A trailing `OFFSET` after `LIMIT` (the common SQL ordering).
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit == Some(None)` means `LIMIT` appeared with no expression;
        // that alone does not produce a clause.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
13798
13799 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13802 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13803 let fn_name = self.parse_object_name(false)?;
13804 self.parse_function_call(fn_name)
13805 .map(TableObject::TableFunction)
13806 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13807 self.parse_parenthesized(|p| p.parse_query())
13808 .map(TableObject::TableQuery)
13809 } else {
13810 self.parse_object_name(false).map(TableObject::TableName)
13811 }
13812 }
13813
    /// Parses a possibly qualified object name such as `schema.table`.
    /// `in_table_clause` indicates the name appears in table position
    /// (FROM/INTO/...), which affects BigQuery hyphenated-name handling.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13823
    /// Core implementation of object-name parsing.
    ///
    /// Collects period-separated parts, with special cases for: BigQuery
    /// unquoted hyphenated identifiers in table clauses, wildcard parts
    /// (`db.*`) when `allow_wildcards` is set, double-dot notation
    /// (`db..table`) where the dialect supports it, and dialect-specific
    /// function-call parts.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names may contain unquoted hyphens; a part whose
            // tokenization ended with a period also continues the name.
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // A `*` part, e.g. `GRANT ... ON db.*`.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    // NOTE(review): this arm looks unreachable — the same
                    // condition is already handled by the outer `if`; confirm.
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // `db..table`: insert an empty part for the omitted schema.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        // The dialect treats this identifier as a function
                        // call producing a name part; parse its arguments.
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery accepts a single quoted string that spans multiple parts,
        // e.g. `"a.b.c"`: split any part containing `.` into separate parts.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13920
    /// Consumes tokens until EOF, `=`, `;`, or `|>`, collecting every word
    /// token seen as an identifier and silently skipping all other tokens.
    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];
        loop {
            let token = self.peek_token_ref();
            match &token.token {
                Token::Word(w) => {
                    idents.push(w.to_ident(token.span));
                }
                // Stop tokens: end of input, `=`, `;`, or the `|>` pipe.
                Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
                    break
                }
                // Anything else (commas, periods, ...) is skipped.
                _ => {}
            }
            self.advance_token();
        }
        Ok(idents)
    }
13939
13940 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
13980 let mut idents = vec![];
13981
13982 let next_token = self.next_token();
13984 match next_token.token {
13985 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13986 Token::EOF => {
13987 return Err(ParserError::ParserError(
13988 "Empty input when parsing identifier".to_string(),
13989 ))?
13990 }
13991 token => {
13992 return Err(ParserError::ParserError(format!(
13993 "Unexpected token in identifier: {token}"
13994 )))?
13995 }
13996 };
13997
13998 loop {
14000 match self.next_token().token {
14001 Token::Period => {
14003 let next_token = self.next_token();
14004 match next_token.token {
14005 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
14006 Token::EOF => {
14007 return Err(ParserError::ParserError(
14008 "Trailing period in identifier".to_string(),
14009 ))?
14010 }
14011 token => {
14012 return Err(ParserError::ParserError(format!(
14013 "Unexpected token following period in identifier: {token}"
14014 )))?
14015 }
14016 }
14017 }
14018 Token::EOF => break,
14019 token => {
14020 return Err(ParserError::ParserError(format!(
14021 "Unexpected token in identifier: {token}"
14022 )))?;
14023 }
14024 }
14025 }
14026
14027 Ok(idents)
14028 }
14029
14030 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
14032 let next_token = self.next_token();
14033 match next_token.token {
14034 Token::Word(w) => Ok(w.into_ident(next_token.span)),
14035 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
14036 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
14037 _ => self.expected("identifier", next_token),
14038 }
14039 }
14040
    /// Parses a BigQuery-style identifier that may contain unquoted hyphens,
    /// e.g. `my-project`. Returns the identifier and a flag that is `true`
    /// when the final segment was a number tokenized with a trailing `.`
    /// (that period then acts as the name-part separator).
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted identifiers may absorb hyphenated segments.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // A number such as `123.` was tokenized with
                                // the trailing period: keep only the digits
                                // and report the period as a part separator.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A trailing numeric segment must be followed
                                // by whitespace unless a period comes next.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement recorded above.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word token: fall back to ordinary identifier parsing.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
14118
14119 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
14121 if self.consume_token(&Token::LParen) {
14122 if self.peek_token_ref().token == Token::RParen {
14123 self.next_token();
14124 Ok(vec![])
14125 } else {
14126 let cols = self.parse_comma_separated_with_trailing_commas(
14127 Parser::parse_view_column,
14128 self.dialect.supports_column_definition_trailing_commas(),
14129 Self::is_reserved_for_column_alias,
14130 )?;
14131 self.expect_token(&Token::RParen)?;
14132 Ok(cols)
14133 }
14134 } else {
14135 Ok(vec![])
14136 }
14137 }
14138
14139 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
14141 let name = self.parse_identifier()?;
14142 let options = self.parse_view_column_options()?;
14143 let data_type = if dialect_of!(self is ClickHouseDialect) {
14144 Some(self.parse_data_type()?)
14145 } else {
14146 None
14147 };
14148 Ok(ViewColumnDef {
14149 name,
14150 data_type,
14151 options,
14152 })
14153 }
14154
14155 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
14156 let mut options = Vec::new();
14157 loop {
14158 let option = self.parse_optional_column_option()?;
14159 if let Some(option) = option {
14160 options.push(option);
14161 } else {
14162 break;
14163 }
14164 }
14165 if options.is_empty() {
14166 Ok(None)
14167 } else if self.dialect.supports_space_separated_column_options() {
14168 Ok(Some(ColumnOptions::SpaceSeparated(options)))
14169 } else {
14170 Ok(Some(ColumnOptions::CommaSeparated(options)))
14171 }
14172 }
14173
    /// Parses a parenthesized, comma-separated list of unqualified
    /// identifiers, e.g. `(a, b, c)`.
    pub fn parse_parenthesized_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Ident>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
    }
14183
    /// Parses a parenthesized, comma-separated list of compound
    /// (period-separated) identifiers, e.g. `(t.a, t.b)`.
    pub fn parse_parenthesized_compound_identifier_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Expr>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            Ok(Expr::CompoundIdentifier(
                p.parse_period_separated(|p| p.parse_identifier())?,
            ))
        })
    }
14196
    /// Parses a mandatory, non-empty parenthesized list of index columns,
    /// each parsed as a CREATE INDEX expression.
    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
            p.parse_create_index_expr()
        })
    }
14204
    /// Parses a parenthesized, comma-separated list of possibly qualified
    /// column names, e.g. `(schema.t.a, b)`.
    pub fn parse_parenthesized_qualified_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<ObjectName>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            p.parse_object_name(true)
        })
    }
14216
    /// Shared implementation for the `parse_parenthesized_*_list` helpers:
    /// parses `( item [, item]* )` using `f` for each item. A missing list is
    /// an error unless `optional`; a bare `()` is accepted only when
    /// `allow_empty` is set.
    fn parse_parenthesized_column_list_inner<F, T>(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
        mut f: F,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        if self.consume_token(&Token::LParen) {
            if allow_empty && self.peek_token_ref().token == Token::RParen {
                self.next_token();
                Ok(vec![])
            } else {
                let cols = self.parse_comma_separated(|p| f(p))?;
                self.expect_token(&Token::RParen)?;
                Ok(cols)
            }
        } else if optional == Optional {
            Ok(vec![])
        } else {
            self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
        }
    }
14243
14244 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
14246 if self.consume_token(&Token::LParen) {
14247 let cols = self.parse_comma_separated(|p| {
14248 let name = p.parse_identifier()?;
14249 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
14250 Ok(TableAliasColumnDef { name, data_type })
14251 })?;
14252 self.expect_token(&Token::RParen)?;
14253 Ok(cols)
14254 } else {
14255 Ok(vec![])
14256 }
14257 }
14258
14259 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
14261 self.expect_token(&Token::LParen)?;
14262 let n = self.parse_literal_uint()?;
14263 self.expect_token(&Token::RParen)?;
14264 Ok(n)
14265 }
14266
14267 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
14269 if self.consume_token(&Token::LParen) {
14270 let n = self.parse_literal_uint()?;
14271 self.expect_token(&Token::RParen)?;
14272 Ok(Some(n))
14273 } else {
14274 Ok(None)
14275 }
14276 }
14277
    /// Parses optional SQL interval field qualifiers such as `YEAR`,
    /// `DAY TO SECOND`, or `MINUTE TO SECOND`. Returns `None` when no field
    /// keyword follows.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            Some(Keyword::YEAR) => {
                // `YEAR TO MONTH` is the only valid YEAR range.
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        _ => {
                            // Unreachable: expect_one_of_keywords only returns
                            // one of the keywords listed above.
                            self.prev_token();
                            self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        _ => {
                            // Unreachable, as above.
                            self.prev_token();
                            self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            Some(Keyword::MINUTE) => {
                // `MINUTE TO SECOND` is the only valid MINUTE range.
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            // MONTH and SECOND never begin a range.
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            Some(_) => {
                // Unreachable: parse_one_of_keywords only returns one of the
                // keywords listed above.
                self.prev_token();
                self.expected_ref(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token_ref(),
                )
            }
            None => Ok(None),
        }
    }
14356
14357 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
14365 self.expect_keyword_is(Keyword::DATETIME64)?;
14366 self.expect_token(&Token::LParen)?;
14367 let precision = self.parse_literal_uint()?;
14368 let time_zone = if self.consume_token(&Token::Comma) {
14369 Some(self.parse_literal_string()?)
14370 } else {
14371 None
14372 };
14373 self.expect_token(&Token::RParen)?;
14374 Ok((precision, time_zone))
14375 }
14376
14377 pub fn parse_optional_character_length(
14379 &mut self,
14380 ) -> Result<Option<CharacterLength>, ParserError> {
14381 if self.consume_token(&Token::LParen) {
14382 let character_length = self.parse_character_length()?;
14383 self.expect_token(&Token::RParen)?;
14384 Ok(Some(character_length))
14385 } else {
14386 Ok(None)
14387 }
14388 }
14389
14390 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
14392 if self.consume_token(&Token::LParen) {
14393 let binary_length = self.parse_binary_length()?;
14394 self.expect_token(&Token::RParen)?;
14395 Ok(Some(binary_length))
14396 } else {
14397 Ok(None)
14398 }
14399 }
14400
14401 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
14403 if self.parse_keyword(Keyword::MAX) {
14404 return Ok(CharacterLength::Max);
14405 }
14406 let length = self.parse_literal_uint()?;
14407 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
14408 Some(CharLengthUnits::Characters)
14409 } else if self.parse_keyword(Keyword::OCTETS) {
14410 Some(CharLengthUnits::Octets)
14411 } else {
14412 None
14413 };
14414 Ok(CharacterLength::IntegerLength { length, unit })
14415 }
14416
14417 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
14419 if self.parse_keyword(Keyword::MAX) {
14420 return Ok(BinaryLength::Max);
14421 }
14422 let length = self.parse_literal_uint()?;
14423 Ok(BinaryLength::IntegerLength { length })
14424 }
14425
14426 pub fn parse_optional_precision_scale(
14428 &mut self,
14429 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14430 if self.consume_token(&Token::LParen) {
14431 let n = self.parse_literal_uint()?;
14432 let scale = if self.consume_token(&Token::Comma) {
14433 Some(self.parse_literal_uint()?)
14434 } else {
14435 None
14436 };
14437 self.expect_token(&Token::RParen)?;
14438 Ok((Some(n), scale))
14439 } else {
14440 Ok((None, None))
14441 }
14442 }
14443
14444 pub fn parse_exact_number_optional_precision_scale(
14446 &mut self,
14447 ) -> Result<ExactNumberInfo, ParserError> {
14448 if self.consume_token(&Token::LParen) {
14449 let precision = self.parse_literal_uint()?;
14450 let scale = if self.consume_token(&Token::Comma) {
14451 Some(self.parse_signed_integer()?)
14452 } else {
14453 None
14454 };
14455
14456 self.expect_token(&Token::RParen)?;
14457
14458 match scale {
14459 None => Ok(ExactNumberInfo::Precision(precision)),
14460 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14461 }
14462 } else {
14463 Ok(ExactNumberInfo::None)
14464 }
14465 }
14466
14467 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14469 let is_negative = self.consume_token(&Token::Minus);
14470
14471 if !is_negative {
14472 let _ = self.consume_token(&Token::Plus);
14473 }
14474
14475 let current_token = self.peek_token_ref();
14476 match ¤t_token.token {
14477 Token::Number(s, _) => {
14478 let s = s.clone();
14479 let span_start = current_token.span.start;
14480 self.advance_token();
14481 let value = Self::parse::<i64>(s, span_start)?;
14482 Ok(if is_negative { -value } else { value })
14483 }
14484 _ => self.expected_ref("number", current_token),
14485 }
14486 }
14487
    /// Parses optional parenthesized type modifiers, e.g. the `(10, 'x')` of
    /// a custom type `mytype(10, 'x')`. Each modifier is kept in its raw
    /// string form. Returns `None` when no `(` follows.
    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
        if self.consume_token(&Token::LParen) {
            let mut modifiers = Vec::new();
            loop {
                let next_token = self.next_token();
                match next_token.token {
                    // Words, numbers, and quoted strings are all accepted.
                    Token::Word(w) => modifiers.push(w.to_string()),
                    Token::Number(n, _) => modifiers.push(n),
                    Token::SingleQuotedString(s) => modifiers.push(s),

                    // Commas separate modifiers; `)` terminates the list.
                    Token::Comma => {
                        continue;
                    }
                    Token::RParen => {
                        break;
                    }
                    _ => self.expected("type modifiers", next_token)?,
                }
            }

            Ok(Some(modifiers))
        } else {
            Ok(None)
        }
    }
14514
14515 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14517 where
14518 F: FnOnce(Box<DataType>) -> DataType,
14519 {
14520 self.expect_token(&Token::LParen)?;
14521 let inside_type = self.parse_data_type()?;
14522 self.expect_token(&Token::RParen)?;
14523 Ok(parent_type(inside_type.into()))
14524 }
14525
    /// Parses a `DELETE` statement and boxes it as a `SetExpr`, for positions
    /// where DELETE may appear inside a set expression.
    fn parse_delete_setexpr_boxed(
        &mut self,
        delete_token: TokenWithSpan,
    ) -> Result<Box<SetExpr>, ParserError> {
        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
    }
14535
    /// Parses a `DELETE` statement (after the `DELETE` keyword, whose token
    /// is passed in), including multi-table forms, `USING`, `OUTPUT`,
    /// `WHERE`, `RETURNING`, `ORDER BY`, and `LIMIT` clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Distinguish `DELETE FROM t ...` from the multi-table
        // `DELETE t1, t2 FROM ...` form; BigQuery/Oracle/Generic also allow
        // omitting `FROM` entirely.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        // Optional `OUTPUT` clause (e.g. MSSQL).
        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
14600
14601 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
14604 let modifier_keyword =
14605 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
14606
14607 let id = self.parse_literal_uint()?;
14608
14609 let modifier = match modifier_keyword {
14610 Some(Keyword::CONNECTION) => Some(KillType::Connection),
14611 Some(Keyword::QUERY) => Some(KillType::Query),
14612 Some(Keyword::MUTATION) => {
14613 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
14614 Some(KillType::Mutation)
14615 } else {
14616 self.expected_ref(
14617 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
14618 self.peek_token_ref(),
14619 )?
14620 }
14621 }
14622 _ => None,
14623 };
14624
14625 Ok(Statement::Kill { modifier, id })
14626 }
14627
    /// Parses an `EXPLAIN`/`DESCRIBE` statement. `describe_alias` records
    /// which keyword introduced it so the statement can round-trip.
    ///
    /// If a full statement follows, produces [`Statement::Explain`];
    /// otherwise the rest of the input is treated as a table name and a
    /// [`Statement::ExplainTable`] is produced.
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options (`EXPLAIN ( ... )`) are only
        // attempted for EXPLAIN proper and when the dialect opts in;
        // otherwise fall through to the keyword-based variants.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // A nested EXPLAIN is rejected: EXPLAIN must be the outermost
            // statement.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // No parsable statement followed: treat the input as
                // `DESCRIBE [EXTENDED | FORMATTED] [TABLE] <name>`.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14698
    /// Parses a query expression: an optional `WITH` clause followed by
    /// either a DML statement usable as a query body (`INSERT`, `UPDATE`,
    /// `DELETE`, `MERGE`) or a regular query body with its trailing clauses
    /// (ORDER BY, limit clause, SETTINGS, FETCH, FOR, FORMAT, pipe
    /// operators).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Queries nest (subqueries, CTEs), so bound the recursion depth.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // DML used as a query body (e.g. `WITH ... INSERT ...`); none of the
        // trailing query clauses apply in these branches.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            // Regular query body followed by its optional trailing clauses,
            // parsed in the order they may appear.
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // `FOR` introduces either a FOR XML/JSON/BROWSE clause (at most
            // one, terminating the loop) or a lock clause (possibly several).
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14830
    /// Parses a chain of `|>` pipe operators following a query body.
    /// Each `|>` must be followed by one of the keywords listed below;
    /// parsing stops when no further `|>` token is present.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // `AGGREGATE [exprs...] [GROUP BY exprs...]` — the
                    // full-table aggregate list is absent when GROUP BY
                    // follows immediately.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::INTERSECT => {
                    // INTERSECT/EXCEPT pipe operators require an explicit
                    // DISTINCT quantifier.
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // `PIVOT (aggs FOR col IN (values)) [alias]`.
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // `UNPIVOT (value FOR name IN (columns)) [alias]`.
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Rewind so parse_joins sees the join keyword itself.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
15049
15050 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
15051 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
15052 {
15053 let key_values = self.parse_comma_separated(|p| {
15054 let key = p.parse_identifier()?;
15055 p.expect_token(&Token::Eq)?;
15056 let value = p.parse_expr()?;
15057 Ok(Setting { key, value })
15058 })?;
15059 Some(key_values)
15060 } else {
15061 None
15062 };
15063 Ok(settings)
15064 }
15065
15066 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
15068 if self.parse_keyword(Keyword::XML) {
15069 Ok(Some(self.parse_for_xml()?))
15070 } else if self.parse_keyword(Keyword::JSON) {
15071 Ok(Some(self.parse_for_json()?))
15072 } else if self.parse_keyword(Keyword::BROWSE) {
15073 Ok(Some(ForClause::Browse))
15074 } else {
15075 Ok(None)
15076 }
15077 }
15078
    /// Parses the body of a `FOR XML` clause (after `FOR XML` has been
    /// consumed): a mode (`RAW`/`AUTO`/`EXPLICIT`/`PATH`, where RAW and PATH
    /// take an optional parenthesized element name) followed by optional
    /// comma-separated options (`ELEMENTS`, `BINARY BASE64`, `ROOT('...')`,
    /// `TYPE`).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        // Options are comma-separated; each comma is consumed, then the
        // known option keywords are tried in turn.
        while self.peek_token_ref().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                self.expect_keyword_is(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
15133
15134 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
15136 let for_json = if self.parse_keyword(Keyword::AUTO) {
15137 ForJson::Auto
15138 } else if self.parse_keyword(Keyword::PATH) {
15139 ForJson::Path
15140 } else {
15141 return Err(ParserError::ParserError(
15142 "Expected FOR JSON [AUTO | PATH ]".to_string(),
15143 ));
15144 };
15145 let mut root = None;
15146 let mut include_null_values = false;
15147 let mut without_array_wrapper = false;
15148 while self.peek_token_ref().token == Token::Comma {
15149 self.next_token();
15150 if self.parse_keyword(Keyword::ROOT) {
15151 self.expect_token(&Token::LParen)?;
15152 root = Some(self.parse_literal_string()?);
15153 self.expect_token(&Token::RParen)?;
15154 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
15155 include_null_values = true;
15156 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
15157 without_array_wrapper = true;
15158 }
15159 }
15160 Ok(ForClause::Json {
15161 for_json,
15162 root,
15163 include_null_values,
15164 without_array_wrapper,
15165 })
15166 }
15167
    /// Parses a single common table expression within a `WITH` clause:
    /// `<name> [(columns)] [AS [MATERIALIZED | NOT MATERIALIZED]] (<query>)
    /// [FROM <ident>]`. For dialects where `AS` is optional, also accepts
    /// `<name> (<query>)` directly.
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // AS-less form: `<name> (<query>)`. Parsed speculatively via
        // maybe_parse so failure falls through to the regular form below.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Optional column alias list before AS; when AS is optional for the
        // dialect it may be omitted after the column list too.
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // `AS [NOT] MATERIALIZED` is only recognized for Postgres.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        // Trailing `FROM <ident>` is only parsed for FROM-first dialects.
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
15242
    /// Parses a query body at the given set-operator precedence: a `SELECT`
    /// (or leading `FROM` for FROM-first dialects), a parenthesized
    /// subquery, `VALUES`/`VALUE` row constructors, or `TABLE <name>` —
    /// then folds in any trailing set operations via
    /// `parse_remaining_set_exprs`.
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // Parenthesized query body, e.g. `(SELECT ...) UNION ...`.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, false)?)
        } else if self.parse_keyword(Keyword::VALUE) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, true)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected_ref(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token_ref(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
15280
15281 fn parse_remaining_set_exprs(
15285 &mut self,
15286 mut expr: SetExpr,
15287 precedence: u8,
15288 ) -> Result<Box<SetExpr>, ParserError> {
15289 loop {
15290 let op = self.parse_set_operator(&self.peek_token().token);
15292 let next_precedence = match op {
15293 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
15295 10
15296 }
15297 Some(SetOperator::Intersect) => 20,
15299 None => break,
15301 };
15302 if precedence >= next_precedence {
15303 break;
15304 }
15305 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
15307 expr = SetExpr::SetOperation {
15308 left: Box::new(expr),
15309 op: op.unwrap(),
15310 set_quantifier,
15311 right: self.parse_query_body(next_precedence)?,
15312 };
15313 }
15314
15315 Ok(expr.into())
15316 }
15317
15318 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
15320 match token {
15321 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
15322 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
15323 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
15324 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
15325 _ => None,
15326 }
15327 }
15328
15329 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
15331 match op {
15332 Some(
15333 SetOperator::Except
15334 | SetOperator::Intersect
15335 | SetOperator::Union
15336 | SetOperator::Minus,
15337 ) => {
15338 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
15339 SetQuantifier::DistinctByName
15340 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15341 SetQuantifier::ByName
15342 } else if self.parse_keyword(Keyword::ALL) {
15343 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15344 SetQuantifier::AllByName
15345 } else {
15346 SetQuantifier::All
15347 }
15348 } else if self.parse_keyword(Keyword::DISTINCT) {
15349 SetQuantifier::Distinct
15350 } else {
15351 SetQuantifier::None
15352 }
15353 }
15354 _ => SetQuantifier::None,
15355 }
15356 }
15357
    /// Parses a `SELECT` body (projection, FROM, WHERE, GROUP BY, HAVING,
    /// WINDOW/QUALIFY, etc.). The leading `SELECT` keyword — or a leading
    /// `FROM`, for FROM-first dialects — has NOT yet been consumed.
    /// Trailing query-level clauses (ORDER BY, LIMIT, ...) are handled by
    /// `parse_query` instead.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // FROM-first dialects: `FROM t [SELECT ...]`. When no SELECT follows,
        // return an empty-projection Select flavored FromFirstNoSelect.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                return Ok(Select {
                    // No SELECT keyword exists, so attach the FROM token.
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        let value_table_mode = self.parse_value_table_mode()?;

        // Dialect-specific modifiers (see parse_select_modifiers) may also
        // yield a distinct quantifier of their own.
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // TOP may come before or after DISTINCT depending on the dialect;
        // `top_before_distinct` records which position was used.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // Prefer the distinct quantifier found among the select modifiers.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Use the FROM parsed up front (FROM-first form) if present;
        // otherwise parse an ordinary trailing FROM.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Zero or more `LATERAL VIEW [OUTER] <expr> <name> [aliases]`.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order;
        // `window_before_qualify` records which came first.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15585
    /// Scans leading whitespace/comment tokens for optimizer hints —
    /// comments whose text contains a `+` marker (see
    /// `extract_hint_prefix_and_text`) — collecting them without consuming
    /// any non-whitespace token.
    ///
    /// Returns an empty list immediately when the dialect does not support
    /// comment-style optimizer hints.
    fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
        let supports_hints = self.dialect.supports_comment_optimizer_hint();
        if !supports_hints {
            return Ok(vec![]);
        }
        let mut hints = vec![];
        loop {
            // Peek without skipping whitespace: hint comments live in the
            // whitespace stream that normal token access jumps over.
            let t = self.peek_nth_token_no_skip_ref(0);
            let Token::Whitespace(ws) = &t.token else {
                break;
            };
            match ws {
                Whitespace::SingleLineComment { comment, prefix } => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::SingleLine {
                                prefix: prefix.clone(),
                            },
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::MultiLineComment(comment) => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::MultiLine,
                        });
                    }
                    self.next_token_no_skip();
                }
                // Plain whitespace between comments is skipped.
                Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
                    self.next_token_no_skip();
                }
            }
        }
        Ok(hints)
    }
15635
15636 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15639 let (before_plus, text) = comment.split_once('+')?;
15640 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15641 Some((before_plus.to_string(), text.to_string()))
15642 } else {
15643 None
15644 }
15645 }
15646
    /// Parses SELECT modifiers (`HIGH_PRIORITY`, `STRAIGHT_JOIN`, the
    /// `SQL_*` flags) interleaved with `ALL`/`DISTINCT`/`DISTINCTROW`.
    /// Returns the collected modifiers (`None` if no flag was set) together
    /// with any distinct quantifier encountered.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                // Rewind so parse_all_or_distinct re-reads the keyword
                // (it also handles e.g. DISTINCT ON).
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                // Reached when ALL/DISTINCT/DISTINCTROW appears after a
                // distinct quantifier has already been set.
                _ => {
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Only report modifiers when at least one flag was actually set.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15708
15709 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
15710 if !dialect_of!(self is BigQueryDialect) {
15711 return Ok(None);
15712 }
15713
15714 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
15715 Some(ValueTableMode::DistinctAsValue)
15716 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
15717 Some(ValueTableMode::DistinctAsStruct)
15718 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
15719 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
15720 {
15721 Some(ValueTableMode::AsValue)
15722 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
15723 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
15724 {
15725 Some(ValueTableMode::AsStruct)
15726 } else if self.parse_keyword(Keyword::AS) {
15727 self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
15728 } else {
15729 None
15730 };
15731
15732 Ok(mode)
15733 }
15734
15735 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15739 where
15740 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15741 {
15742 let current_state = self.state;
15743 self.state = state;
15744 let res = f(self);
15745 self.state = current_state;
15746 res
15747 }
15748
15749 pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
15751 let mut clauses = Vec::with_capacity(2);
15752 loop {
15753 if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
15754 clauses.push(ConnectByKind::StartWith {
15755 start_token: self.token_at(idx).clone().into(),
15756 condition: self.parse_expr()?.into(),
15757 });
15758 } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
15759 {
15760 clauses.push(ConnectByKind::ConnectBy {
15761 connect_token: self.token_at(idx).clone().into(),
15762 nocycle: self.parse_keyword(Keyword::NOCYCLE),
15763 relationships: self.with_state(ParserState::ConnectBy, |parser| {
15764 parser.parse_comma_separated(Parser::parse_expr)
15765 })?,
15766 });
15767 } else {
15768 break;
15769 }
15770 }
15771 Ok(clauses)
15772 }
15773
15774 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15776 let token1 = self.next_token();
15777 let token2 = self.next_token();
15778 let token3 = self.next_token();
15779
15780 let table_name;
15781 let schema_name;
15782 if token2 == Token::Period {
15783 match token1.token {
15784 Token::Word(w) => {
15785 schema_name = w.value;
15786 }
15787 _ => {
15788 return self.expected("Schema name", token1);
15789 }
15790 }
15791 match token3.token {
15792 Token::Word(w) => {
15793 table_name = w.value;
15794 }
15795 _ => {
15796 return self.expected("Table name", token3);
15797 }
15798 }
15799 Ok(Table {
15800 table_name: Some(table_name),
15801 schema_name: Some(schema_name),
15802 })
15803 } else {
15804 match token1.token {
15805 Token::Word(w) => {
15806 table_name = w.value;
15807 }
15808 _ => {
15809 return self.expected("Table name", token1);
15810 }
15811 }
15812 Ok(Table {
15813 table_name: Some(table_name),
15814 schema_name: None,
15815 })
15816 }
15817 }
15818
15819 fn parse_set_role(
15821 &mut self,
15822 modifier: Option<ContextModifier>,
15823 ) -> Result<Statement, ParserError> {
15824 self.expect_keyword_is(Keyword::ROLE)?;
15825
15826 let role_name = if self.parse_keyword(Keyword::NONE) {
15827 None
15828 } else {
15829 Some(self.parse_identifier()?)
15830 };
15831 Ok(Statement::Set(Set::SetRole {
15832 context_modifier: modifier,
15833 role_name,
15834 }))
15835 }
15836
15837 fn parse_set_values(
15838 &mut self,
15839 parenthesized_assignment: bool,
15840 ) -> Result<Vec<Expr>, ParserError> {
15841 let mut values = vec![];
15842
15843 if parenthesized_assignment {
15844 self.expect_token(&Token::LParen)?;
15845 }
15846
15847 loop {
15848 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15849 expr
15850 } else if let Ok(expr) = self.parse_expr() {
15851 expr
15852 } else {
15853 self.expected_ref("variable value", self.peek_token_ref())?
15854 };
15855
15856 values.push(value);
15857 if self.consume_token(&Token::Comma) {
15858 continue;
15859 }
15860
15861 if parenthesized_assignment {
15862 self.expect_token(&Token::RParen)?;
15863 }
15864 return Ok(values);
15865 }
15866 }
15867
15868 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15869 let modifier =
15870 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15871
15872 Self::keyword_to_modifier(modifier)
15873 }
15874
15875 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15877 let scope = self.parse_context_modifier();
15878
15879 let name = if self.dialect.supports_parenthesized_set_variables()
15880 && self.consume_token(&Token::LParen)
15881 {
15882 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15886 } else {
15887 self.parse_object_name(false)?
15888 };
15889
15890 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15891 return self.expected_ref("assignment operator", self.peek_token_ref());
15892 }
15893
15894 let value = self.parse_expr()?;
15895
15896 Ok(SetAssignment { scope, name, value })
15897 }
15898
    /// Parses everything that can follow the `SET` keyword: role changes,
    /// time zone, `NAMES`, transaction characteristics, session
    /// authorization, and single/multiple variable assignments.
    ///
    /// The branch order matters: each check consumes tokens as it matches,
    /// and several fallbacks rely on `prev_token` to rewind.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive's `SET HIVEVAR:name = value` form.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // An optional SESSION/LOCAL/GLOBAL scope prefix; not combined with
        // HIVEVAR.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // Speculatively try `SET [scope] ROLE ...`; on failure `maybe_parse`
        // rewinds so the generic forms below still see all their tokens.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`.
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // With an explicit `=`/`TO` this is treated as a plain
                // assignment to the TIMEZONE variable.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // `SET TIME ZONE <value>` without an assignment operator.
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // `SET NAMES DEFAULT` or `SET NAMES <charset> [COLLATE <coll>]`.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET [SESSION] CHARACTERISTICS AS TRANSACTION <modes>`.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT <id>` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET SESSION AUTHORIZATION ...` requires an explicit scope.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            // `parse_set_assignment` re-reads the scope keyword itself, so
            // rewind if one was consumed above.
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten to SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // `SET (a, b, c) = (...)` where the dialect allows parenthesized
        // variable lists; otherwise a single (possibly qualified) name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // Operator-less `SET <param> ...` (e.g. MSSQL session params): rewind
        // so the parameter name token can be re-read.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
16068
16069 pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
16071 if self.parse_keyword(Keyword::STATISTICS) {
16072 let topic = match self.parse_one_of_keywords(&[
16073 Keyword::IO,
16074 Keyword::PROFILE,
16075 Keyword::TIME,
16076 Keyword::XML,
16077 ]) {
16078 Some(Keyword::IO) => SessionParamStatsTopic::IO,
16079 Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
16080 Some(Keyword::TIME) => SessionParamStatsTopic::Time,
16081 Some(Keyword::XML) => SessionParamStatsTopic::Xml,
16082 _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
16083 };
16084 let value = self.parse_session_param_value()?;
16085 Ok(
16086 Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
16087 topic,
16088 value,
16089 }))
16090 .into(),
16091 )
16092 } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
16093 let obj = self.parse_object_name(false)?;
16094 let value = self.parse_session_param_value()?;
16095 Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
16096 SetSessionParamIdentityInsert { obj, value },
16097 ))
16098 .into())
16099 } else if self.parse_keyword(Keyword::OFFSETS) {
16100 let keywords = self.parse_comma_separated(|parser| {
16101 let next_token = parser.next_token();
16102 match &next_token.token {
16103 Token::Word(w) => Ok(w.to_string()),
16104 _ => parser.expected("SQL keyword", next_token),
16105 }
16106 })?;
16107 let value = self.parse_session_param_value()?;
16108 Ok(
16109 Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
16110 keywords,
16111 value,
16112 }))
16113 .into(),
16114 )
16115 } else {
16116 let names = self.parse_comma_separated(|parser| {
16117 let next_token = parser.next_token();
16118 match next_token.token {
16119 Token::Word(w) => Ok(w.to_string()),
16120 _ => parser.expected("Session param name", next_token),
16121 }
16122 })?;
16123 let value = self.parse_expr()?.to_string();
16124 Ok(
16125 Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
16126 names,
16127 value,
16128 }))
16129 .into(),
16130 )
16131 }
16132 }
16133
16134 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
16135 if self.parse_keyword(Keyword::ON) {
16136 Ok(SessionParamValue::On)
16137 } else if self.parse_keyword(Keyword::OFF) {
16138 Ok(SessionParamValue::Off)
16139 } else {
16140 self.expected_ref("ON or OFF", self.peek_token_ref())
16141 }
16142 }
16143
    /// Parses a `SHOW ...` statement. A run of optional leading modifiers
    /// (TERSE, EXTENDED, FULL, SESSION, GLOBAL, EXTERNAL) is consumed first,
    /// then the statement kind is selected by the next keyword. Branch order
    /// matters because each `parse_keyword*` call consumes tokens on match.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // EXTENDED/FULL only combine with the COLUMNS/TABLES/VIEWS
            // branches above; anything else is rejected here.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): if VARIABLES matches but the dialect check fails,
            // the keyword stays consumed before later branches run — this
            // mirrors the existing behavior; confirm it is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <identifiers>` for dialect-specific variables.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
16207
16208 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
16209 Ok(Statement::ShowCharset(ShowCharset {
16211 is_shorthand,
16212 filter: self.parse_show_statement_filter()?,
16213 }))
16214 }
16215
16216 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
16217 let history = self.parse_keyword(Keyword::HISTORY);
16218 let show_options = self.parse_show_stmt_options()?;
16219 Ok(Statement::ShowCatalogs {
16220 terse,
16221 history,
16222 show_options,
16223 })
16224 }
16225
16226 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
16227 let history = self.parse_keyword(Keyword::HISTORY);
16228 let show_options = self.parse_show_stmt_options()?;
16229 Ok(Statement::ShowDatabases {
16230 terse,
16231 history,
16232 show_options,
16233 })
16234 }
16235
16236 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
16237 let history = self.parse_keyword(Keyword::HISTORY);
16238 let show_options = self.parse_show_stmt_options()?;
16239 Ok(Statement::ShowSchemas {
16240 terse,
16241 history,
16242 show_options,
16243 })
16244 }
16245
16246 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
16248 let obj_type = match self.expect_one_of_keywords(&[
16249 Keyword::TABLE,
16250 Keyword::TRIGGER,
16251 Keyword::FUNCTION,
16252 Keyword::PROCEDURE,
16253 Keyword::EVENT,
16254 Keyword::VIEW,
16255 ])? {
16256 Keyword::TABLE => Ok(ShowCreateObject::Table),
16257 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
16258 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
16259 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
16260 Keyword::EVENT => Ok(ShowCreateObject::Event),
16261 Keyword::VIEW => Ok(ShowCreateObject::View),
16262 keyword => Err(ParserError::ParserError(format!(
16263 "Unable to map keyword to ShowCreateObject: {keyword:?}"
16264 ))),
16265 }?;
16266
16267 let obj_name = self.parse_object_name(false)?;
16268
16269 Ok(Statement::ShowCreate { obj_type, obj_name })
16270 }
16271
16272 pub fn parse_show_columns(
16274 &mut self,
16275 extended: bool,
16276 full: bool,
16277 ) -> Result<Statement, ParserError> {
16278 let show_options = self.parse_show_stmt_options()?;
16279 Ok(Statement::ShowColumns {
16280 extended,
16281 full,
16282 show_options,
16283 })
16284 }
16285
16286 fn parse_show_tables(
16287 &mut self,
16288 terse: bool,
16289 extended: bool,
16290 full: bool,
16291 external: bool,
16292 ) -> Result<Statement, ParserError> {
16293 let history = !external && self.parse_keyword(Keyword::HISTORY);
16294 let show_options = self.parse_show_stmt_options()?;
16295 Ok(Statement::ShowTables {
16296 terse,
16297 history,
16298 extended,
16299 full,
16300 external,
16301 show_options,
16302 })
16303 }
16304
16305 fn parse_show_views(
16306 &mut self,
16307 terse: bool,
16308 materialized: bool,
16309 ) -> Result<Statement, ParserError> {
16310 let show_options = self.parse_show_stmt_options()?;
16311 Ok(Statement::ShowViews {
16312 materialized,
16313 terse,
16314 show_options,
16315 })
16316 }
16317
16318 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
16320 let filter = self.parse_show_statement_filter()?;
16321 Ok(Statement::ShowFunctions { filter })
16322 }
16323
16324 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
16326 let filter = self.parse_show_statement_filter()?;
16327 Ok(Statement::ShowCollation { filter })
16328 }
16329
16330 pub fn parse_show_statement_filter(
16332 &mut self,
16333 ) -> Result<Option<ShowStatementFilter>, ParserError> {
16334 if self.parse_keyword(Keyword::LIKE) {
16335 Ok(Some(ShowStatementFilter::Like(
16336 self.parse_literal_string()?,
16337 )))
16338 } else if self.parse_keyword(Keyword::ILIKE) {
16339 Ok(Some(ShowStatementFilter::ILike(
16340 self.parse_literal_string()?,
16341 )))
16342 } else if self.parse_keyword(Keyword::WHERE) {
16343 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
16344 } else {
16345 self.maybe_parse(|parser| -> Result<String, ParserError> {
16346 parser.parse_literal_string()
16347 })?
16348 .map_or(Ok(None), |filter| {
16349 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
16350 })
16351 }
16352 }
16353
16354 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
16356 let parsed_keyword = if dialect_of!(self is HiveDialect) {
16358 if self.parse_keyword(Keyword::DEFAULT) {
16360 return Ok(Statement::Use(Use::Default));
16361 }
16362 None } else if dialect_of!(self is DatabricksDialect) {
16364 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
16365 } else if dialect_of!(self is SnowflakeDialect) {
16366 self.parse_one_of_keywords(&[
16367 Keyword::DATABASE,
16368 Keyword::SCHEMA,
16369 Keyword::WAREHOUSE,
16370 Keyword::ROLE,
16371 Keyword::SECONDARY,
16372 ])
16373 } else {
16374 None };
16376
16377 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
16378 self.parse_secondary_roles()?
16379 } else {
16380 let obj_name = self.parse_object_name(false)?;
16381 match parsed_keyword {
16382 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
16383 Some(Keyword::DATABASE) => Use::Database(obj_name),
16384 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
16385 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
16386 Some(Keyword::ROLE) => Use::Role(obj_name),
16387 _ => Use::Object(obj_name),
16388 }
16389 };
16390
16391 Ok(Statement::Use(result))
16392 }
16393
16394 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
16395 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
16396 if self.parse_keyword(Keyword::NONE) {
16397 Ok(Use::SecondaryRoles(SecondaryRoles::None))
16398 } else if self.parse_keyword(Keyword::ALL) {
16399 Ok(Use::SecondaryRoles(SecondaryRoles::All))
16400 } else {
16401 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16402 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
16403 }
16404 }
16405
16406 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
16408 let relation = self.parse_table_factor()?;
16409 let joins = self.parse_joins()?;
16413 Ok(TableWithJoins { relation, joins })
16414 }
16415
    /// Parses zero or more JOIN clauses following a table factor, stopping at
    /// the first token that does not begin a join.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix accepted before any join form.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow an ON/USING constraint on CROSS JOIN;
                // only then is one parsed here.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // OUTER here must be OUTER APPLY (bare OUTER JOIN is rejected
                // further below via the peeked-keyword path).
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // ASOF JOIN <table> MATCH_CONDITION (<expr>) [constraint]
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek — don't consume — the keyword selecting the join type;
                // the `break` below must leave it for the caller.
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm evaluates to a JoinOperator tuple-variant
                // *constructor*, applied below to the parsed constraint.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the peeked LEFT/RIGHT.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        // Consume the peeked ANTI.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        // Consume the peeked SEMI.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // Consume the peeked FULL; OUTER is optional.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // Bare `OUTER JOIN` without LEFT/RIGHT/FULL.
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        // Consume the peeked STRAIGHT_JOIN.
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: end of the join list.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // When the dialect does not treat a parenthesis-free chain of
                // joins as left-associative, fold the trailing joins into a
                // nested join under this relation.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16584
16585 fn peek_parens_less_nested_join(&self) -> bool {
16586 matches!(
16587 self.peek_token_ref().token,
16588 Token::Word(Word {
16589 keyword: Keyword::JOIN
16590 | Keyword::INNER
16591 | Keyword::LEFT
16592 | Keyword::RIGHT
16593 | Keyword::FULL,
16594 ..
16595 })
16596 )
16597 }
16598
16599 #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
16601 pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16602 let _guard = self.recursion_counter.try_decrease()?;
16603 if self.parse_keyword(Keyword::LATERAL) {
16604 if self.consume_token(&Token::LParen) {
16606 self.parse_derived_table_factor(Lateral)
16607 } else {
16608 let name = self.parse_object_name(false)?;
16609 self.expect_token(&Token::LParen)?;
16610 let args = self.parse_optional_args()?;
16611 let alias = self.maybe_parse_table_alias()?;
16612 Ok(TableFactor::Function {
16613 lateral: true,
16614 name,
16615 args,
16616 alias,
16617 })
16618 }
16619 } else if self.parse_keyword(Keyword::TABLE) {
16620 self.expect_token(&Token::LParen)?;
16622 let expr = self.parse_expr()?;
16623 self.expect_token(&Token::RParen)?;
16624 let alias = self.maybe_parse_table_alias()?;
16625 Ok(TableFactor::TableFunction { expr, alias })
16626 } else if self.consume_token(&Token::LParen) {
16627 if let Some(mut table) =
16649 self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
16650 {
16651 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
16652 {
16653 table = match kw {
16654 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
16655 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
16656 unexpected_keyword => return Err(ParserError::ParserError(
16657 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
16658 )),
16659 }
16660 }
16661 return Ok(table);
16662 }
16663
16664 let mut table_and_joins = self.parse_table_and_joins()?;
16671
16672 #[allow(clippy::if_same_then_else)]
16673 if !table_and_joins.joins.is_empty() {
16674 self.expect_token(&Token::RParen)?;
16675 let alias = self.maybe_parse_table_alias()?;
16676 Ok(TableFactor::NestedJoin {
16677 table_with_joins: Box::new(table_and_joins),
16678 alias,
16679 }) } else if let TableFactor::NestedJoin {
16681 table_with_joins: _,
16682 alias: _,
16683 } = &table_and_joins.relation
16684 {
16685 self.expect_token(&Token::RParen)?;
16688 let alias = self.maybe_parse_table_alias()?;
16689 Ok(TableFactor::NestedJoin {
16690 table_with_joins: Box::new(table_and_joins),
16691 alias,
16692 })
16693 } else if self.dialect.supports_parens_around_table_factor() {
16694 self.expect_token(&Token::RParen)?;
16701
16702 if let Some(outer_alias) = self.maybe_parse_table_alias()? {
16703 match &mut table_and_joins.relation {
16706 TableFactor::Derived { alias, .. }
16707 | TableFactor::Table { alias, .. }
16708 | TableFactor::Function { alias, .. }
16709 | TableFactor::UNNEST { alias, .. }
16710 | TableFactor::JsonTable { alias, .. }
16711 | TableFactor::XmlTable { alias, .. }
16712 | TableFactor::OpenJsonTable { alias, .. }
16713 | TableFactor::TableFunction { alias, .. }
16714 | TableFactor::Pivot { alias, .. }
16715 | TableFactor::Unpivot { alias, .. }
16716 | TableFactor::MatchRecognize { alias, .. }
16717 | TableFactor::SemanticView { alias, .. }
16718 | TableFactor::NestedJoin { alias, .. } => {
16719 if let Some(inner_alias) = alias {
16721 return Err(ParserError::ParserError(format!(
16722 "duplicate alias {inner_alias}"
16723 )));
16724 }
16725 alias.replace(outer_alias);
16729 }
16730 };
16731 }
16732 Ok(table_and_joins.relation)
16734 } else {
16735 self.expected_ref("joined table", self.peek_token_ref())
16738 }
16739 } else if self.dialect.supports_values_as_table_factor()
16740 && matches!(
16741 self.peek_tokens(),
16742 [
16743 Token::Word(Word {
16744 keyword: Keyword::VALUES,
16745 ..
16746 }),
16747 Token::LParen
16748 ]
16749 )
16750 {
16751 self.expect_keyword_is(Keyword::VALUES)?;
16752
16753 let values = SetExpr::Values(self.parse_values(false, false)?);
16757 let alias = self.maybe_parse_table_alias()?;
16758 Ok(TableFactor::Derived {
16759 lateral: false,
16760 subquery: Box::new(Query {
16761 with: None,
16762 body: Box::new(values),
16763 order_by: None,
16764 limit_clause: None,
16765 fetch: None,
16766 locks: vec![],
16767 for_clause: None,
16768 settings: None,
16769 format_clause: None,
16770 pipe_operators: vec![],
16771 }),
16772 alias,
16773 sample: None,
16774 })
16775 } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
16776 && self.parse_keyword(Keyword::UNNEST)
16777 {
16778 self.expect_token(&Token::LParen)?;
16779 let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
16780 self.expect_token(&Token::RParen)?;
16781
16782 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
16783 let alias = match self.maybe_parse_table_alias() {
16784 Ok(Some(alias)) => Some(alias),
16785 Ok(None) => None,
16786 Err(e) => return Err(e),
16787 };
16788
16789 let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
16790 Ok(()) => true,
16791 Err(_) => false,
16792 };
16793
16794 let with_offset_alias = if with_offset {
16795 match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
16796 Ok(Some(alias)) => Some(alias),
16797 Ok(None) => None,
16798 Err(e) => return Err(e),
16799 }
16800 } else {
16801 None
16802 };
16803
16804 Ok(TableFactor::UNNEST {
16805 alias,
16806 array_exprs,
16807 with_offset,
16808 with_offset_alias,
16809 with_ordinality,
16810 })
16811 } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
16812 let json_expr = self.parse_expr()?;
16813 self.expect_token(&Token::Comma)?;
16814 let json_path = self.parse_value()?;
16815 self.expect_keyword_is(Keyword::COLUMNS)?;
16816 self.expect_token(&Token::LParen)?;
16817 let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
16818 self.expect_token(&Token::RParen)?;
16819 self.expect_token(&Token::RParen)?;
16820 let alias = self.maybe_parse_table_alias()?;
16821 Ok(TableFactor::JsonTable {
16822 json_expr,
16823 json_path,
16824 columns,
16825 alias,
16826 })
16827 } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
16828 self.prev_token();
16829 self.parse_open_json_table_factor()
16830 } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
16831 self.prev_token();
16832 self.parse_xml_table_factor()
16833 } else if self.dialect.supports_semantic_view_table_factor()
16834 && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
16835 {
16836 self.parse_semantic_view_table_factor()
16837 } else if self.peek_token_ref().token == Token::AtSign {
16838 self.parse_snowflake_stage_table_factor()
16840 } else {
16841 let name = self.parse_object_name(true)?;
16842
16843 let json_path = match &self.peek_token_ref().token {
16844 Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
16845 _ => None,
16846 };
16847
16848 let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
16849 && self.parse_keyword(Keyword::PARTITION)
16850 {
16851 self.parse_parenthesized_identifiers()?
16852 } else {
16853 vec![]
16854 };
16855
16856 let version = self.maybe_parse_table_version()?;
16858
16859 let args = if self.consume_token(&Token::LParen) {
16861 Some(self.parse_table_function_args()?)
16862 } else {
16863 None
16864 };
16865
16866 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
16867
16868 let mut sample = None;
16869 if self.dialect.supports_table_sample_before_alias() {
16870 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
16871 sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
16872 }
16873 }
16874
16875 let alias = self.maybe_parse_table_alias()?;
16876
16877 let index_hints = if self.dialect.supports_table_hints() {
16879 self.maybe_parse(|p| p.parse_table_index_hints())?
16880 .unwrap_or(vec![])
16881 } else {
16882 vec![]
16883 };
16884
16885 let mut with_hints = vec![];
16887 if self.parse_keyword(Keyword::WITH) {
16888 if self.consume_token(&Token::LParen) {
16889 with_hints = self.parse_comma_separated(Parser::parse_expr)?;
16890 self.expect_token(&Token::RParen)?;
16891 } else {
16892 self.prev_token();
16894 }
16895 };
16896
16897 if !self.dialect.supports_table_sample_before_alias() {
16898 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
16899 sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
16900 }
16901 }
16902
16903 let mut table = TableFactor::Table {
16904 name,
16905 alias,
16906 args,
16907 with_hints,
16908 version,
16909 partitions,
16910 with_ordinality,
16911 json_path,
16912 sample,
16913 index_hints,
16914 };
16915
16916 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
16917 table = match kw {
16918 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
16919 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
16920 unexpected_keyword => return Err(ParserError::ParserError(
16921 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
16922 )),
16923 }
16924 }
16925
16926 if self.dialect.supports_match_recognize()
16927 && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
16928 {
16929 table = self.parse_match_recognize(table)?;
16930 }
16931
16932 Ok(table)
16933 }
16934 }
16935
16936 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16941 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16943
16944 let args = if self.consume_token(&Token::LParen) {
16946 Some(self.parse_table_function_args()?)
16947 } else {
16948 None
16949 };
16950
16951 let alias = self.maybe_parse_table_alias()?;
16952
16953 Ok(TableFactor::Table {
16954 name,
16955 alias,
16956 args,
16957 with_hints: vec![],
16958 version: None,
16959 partitions: vec![],
16960 with_ordinality: false,
16961 json_path: None,
16962 sample: None,
16963 index_hints: vec![],
16964 })
16965 }
16966
16967 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16968 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16969 TableSampleModifier::TableSample
16970 } else if self.parse_keyword(Keyword::SAMPLE) {
16971 TableSampleModifier::Sample
16972 } else {
16973 return Ok(None);
16974 };
16975 self.parse_table_sample(modifier).map(Some)
16976 }
16977
    /// Parses the body of a `TABLESAMPLE`/`SAMPLE` clause, after the
    /// introducing keyword has already been consumed (see
    /// [`Self::maybe_parse_table_sample`]).
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method keyword.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // The quantity/bucket specification may be parenthesized.
        let parenthesized = self.consume_token(&Token::LParen);

        // `BUCKET n OUT OF m [ON expr]` is only recognized inside
        // parentheses; otherwise parse a quantity with an optional unit.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    // Not a regular expression: accept a bare word (e.g. a
                    // byte-length literal such as `100M`) as a placeholder.
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            // Optional quantity unit.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        // Close the paren opened before the quantity/bucket spec, if any.
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic-sampling seed: REPEATABLE(n) or SEED(n).
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional OFFSET expression.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
17072
17073 fn parse_table_sample_seed(
17074 &mut self,
17075 modifier: TableSampleSeedModifier,
17076 ) -> Result<TableSampleSeed, ParserError> {
17077 self.expect_token(&Token::LParen)?;
17078 let value = self.parse_number_value()?;
17079 self.expect_token(&Token::RParen)?;
17080 Ok(TableSampleSeed { modifier, value })
17081 }
17082
17083 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17086 self.expect_token(&Token::LParen)?;
17087 let json_expr = self.parse_expr()?;
17088 let json_path = if self.consume_token(&Token::Comma) {
17089 Some(self.parse_value()?)
17090 } else {
17091 None
17092 };
17093 self.expect_token(&Token::RParen)?;
17094 let columns = if self.parse_keyword(Keyword::WITH) {
17095 self.expect_token(&Token::LParen)?;
17096 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
17097 self.expect_token(&Token::RParen)?;
17098 columns
17099 } else {
17100 Vec::new()
17101 };
17102 let alias = self.maybe_parse_table_alias()?;
17103 Ok(TableFactor::OpenJsonTable {
17104 json_expr,
17105 json_path,
17106 columns,
17107 alias,
17108 })
17109 }
17110
17111 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17112 self.expect_token(&Token::LParen)?;
17113 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
17114 self.expect_token(&Token::LParen)?;
17115 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
17116 self.expect_token(&Token::RParen)?;
17117 self.expect_token(&Token::Comma)?;
17118 namespaces
17119 } else {
17120 vec![]
17121 };
17122 let row_expression = self.parse_expr()?;
17123 let passing = self.parse_xml_passing_clause()?;
17124 self.expect_keyword_is(Keyword::COLUMNS)?;
17125 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
17126 self.expect_token(&Token::RParen)?;
17127 let alias = self.maybe_parse_table_alias()?;
17128 Ok(TableFactor::XmlTable {
17129 namespaces,
17130 row_expression,
17131 passing,
17132 columns,
17133 alias,
17134 })
17135 }
17136
17137 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
17138 let uri = self.parse_expr()?;
17139 self.expect_keyword_is(Keyword::AS)?;
17140 let name = self.parse_identifier()?;
17141 Ok(XmlNamespaceDefinition { uri, name })
17142 }
17143
17144 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
17145 let name = self.parse_identifier()?;
17146
17147 let option = if self.parse_keyword(Keyword::FOR) {
17148 self.expect_keyword(Keyword::ORDINALITY)?;
17149 XmlTableColumnOption::ForOrdinality
17150 } else {
17151 let r#type = self.parse_data_type()?;
17152 let mut path = None;
17153 let mut default = None;
17154
17155 if self.parse_keyword(Keyword::PATH) {
17156 path = Some(self.parse_expr()?);
17157 }
17158
17159 if self.parse_keyword(Keyword::DEFAULT) {
17160 default = Some(self.parse_expr()?);
17161 }
17162
17163 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
17164 if !not_null {
17165 let _ = self.parse_keyword(Keyword::NULL);
17167 }
17168
17169 XmlTableColumnOption::NamedInfo {
17170 r#type,
17171 path,
17172 default,
17173 nullable: !not_null,
17174 }
17175 };
17176 Ok(XmlTableColumn { name, option })
17177 }
17178
    /// Parses an optional `PASSING [BY VALUE] expr [AS alias], ...` clause.
    /// Returns an empty clause when the PASSING keyword is absent.
    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
        let mut arguments = vec![];
        if self.parse_keyword(Keyword::PASSING) {
            loop {
                // NOTE(review): if BY is parsed but VALUE does not follow,
                // the expect_keyword error is swallowed by `.is_ok()` while
                // BY stays consumed — confirm this leniency is intentional.
                let by_value =
                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
                let expr = self.parse_expr()?;
                let alias = if self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };
                arguments.push(XmlPassingArgument {
                    expr,
                    alias,
                    by_value,
                });
                // Arguments are comma-separated; stop at the first non-comma.
                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }
        }
        Ok(XmlPassingClause { arguments })
    }
17203
17204 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17206 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
17207 self.expect_token(&Token::LParen)?;
17208
17209 let name = self.parse_object_name(true)?;
17210
17211 let mut dimensions = Vec::new();
17213 let mut metrics = Vec::new();
17214 let mut facts = Vec::new();
17215 let mut where_clause = None;
17216
17217 while self.peek_token_ref().token != Token::RParen {
17218 if self.parse_keyword(Keyword::DIMENSIONS) {
17219 if !dimensions.is_empty() {
17220 return Err(ParserError::ParserError(
17221 "DIMENSIONS clause can only be specified once".to_string(),
17222 ));
17223 }
17224 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17225 } else if self.parse_keyword(Keyword::METRICS) {
17226 if !metrics.is_empty() {
17227 return Err(ParserError::ParserError(
17228 "METRICS clause can only be specified once".to_string(),
17229 ));
17230 }
17231 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17232 } else if self.parse_keyword(Keyword::FACTS) {
17233 if !facts.is_empty() {
17234 return Err(ParserError::ParserError(
17235 "FACTS clause can only be specified once".to_string(),
17236 ));
17237 }
17238 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17239 } else if self.parse_keyword(Keyword::WHERE) {
17240 if where_clause.is_some() {
17241 return Err(ParserError::ParserError(
17242 "WHERE clause can only be specified once".to_string(),
17243 ));
17244 }
17245 where_clause = Some(self.parse_expr()?);
17246 } else {
17247 let tok = self.peek_token_ref();
17248 return parser_err!(
17249 format!(
17250 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
17251 tok.token
17252 ),
17253 tok.span.start
17254 )?;
17255 }
17256 }
17257
17258 self.expect_token(&Token::RParen)?;
17259
17260 let alias = self.maybe_parse_table_alias()?;
17261
17262 Ok(TableFactor::SemanticView {
17263 name,
17264 dimensions,
17265 metrics,
17266 facts,
17267 where_clause,
17268 alias,
17269 })
17270 }
17271
    /// Parses the parenthesized body of a `MATCH_RECOGNIZE(...)` clause
    /// applied to `table`; the MATCH_RECOGNIZE keyword has already been
    /// consumed by the caller.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional PARTITION BY expression list.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional ORDER BY list.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional MEASURES: `expr [AS] alias` pairs; AS may be omitted.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional rows-per-match mode: ONE ROW PER MATCH, or ALL ROWS PER
        // MATCH with an optional empty/unmatched-rows handling suffix.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional AFTER MATCH SKIP strategy; once AFTER MATCH SKIP is seen,
        // an unrecognized continuation is an error.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory PATTERN ( <row pattern> ).
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory DEFINE: `symbol AS condition` pairs.
        self.expect_keyword_is(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
17376
    /// Parses an atomic MATCH_RECOGNIZE pattern element: the `^`/`$`
    /// anchors, an exclusion `{- symbol -}`, `PERMUTE(sym, ...)`, a
    /// parenthesized group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` — start anchor.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` — end anchor (the tokenizer emits it as a placeholder).
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}` — excluded symbol.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // `PERMUTE(sym, ...)` — only recognized as an unquoted word.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // Parenthesized sub-pattern group.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: rewind the consumed token and read a named symbol.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
17415
    /// Parses a base pattern followed by any number of repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` reaches us as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — NOTE: the match guard consumes
                        // the comma as a side effect; if no comma follows the
                        // number, this arm is skipped and `{n}` below applies.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // No quantifier: rewind and stop accumulating.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            // Each quantifier wraps everything parsed so far.
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17470
17471 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17472 let mut patterns = vec![self.parse_repetition_pattern()?];
17473 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17474 patterns.push(self.parse_repetition_pattern()?);
17475 }
17476 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17477 Ok([pattern]) => Ok(pattern),
17478 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17479 }
17480 }
17481
17482 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17483 let pattern = self.parse_concat_pattern()?;
17484 if self.consume_token(&Token::Pipe) {
17485 match self.parse_pattern()? {
17486 MatchRecognizePattern::Alternation(mut patterns) => {
17488 patterns.insert(0, pattern);
17489 Ok(MatchRecognizePattern::Alternation(patterns))
17490 }
17491 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17492 }
17493 } else {
17494 Ok(pattern)
17495 }
17496 }
17497
    /// Parses an optional table versioning / time-travel clause when the
    /// dialect supports it: `FOR SYSTEM_TIME AS OF expr`, `CHANGES(...)`,
    /// `AT(...)`/`BEFORE(...)`, `TIMESTAMP AS OF expr`, or
    /// `VERSION AS OF n`. Returns `Ok(None)` when none is present.
    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
        if self.dialect.supports_table_versioning() {
            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
            {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
            } else if self.peek_keyword(Keyword::CHANGES) {
                return self.parse_table_version_changes().map(Some);
            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
                // AT(...) / BEFORE(...) are parsed as function calls.
                let func_name = self.parse_object_name(true)?;
                let func = self.parse_function(func_name)?;
                return Ok(Some(TableVersion::Function(func)));
            } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::TimestampAsOf(expr)));
            } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
                // VERSION AS OF takes a numeric version literal.
                let expr = Expr::Value(self.parse_number_value()?);
                return Ok(Some(TableVersion::VersionAsOf(expr)));
            }
        }
        Ok(None)
    }
17521
    /// Parses a `CHANGES(...) AT(...) [END(...)]` table version clause.
    /// Each component is read as an object name applied as a function call.
    /// The caller has already verified (via peek) that CHANGES comes next.
    fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
        let changes_name = self.parse_object_name(true)?;
        let changes = self.parse_function(changes_name)?;
        let at_name = self.parse_object_name(true)?;
        let at = self.parse_function(at_name)?;
        // The trailing END(...) is optional.
        let end = if self.peek_keyword(Keyword::END) {
            let end_name = self.parse_object_name(true)?;
            Some(self.parse_function(end_name)?)
        } else {
            None
        };
        Ok(TableVersion::Changes { changes, at, end })
    }
17545
    /// Parses one JSON_TABLE column definition: `NESTED [PATH] path
    /// COLUMNS (...)`, `name FOR ORDINALITY`, or
    /// `name type [EXISTS] PATH path` with optional
    /// `<handling> ON EMPTY` / `<handling> ON ERROR` suffixes.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        // NESTED columns recurse into an inner column list.
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword before the path value is optional.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        // `name FOR ORDINALITY` produces a row-numbering column.
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?;
        let mut on_empty = None;
        let mut on_error = None;
        // Collect `... ON EMPTY` / `... ON ERROR` suffixes in any order;
        // a repeated suffix of the same kind overwrites the earlier one.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
17589
17590 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17598 let name = self.parse_identifier()?;
17599 let r#type = self.parse_data_type()?;
17600 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17601 self.next_token();
17602 Some(path)
17603 } else {
17604 None
17605 };
17606 let as_json = self.parse_keyword(Keyword::AS);
17607 if as_json {
17608 self.expect_keyword_is(Keyword::JSON)?;
17609 }
17610 Ok(OpenJsonTableColumn {
17611 name,
17612 r#type,
17613 path,
17614 as_json,
17615 })
17616 }
17617
17618 fn parse_json_table_column_error_handling(
17619 &mut self,
17620 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17621 let res = if self.parse_keyword(Keyword::NULL) {
17622 JsonTableColumnErrorHandling::Null
17623 } else if self.parse_keyword(Keyword::ERROR) {
17624 JsonTableColumnErrorHandling::Error
17625 } else if self.parse_keyword(Keyword::DEFAULT) {
17626 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17627 } else {
17628 return Ok(None);
17629 };
17630 self.expect_keyword_is(Keyword::ON)?;
17631 Ok(Some(res))
17632 }
17633
17634 pub fn parse_derived_table_factor(
17636 &mut self,
17637 lateral: IsLateral,
17638 ) -> Result<TableFactor, ParserError> {
17639 let subquery = self.parse_query()?;
17640 self.expect_token(&Token::RParen)?;
17641 let alias = self.maybe_parse_table_alias()?;
17642
17643 let sample = self
17645 .maybe_parse_table_sample()?
17646 .map(TableSampleKind::AfterTableAlias);
17647
17648 Ok(TableFactor::Derived {
17649 lateral: match lateral {
17650 Lateral => true,
17651 NotLateral => false,
17652 },
17653 subquery,
17654 alias,
17655 sample,
17656 })
17657 }
17658
17659 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17682 let expr = self.parse_expr()?;
17683 let alias = if self.parse_keyword(Keyword::AS) {
17684 Some(self.parse_identifier()?)
17685 } else {
17686 None
17687 };
17688
17689 Ok(ExprWithAlias { expr, alias })
17690 }
17691
17692 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17696 let expr = self.parse_expr()?;
17697 let alias = self.parse_identifier_optional_alias()?;
17698 Ok(ExprWithAlias { expr, alias })
17699 }
17700
17701 fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
17703 let function_name = match self.next_token().token {
17704 Token::Word(w) => Ok(w.value),
17705 _ => self.expected_ref("a function identifier", self.peek_token_ref()),
17706 }?;
17707 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
17708 let alias = {
17709 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
17710 kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
17712 }
17713 self.parse_optional_alias_inner(None, validator)?
17714 };
17715 Ok(ExprWithAlias { expr, alias })
17716 }
17717
    /// Parses the parenthesized body of a PIVOT clause applied to `table`:
    /// aggregate functions, the `FOR` value column(s), the `IN (...)` value
    /// source, and an optional `DEFAULT ON NULL (expr)`.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions =
            self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // The FOR target is either a parenthesized column list or a single
        // expression. Both forms parse sub-expressions bounded at BETWEEN
        // precedence — presumably so the following IN keyword is not
        // consumed as part of the expression; confirm against the dialect's
        // precedence table.
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        // IN ( ANY [ORDER BY ...] | <subquery> | <expr list> ).
        self.expect_token(&Token::LParen)?;
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(
                self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
            )
        };
        self.expect_token(&Token::RParen)?;

        // Optional DEFAULT ON NULL (expr).
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
17774
17775 pub fn parse_unpivot_table_factor(
17777 &mut self,
17778 table: TableFactor,
17779 ) -> Result<TableFactor, ParserError> {
17780 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17781 self.expect_keyword_is(Keyword::NULLS)?;
17782 Some(NullInclusion::IncludeNulls)
17783 } else if self.parse_keyword(Keyword::EXCLUDE) {
17784 self.expect_keyword_is(Keyword::NULLS)?;
17785 Some(NullInclusion::ExcludeNulls)
17786 } else {
17787 None
17788 };
17789 self.expect_token(&Token::LParen)?;
17790 let value = self.parse_expr()?;
17791 self.expect_keyword_is(Keyword::FOR)?;
17792 let name = self.parse_identifier()?;
17793 self.expect_keyword_is(Keyword::IN)?;
17794 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17795 p.parse_expr_with_alias()
17796 })?;
17797 self.expect_token(&Token::RParen)?;
17798 let alias = self.maybe_parse_table_alias()?;
17799 Ok(TableFactor::Unpivot {
17800 table: Box::new(table),
17801 value,
17802 null_inclusion,
17803 name,
17804 columns,
17805 alias,
17806 })
17807 }
17808
17809 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17811 if natural {
17812 Ok(JoinConstraint::Natural)
17813 } else if self.parse_keyword(Keyword::ON) {
17814 let constraint = self.parse_expr()?;
17815 Ok(JoinConstraint::On(constraint))
17816 } else if self.parse_keyword(Keyword::USING) {
17817 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17818 Ok(JoinConstraint::Using(columns))
17819 } else {
17820 Ok(JoinConstraint::None)
17821 }
17823 }
17824
17825 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17827 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17828
17829 self.expect_keyword_is(Keyword::TO)?;
17830 let grantees = self.parse_grantees()?;
17831
17832 let with_grant_option =
17833 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17834
17835 let current_grants =
17836 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17837 Some(CurrentGrantsKind::CopyCurrentGrants)
17838 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17839 Some(CurrentGrantsKind::RevokeCurrentGrants)
17840 } else {
17841 None
17842 };
17843
17844 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17845 Some(self.parse_identifier()?)
17846 } else {
17847 None
17848 };
17849
17850 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17851 Some(self.parse_identifier()?)
17852 } else {
17853 None
17854 };
17855
17856 Ok(Grant {
17857 privileges,
17858 objects,
17859 grantees,
17860 with_grant_option,
17861 as_grantor,
17862 granted_by,
17863 current_grants,
17864 })
17865 }
17866
    /// Parses the comma-separated grantee list of a `GRANT`/`DENY`/`REVOKE`
    /// statement. Each grantee may be prefixed by a type keyword
    /// (`ROLE`, `USER`, `SHARE`, ...); an explicit type "sticks" and is reused
    /// for subsequent grantees in the list that carry no keyword of their own.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        // Carries the last explicitly-given grantee type across list items.
        let mut grantee_type = GranteesType::None;
        loop {
            // Multi-word types (`DATABASE ROLE`, `APPLICATION ROLE`) are tried
            // before the bare `APPLICATION` keyword they share a prefix with.
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No keyword: inherit the type from the previous grantee.
                grantee_type.clone()
            };

            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                // The dialect reserves this type, i.e. the keyword is really
                // part of the grantee's name — push the token back so it is
                // re-read by the name parser below.
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC has no name component.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // `namespace:ident` form (presumably Redshift-style
                    // namespaced grantees — confirm against callers); fold both
                    // parts into a single identifier.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17934
17935 pub fn parse_grant_deny_revoke_privileges_objects(
17937 &mut self,
17938 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17939 let privileges = if self.parse_keyword(Keyword::ALL) {
17940 Privileges::All {
17941 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17942 }
17943 } else {
17944 let actions = self.parse_actions_list()?;
17945 Privileges::Actions(actions)
17946 };
17947
17948 let objects = if self.parse_keyword(Keyword::ON) {
17949 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17950 Some(GrantObjects::AllTablesInSchema {
17951 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17952 })
17953 } else if self.parse_keywords(&[
17954 Keyword::ALL,
17955 Keyword::EXTERNAL,
17956 Keyword::TABLES,
17957 Keyword::IN,
17958 Keyword::SCHEMA,
17959 ]) {
17960 Some(GrantObjects::AllExternalTablesInSchema {
17961 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17962 })
17963 } else if self.parse_keywords(&[
17964 Keyword::ALL,
17965 Keyword::VIEWS,
17966 Keyword::IN,
17967 Keyword::SCHEMA,
17968 ]) {
17969 Some(GrantObjects::AllViewsInSchema {
17970 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17971 })
17972 } else if self.parse_keywords(&[
17973 Keyword::ALL,
17974 Keyword::MATERIALIZED,
17975 Keyword::VIEWS,
17976 Keyword::IN,
17977 Keyword::SCHEMA,
17978 ]) {
17979 Some(GrantObjects::AllMaterializedViewsInSchema {
17980 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17981 })
17982 } else if self.parse_keywords(&[
17983 Keyword::ALL,
17984 Keyword::FUNCTIONS,
17985 Keyword::IN,
17986 Keyword::SCHEMA,
17987 ]) {
17988 Some(GrantObjects::AllFunctionsInSchema {
17989 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17990 })
17991 } else if self.parse_keywords(&[
17992 Keyword::FUTURE,
17993 Keyword::SCHEMAS,
17994 Keyword::IN,
17995 Keyword::DATABASE,
17996 ]) {
17997 Some(GrantObjects::FutureSchemasInDatabase {
17998 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17999 })
18000 } else if self.parse_keywords(&[
18001 Keyword::FUTURE,
18002 Keyword::TABLES,
18003 Keyword::IN,
18004 Keyword::SCHEMA,
18005 ]) {
18006 Some(GrantObjects::FutureTablesInSchema {
18007 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18008 })
18009 } else if self.parse_keywords(&[
18010 Keyword::FUTURE,
18011 Keyword::EXTERNAL,
18012 Keyword::TABLES,
18013 Keyword::IN,
18014 Keyword::SCHEMA,
18015 ]) {
18016 Some(GrantObjects::FutureExternalTablesInSchema {
18017 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18018 })
18019 } else if self.parse_keywords(&[
18020 Keyword::FUTURE,
18021 Keyword::VIEWS,
18022 Keyword::IN,
18023 Keyword::SCHEMA,
18024 ]) {
18025 Some(GrantObjects::FutureViewsInSchema {
18026 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18027 })
18028 } else if self.parse_keywords(&[
18029 Keyword::FUTURE,
18030 Keyword::MATERIALIZED,
18031 Keyword::VIEWS,
18032 Keyword::IN,
18033 Keyword::SCHEMA,
18034 ]) {
18035 Some(GrantObjects::FutureMaterializedViewsInSchema {
18036 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18037 })
18038 } else if self.parse_keywords(&[
18039 Keyword::ALL,
18040 Keyword::SEQUENCES,
18041 Keyword::IN,
18042 Keyword::SCHEMA,
18043 ]) {
18044 Some(GrantObjects::AllSequencesInSchema {
18045 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18046 })
18047 } else if self.parse_keywords(&[
18048 Keyword::FUTURE,
18049 Keyword::SEQUENCES,
18050 Keyword::IN,
18051 Keyword::SCHEMA,
18052 ]) {
18053 Some(GrantObjects::FutureSequencesInSchema {
18054 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
18055 })
18056 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
18057 Some(GrantObjects::ResourceMonitors(
18058 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18059 ))
18060 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
18061 Some(GrantObjects::ComputePools(
18062 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18063 ))
18064 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
18065 Some(GrantObjects::FailoverGroup(
18066 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18067 ))
18068 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
18069 Some(GrantObjects::ReplicationGroup(
18070 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18071 ))
18072 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
18073 Some(GrantObjects::ExternalVolumes(
18074 self.parse_comma_separated(|p| p.parse_object_name(false))?,
18075 ))
18076 } else {
18077 let object_type = self.parse_one_of_keywords(&[
18078 Keyword::SEQUENCE,
18079 Keyword::DATABASE,
18080 Keyword::SCHEMA,
18081 Keyword::TABLE,
18082 Keyword::VIEW,
18083 Keyword::WAREHOUSE,
18084 Keyword::INTEGRATION,
18085 Keyword::VIEW,
18086 Keyword::WAREHOUSE,
18087 Keyword::INTEGRATION,
18088 Keyword::USER,
18089 Keyword::CONNECTION,
18090 Keyword::PROCEDURE,
18091 Keyword::FUNCTION,
18092 Keyword::TYPE,
18093 Keyword::DOMAIN,
18094 ]);
18095 let objects =
18096 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
18097 match object_type {
18098 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
18099 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
18100 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
18101 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
18102 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
18103 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
18104 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
18105 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
18106 Some(Keyword::TYPE) => Some(GrantObjects::Types(objects?)),
18107 Some(Keyword::DOMAIN) => Some(GrantObjects::Domains(objects?)),
18108 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
18109 if let Some(name) = objects?.first() {
18110 self.parse_grant_procedure_or_function(name, &kw)?
18111 } else {
18112 self.expected_ref("procedure or function name", self.peek_token_ref())?
18113 }
18114 }
18115 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
18116 Some(unexpected_keyword) => return Err(ParserError::ParserError(
18117 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
18118 )),
18119 }
18120 }
18121 } else {
18122 None
18123 };
18124
18125 Ok((privileges, objects))
18126 }
18127
18128 fn parse_grant_procedure_or_function(
18129 &mut self,
18130 name: &ObjectName,
18131 kw: &Option<Keyword>,
18132 ) -> Result<Option<GrantObjects>, ParserError> {
18133 let arg_types = if self.consume_token(&Token::LParen) {
18134 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
18135 self.expect_token(&Token::RParen)?;
18136 list
18137 } else {
18138 vec![]
18139 };
18140 match kw {
18141 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
18142 name: name.clone(),
18143 arg_types,
18144 })),
18145 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
18146 name: name.clone(),
18147 arg_types,
18148 })),
18149 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
18150 }
18151 }
18152
    /// Parses a single privilege (action) in a GRANT/REVOKE action list,
    /// e.g. `SELECT (col1, col2)`, `IMPORTED PRIVILEGES`, `CREATE SCHEMA`.
    ///
    /// Branch order matters: multi-keyword privileges are tried before
    /// single-keyword ones that share a prefix (e.g. `READ SESSION` before
    /// `READ`), since each `parse_keyword(s)` call consumes tokens on success.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Helper for privileges that accept an optional column list;
        // an empty/absent list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-keyword privileges first.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-keyword privileges (some with sub-parsers for a type/target).
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
18273
18274 fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
18275 if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
18277 Some(ActionCreateObjectType::ApplicationPackage)
18278 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
18279 Some(ActionCreateObjectType::ComputePool)
18280 } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
18281 Some(ActionCreateObjectType::DataExchangeListing)
18282 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
18283 Some(ActionCreateObjectType::ExternalVolume)
18284 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
18285 Some(ActionCreateObjectType::FailoverGroup)
18286 } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
18287 Some(ActionCreateObjectType::NetworkPolicy)
18288 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
18289 Some(ActionCreateObjectType::OrganiationListing)
18290 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
18291 Some(ActionCreateObjectType::ReplicationGroup)
18292 }
18293 else if self.parse_keyword(Keyword::ACCOUNT) {
18295 Some(ActionCreateObjectType::Account)
18296 } else if self.parse_keyword(Keyword::APPLICATION) {
18297 Some(ActionCreateObjectType::Application)
18298 } else if self.parse_keyword(Keyword::DATABASE) {
18299 Some(ActionCreateObjectType::Database)
18300 } else if self.parse_keyword(Keyword::INTEGRATION) {
18301 Some(ActionCreateObjectType::Integration)
18302 } else if self.parse_keyword(Keyword::ROLE) {
18303 Some(ActionCreateObjectType::Role)
18304 } else if self.parse_keyword(Keyword::SCHEMA) {
18305 Some(ActionCreateObjectType::Schema)
18306 } else if self.parse_keyword(Keyword::SHARE) {
18307 Some(ActionCreateObjectType::Share)
18308 } else if self.parse_keyword(Keyword::USER) {
18309 Some(ActionCreateObjectType::User)
18310 } else if self.parse_keyword(Keyword::WAREHOUSE) {
18311 Some(ActionCreateObjectType::Warehouse)
18312 } else {
18313 None
18314 }
18315 }
18316
18317 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
18318 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
18319 Ok(ActionApplyType::AggregationPolicy)
18320 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
18321 Ok(ActionApplyType::AuthenticationPolicy)
18322 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
18323 Ok(ActionApplyType::JoinPolicy)
18324 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
18325 Ok(ActionApplyType::MaskingPolicy)
18326 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
18327 Ok(ActionApplyType::PackagesPolicy)
18328 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
18329 Ok(ActionApplyType::PasswordPolicy)
18330 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
18331 Ok(ActionApplyType::ProjectionPolicy)
18332 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
18333 Ok(ActionApplyType::RowAccessPolicy)
18334 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
18335 Ok(ActionApplyType::SessionPolicy)
18336 } else if self.parse_keyword(Keyword::TAG) {
18337 Ok(ActionApplyType::Tag)
18338 } else {
18339 self.expected_ref("GRANT APPLY type", self.peek_token_ref())
18340 }
18341 }
18342
18343 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
18344 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
18345 Some(ActionExecuteObjectType::DataMetricFunction)
18346 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
18347 Some(ActionExecuteObjectType::ManagedAlert)
18348 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
18349 Some(ActionExecuteObjectType::ManagedTask)
18350 } else if self.parse_keyword(Keyword::ALERT) {
18351 Some(ActionExecuteObjectType::Alert)
18352 } else if self.parse_keyword(Keyword::TASK) {
18353 Some(ActionExecuteObjectType::Task)
18354 } else {
18355 None
18356 }
18357 }
18358
18359 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
18360 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
18361 Ok(ActionManageType::AccountSupportCases)
18362 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
18363 Ok(ActionManageType::EventSharing)
18364 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
18365 Ok(ActionManageType::ListingAutoFulfillment)
18366 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
18367 Ok(ActionManageType::OrganizationSupportCases)
18368 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
18369 Ok(ActionManageType::UserSupportCases)
18370 } else if self.parse_keyword(Keyword::GRANTS) {
18371 Ok(ActionManageType::Grants)
18372 } else if self.parse_keyword(Keyword::WAREHOUSES) {
18373 Ok(ActionManageType::Warehouses)
18374 } else {
18375 self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
18376 }
18377 }
18378
18379 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
18380 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
18381 Some(ActionModifyType::LogLevel)
18382 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
18383 Some(ActionModifyType::TraceLevel)
18384 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
18385 Some(ActionModifyType::SessionLogLevel)
18386 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
18387 Some(ActionModifyType::SessionTraceLevel)
18388 } else {
18389 None
18390 }
18391 }
18392
18393 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
18394 if self.parse_keyword(Keyword::EXECUTION) {
18395 Some(ActionMonitorType::Execution)
18396 } else if self.parse_keyword(Keyword::SECURITY) {
18397 Some(ActionMonitorType::Security)
18398 } else if self.parse_keyword(Keyword::USAGE) {
18399 Some(ActionMonitorType::Usage)
18400 } else {
18401 None
18402 }
18403 }
18404
18405 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
18407 let mut name = self.parse_object_name(false)?;
18408 if self.dialect.supports_user_host_grantee()
18409 && name.0.len() == 1
18410 && name.0[0].as_ident().is_some()
18411 && self.consume_token(&Token::AtSign)
18412 {
18413 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
18414 let host = self.parse_identifier()?;
18415 Ok(GranteeName::UserHost { user, host })
18416 } else {
18417 Ok(GranteeName::ObjectName(name))
18418 }
18419 }
18420
18421 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
18423 self.expect_keyword(Keyword::DENY)?;
18424
18425 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18426 let objects = match objects {
18427 Some(o) => o,
18428 None => {
18429 return parser_err!(
18430 "DENY statements must specify an object",
18431 self.peek_token_ref().span.start
18432 )
18433 }
18434 };
18435
18436 self.expect_keyword_is(Keyword::TO)?;
18437 let grantees = self.parse_grantees()?;
18438 let cascade = self.parse_cascade_option();
18439 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18440 Some(self.parse_identifier()?)
18441 } else {
18442 None
18443 };
18444
18445 Ok(Statement::Deny(DenyStatement {
18446 privileges,
18447 objects,
18448 grantees,
18449 cascade,
18450 granted_by,
18451 }))
18452 }
18453
18454 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18456 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18457
18458 self.expect_keyword_is(Keyword::FROM)?;
18459 let grantees = self.parse_grantees()?;
18460
18461 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18462 Some(self.parse_identifier()?)
18463 } else {
18464 None
18465 };
18466
18467 let cascade = self.parse_cascade_option();
18468
18469 Ok(Revoke {
18470 privileges,
18471 objects,
18472 grantees,
18473 granted_by,
18474 cascade,
18475 })
18476 }
18477
18478 pub fn parse_replace(
18480 &mut self,
18481 replace_token: TokenWithSpan,
18482 ) -> Result<Statement, ParserError> {
18483 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18484 return parser_err!(
18485 "Unsupported statement REPLACE",
18486 self.peek_token_ref().span.start
18487 );
18488 }
18489
18490 let mut insert = self.parse_insert(replace_token)?;
18491 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18492 *replace_into = true;
18493 }
18494
18495 Ok(insert)
18496 }
18497
18498 fn parse_insert_setexpr_boxed(
18502 &mut self,
18503 insert_token: TokenWithSpan,
18504 ) -> Result<Box<SetExpr>, ParserError> {
18505 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18506 }
18507
    /// Parses an `INSERT` statement (the INSERT/REPLACE keyword has already
    /// been consumed; its token is passed in as `insert_token`).
    ///
    /// Handles the MySQL priority/IGNORE modifiers, Hive
    /// `INSERT ... DIRECTORY`, optional column lists and partitions,
    /// ClickHouse-style SETTINGS/FORMAT, `ON CONFLICT` / `ON DUPLICATE KEY
    /// UPDATE`, and a trailing `RETURNING` clause.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite `INSERT OR <action>` conflict clause.
        let or = self.parse_conflict_clause();
        // MySQL-only priority modifiers.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Set to true by parse_replace() after this returns.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        if self.parse_keyword(Keyword::DIRECTORY) {
            // Hive `INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>' ...`.
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional `TABLE` keyword before the target name.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional table alias, only when the next tokens cannot be the
            // start of the source (a subquery or DEFAULT/VALUES).
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT ... DEFAULT VALUES`: no columns, no source query.
                (vec![], None, vec![], None, None, vec![])
            } else {
                // A `(` starting a subquery must not be mistaken for a
                // column list.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                // FORMAT/SETTINGS signal no inline source; `SET a = b, ...`
                // (when supported) replaces the source query with assignments.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // ClickHouse-style `SETTINGS ...` and `FORMAT <name> ...`.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias (col_aliases)` for the inserted row.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ...` (Postgres/SQLite) or
            // `ON DUPLICATE KEY UPDATE ...` (MySQL).
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                // Multi-table INSERT fields are populated elsewhere.
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18734
18735 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18739 let ident = self.parse_identifier()?;
18740 let values = self
18741 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18742 .unwrap_or_default();
18743
18744 Ok(InputFormatClause { ident, values })
18745 }
18746
18747 fn peek_subquery_start(&mut self) -> bool {
18750 matches!(
18751 self.peek_tokens_ref(),
18752 [
18753 TokenWithSpan {
18754 token: Token::LParen,
18755 ..
18756 },
18757 TokenWithSpan {
18758 token: Token::Word(Word {
18759 keyword: Keyword::SELECT,
18760 ..
18761 }),
18762 ..
18763 },
18764 ]
18765 )
18766 }
18767
18768 fn peek_subquery_or_cte_start(&mut self) -> bool {
18772 matches!(
18773 self.peek_tokens_ref(),
18774 [
18775 TokenWithSpan {
18776 token: Token::LParen,
18777 ..
18778 },
18779 TokenWithSpan {
18780 token: Token::Word(Word {
18781 keyword: Keyword::SELECT | Keyword::WITH,
18782 ..
18783 }),
18784 ..
18785 },
18786 ]
18787 )
18788 }
18789
18790 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18791 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18792 Some(SqliteOnConflict::Replace)
18793 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18794 Some(SqliteOnConflict::Rollback)
18795 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18796 Some(SqliteOnConflict::Abort)
18797 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18798 Some(SqliteOnConflict::Fail)
18799 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18800 Some(SqliteOnConflict::Ignore)
18801 } else if self.parse_keyword(Keyword::REPLACE) {
18802 Some(SqliteOnConflict::Replace)
18803 } else {
18804 None
18805 }
18806 }
18807
18808 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18810 if self.parse_keyword(Keyword::PARTITION) {
18811 self.expect_token(&Token::LParen)?;
18812 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18813 self.expect_token(&Token::RParen)?;
18814 Ok(partition_cols)
18815 } else {
18816 Ok(None)
18817 }
18818 }
18819
18820 pub fn parse_load_data_table_format(
18822 &mut self,
18823 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18824 if self.parse_keyword(Keyword::INPUTFORMAT) {
18825 let input_format = self.parse_expr()?;
18826 self.expect_keyword_is(Keyword::SERDE)?;
18827 let serde = self.parse_expr()?;
18828 Ok(Some(HiveLoadDataFormat {
18829 input_format,
18830 serde,
18831 }))
18832 } else {
18833 Ok(None)
18834 }
18835 }
18836
18837 fn parse_update_setexpr_boxed(
18841 &mut self,
18842 update_token: TokenWithSpan,
18843 ) -> Result<Box<SetExpr>, ParserError> {
18844 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18845 }
18846
    /// Parses the body of an `UPDATE` statement. The `UPDATE` keyword has
    /// already been consumed and is passed in as `update_token` for span
    /// tracking on the resulting AST node.
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `UPDATE OR REPLACE | ROLLBACK | ...` conflict clause.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // Some dialects accept `FROM` before `SET`; remember where it appeared.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        // Optional OUTPUT clause (T-SQL style), parsed via helper.
        let output = self.maybe_parse_output_clause()?;

        // `FROM` after `SET` is only tried when none appeared before `SET`.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // `ORDER BY` on UPDATE is dialect-gated (e.g. MySQL-style syntax).
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18908
18909 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18911 let target = self.parse_assignment_target()?;
18912 self.expect_token(&Token::Eq)?;
18913 let value = self.parse_expr()?;
18914 Ok(Assignment { target, value })
18915 }
18916
18917 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18919 if self.consume_token(&Token::LParen) {
18920 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18921 self.expect_token(&Token::RParen)?;
18922 Ok(AssignmentTarget::Tuple(columns))
18923 } else {
18924 let column = self.parse_object_name(false)?;
18925 Ok(AssignmentTarget::ColumnName(column))
18926 }
18927 }
18928
    /// Parses a single function-call argument, which may be named
    /// (`name => expr` etc., depending on dialect) or an unnamed, possibly
    /// wildcard, expression.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // Speculatively try the named-argument form first; `maybe_parse`
        // rewinds the token stream when the closure fails.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // The dialect allows an arbitrary expression as the argument name.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // Otherwise only a plain identifier may name the argument.
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Fall back to an unnamed argument expression.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            // A bare `*` may carry trailing wildcard modifiers (ILIKE/EXCLUDE/
            // EXCEPT/REPLACE/RENAME) in dialects that support them.
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    // No modifiers actually present: keep the plain wildcard.
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18978
    /// Parses the operator separating a named function argument from its
    /// value: the `VALUE` keyword, `=>`, `=`, `:=`, or `:`, each gated on
    /// dialect support. On mismatch the consumed token is pushed back and an
    /// "argument operator" error is returned.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not an argument operator: rewind so callers (typically
                // inside `maybe_parse`) can backtrack cleanly.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
19007
19008 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
19010 if self.consume_token(&Token::RParen) {
19011 Ok(vec![])
19012 } else {
19013 let args = self.parse_comma_separated(Parser::parse_function_args)?;
19014 self.expect_token(&Token::RParen)?;
19015 Ok(args)
19016 }
19017 }
19018
    /// Parses the argument list of a table-valued function call (opening `(`
    /// already consumed). A trailing `SETTINGS` list, when present, ends the
    /// regular arguments.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        let settings = loop {
            // A settings list terminates the argument list.
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            // Stop when the comma-separated list ends (no comma follows).
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
19039
    /// Parses the interior of a function call's argument list, after the
    /// opening `(`. Besides the arguments, collects optional surrounding
    /// clauses (JSON NULL handling, RETURNING, null treatment, ORDER BY,
    /// LIMIT, HAVING MIN/MAX, SEPARATOR, ON OVERFLOW) into `clauses`, and
    /// consumes the closing `)`.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON-related clauses may appear before the arguments...
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list `()` (possibly with the clauses parsed above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        // Optional leading ALL / DISTINCT.
        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // IGNORE NULLS / RESPECT NULLS in argument position, dialect-gated.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // `HAVING MIN|MAX <expr>` aggregate bound (Generic/BigQuery only).
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // Defensive: expect_one_of_keywords only returns listed keywords.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // `SEPARATOR <value>` (Generic/MySQL only).
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // ...and the JSON clauses may equally appear after the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
19133
19134 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
19135 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
19136 Some(JsonNullClause::AbsentOnNull)
19137 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
19138 Some(JsonNullClause::NullOnNull)
19139 } else {
19140 None
19141 }
19142 }
19143
19144 fn maybe_parse_json_returning_clause(
19145 &mut self,
19146 ) -> Result<Option<JsonReturningClause>, ParserError> {
19147 if self.parse_keyword(Keyword::RETURNING) {
19148 let data_type = self.parse_data_type()?;
19149 Ok(Some(JsonReturningClause { data_type }))
19150 } else {
19151 Ok(None)
19152 }
19153 }
19154
19155 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
19156 let loc = self.peek_token_ref().span.start;
19157 match (
19158 self.parse_keyword(Keyword::ALL),
19159 self.parse_keyword(Keyword::DISTINCT),
19160 ) {
19161 (true, false) => Ok(Some(DuplicateTreatment::All)),
19162 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
19163 (false, false) => Ok(None),
19164 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
19165 }
19166 }
19167
    /// Parses one projection item of a SELECT list: a wildcard, a qualified
    /// wildcard, or an expression with an optional alias.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Optional dialect-reserved operator keyword prefixing the item,
        // carried along as an identifier built from the keyword's debug name.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `qualifier.*`, with any trailing wildcard modifiers.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*`, with any trailing wildcard modifiers.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted bare `from` as a projection item is rejected with a
            // targeted error rather than a confusing downstream failure.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // `alias = expr` projection form, dialect-gated.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The match guard already guarantees `left` is an identifier;
                // this `let else` restates the invariant defensively.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — qualified wildcard over an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (alias1, alias2, ...)` multi-column alias form.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
19242
    /// Parses the dialect-gated modifiers that may follow a `*` wildcard —
    /// `ILIKE`, `EXCLUDE`, `EXCEPT`, `REPLACE`, `RENAME`, and a trailing
    /// alias — in that fixed order. `wildcard_token` is the already-consumed
    /// `*` token, recorded on the result for span tracking.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // EXCLUDE is only attempted when no ILIKE was parsed.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if self.dialect.supports_select_wildcard_replace() {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if self.dialect.supports_select_wildcard_rename() {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        // Some dialects additionally allow aliasing the wildcard itself.
        let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
            self.maybe_parse_select_item_alias()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
            opt_alias,
        })
    }
19293
19294 pub fn parse_optional_select_item_ilike(
19298 &mut self,
19299 ) -> Result<Option<IlikeSelectItem>, ParserError> {
19300 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
19301 let next_token = self.next_token();
19302 let pattern = match next_token.token {
19303 Token::SingleQuotedString(s) => s,
19304 _ => return self.expected("ilike pattern", next_token),
19305 };
19306 Some(IlikeSelectItem { pattern })
19307 } else {
19308 None
19309 };
19310 Ok(opt_ilike)
19311 }
19312
19313 pub fn parse_optional_select_item_exclude(
19317 &mut self,
19318 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
19319 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
19320 if self.consume_token(&Token::LParen) {
19321 let columns =
19322 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
19323 self.expect_token(&Token::RParen)?;
19324 Some(ExcludeSelectItem::Multiple(columns))
19325 } else {
19326 let column = self.parse_object_name(false)?;
19327 Some(ExcludeSelectItem::Single(column))
19328 }
19329 } else {
19330 None
19331 };
19332
19333 Ok(opt_exclude)
19334 }
19335
19336 pub fn parse_optional_select_item_except(
19340 &mut self,
19341 ) -> Result<Option<ExceptSelectItem>, ParserError> {
19342 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
19343 if self.peek_token_ref().token == Token::LParen {
19344 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
19345 match &idents[..] {
19346 [] => {
19347 return self.expected_ref(
19348 "at least one column should be parsed by the expect clause",
19349 self.peek_token_ref(),
19350 )?;
19351 }
19352 [first, idents @ ..] => Some(ExceptSelectItem {
19353 first_element: first.clone(),
19354 additional_elements: idents.to_vec(),
19355 }),
19356 }
19357 } else {
19358 let ident = self.parse_identifier()?;
19360 Some(ExceptSelectItem {
19361 first_element: ident,
19362 additional_elements: vec![],
19363 })
19364 }
19365 } else {
19366 None
19367 };
19368
19369 Ok(opt_except)
19370 }
19371
19372 pub fn parse_optional_select_item_rename(
19374 &mut self,
19375 ) -> Result<Option<RenameSelectItem>, ParserError> {
19376 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
19377 if self.consume_token(&Token::LParen) {
19378 let idents =
19379 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
19380 self.expect_token(&Token::RParen)?;
19381 Some(RenameSelectItem::Multiple(idents))
19382 } else {
19383 let ident = self.parse_identifier_with_alias()?;
19384 Some(RenameSelectItem::Single(ident))
19385 }
19386 } else {
19387 None
19388 };
19389
19390 Ok(opt_rename)
19391 }
19392
19393 pub fn parse_optional_select_item_replace(
19395 &mut self,
19396 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
19397 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
19398 if self.consume_token(&Token::LParen) {
19399 let items = self.parse_comma_separated(|parser| {
19400 Ok(Box::new(parser.parse_replace_elements()?))
19401 })?;
19402 self.expect_token(&Token::RParen)?;
19403 Some(ReplaceSelectItem { items })
19404 } else {
19405 let tok = self.next_token();
19406 return self.expected("( after REPLACE but", tok);
19407 }
19408 } else {
19409 None
19410 };
19411
19412 Ok(opt_replace)
19413 }
19414 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
19416 let expr = self.parse_expr()?;
19417 let as_keyword = self.parse_keyword(Keyword::AS);
19418 let ident = self.parse_identifier()?;
19419 Ok(ReplaceSelectElement {
19420 expr,
19421 column_name: ident,
19422 as_keyword,
19423 })
19424 }
19425
19426 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19429 if self.parse_keyword(Keyword::ASC) {
19430 Some(true)
19431 } else if self.parse_keyword(Keyword::DESC) {
19432 Some(false)
19433 } else {
19434 None
19435 }
19436 }
19437
19438 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19440 self.parse_order_by_expr_inner(false)
19441 .map(|(order_by, _)| order_by)
19442 }
19443
19444 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19446 self.parse_order_by_expr_inner(true)
19447 .map(|(column, operator_class)| IndexColumn {
19448 column,
19449 operator_class,
19450 })
19451 }
19452
    /// Shared implementation behind `parse_order_by_expr` and
    /// `parse_create_index_expr`. When `with_operator_class` is set, an
    /// optional operator-class object name may follow the expression.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // If one of the ordering keywords follows directly, there is no
            // operator class; otherwise try (with backtracking) to parse one.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // `WITH FILL`, dialect-gated.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19493
19494 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19495 let asc = self.parse_asc_desc();
19496
19497 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19498 Some(true)
19499 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19500 Some(false)
19501 } else {
19502 None
19503 };
19504
19505 Ok(OrderByOptions { asc, nulls_first })
19506 }
19507
19508 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19512 let from = if self.parse_keyword(Keyword::FROM) {
19513 Some(self.parse_expr()?)
19514 } else {
19515 None
19516 };
19517
19518 let to = if self.parse_keyword(Keyword::TO) {
19519 Some(self.parse_expr()?)
19520 } else {
19521 None
19522 };
19523
19524 let step = if self.parse_keyword(Keyword::STEP) {
19525 Some(self.parse_expr()?)
19526 } else {
19527 None
19528 };
19529
19530 Ok(WithFill { from, to, step })
19531 }
19532
19533 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19536 if !self.parse_keyword(Keyword::INTERPOLATE) {
19537 return Ok(None);
19538 }
19539
19540 if self.consume_token(&Token::LParen) {
19541 let interpolations =
19542 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19543 self.expect_token(&Token::RParen)?;
19544 return Ok(Some(Interpolate {
19546 exprs: Some(interpolations),
19547 }));
19548 }
19549
19550 Ok(Some(Interpolate { exprs: None }))
19552 }
19553
19554 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19556 let column = self.parse_identifier()?;
19557 let expr = if self.parse_keyword(Keyword::AS) {
19558 Some(self.parse_expr()?)
19559 } else {
19560 None
19561 };
19562 Ok(InterpolateExpr { column, expr })
19563 }
19564
19565 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
19568 let quantity = if self.consume_token(&Token::LParen) {
19569 let quantity = self.parse_expr()?;
19570 self.expect_token(&Token::RParen)?;
19571 Some(TopQuantity::Expr(quantity))
19572 } else {
19573 let next_token = self.next_token();
19574 let quantity = match next_token.token {
19575 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
19576 _ => self.expected("literal int", next_token)?,
19577 };
19578 Some(TopQuantity::Constant(quantity))
19579 };
19580
19581 let percent = self.parse_keyword(Keyword::PERCENT);
19582
19583 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
19584
19585 Ok(Top {
19586 with_ties,
19587 percent,
19588 quantity,
19589 })
19590 }
19591
19592 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19594 if self.parse_keyword(Keyword::ALL) {
19595 Ok(None)
19596 } else {
19597 Ok(Some(self.parse_expr()?))
19598 }
19599 }
19600
19601 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19603 let value = self.parse_expr()?;
19604 let rows = if self.parse_keyword(Keyword::ROW) {
19605 OffsetRows::Row
19606 } else if self.parse_keyword(Keyword::ROWS) {
19607 OffsetRows::Rows
19608 } else {
19609 OffsetRows::None
19610 };
19611 Ok(Offset { value, rows })
19612 }
19613
    /// Parses the remainder of a `FETCH { FIRST | NEXT } [quantity [PERCENT]]
    /// { ROW | ROWS } { ONLY | WITH TIES }` clause (the `FETCH` keyword is
    /// already consumed).
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        // FIRST/NEXT are interchangeable and optional.
        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);

        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            // `FETCH FIRST ROW ...` — no explicit quantity.
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            // ROW/ROWS after the quantity is optional noise.
            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
            (Some(quantity), percent)
        };

        // `ONLY` means no ties; otherwise `WITH TIES` may be present.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else {
            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
        };

        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
19642
19643 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
19645 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
19646 Keyword::UPDATE => LockType::Update,
19647 Keyword::SHARE => LockType::Share,
19648 unexpected_keyword => return Err(ParserError::ParserError(
19649 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
19650 )),
19651 };
19652 let of = if self.parse_keyword(Keyword::OF) {
19653 Some(self.parse_object_name(false)?)
19654 } else {
19655 None
19656 };
19657 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
19658 Some(NonBlock::Nowait)
19659 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
19660 Some(NonBlock::SkipLocked)
19661 } else {
19662 None
19663 };
19664 Ok(LockClause {
19665 lock_type,
19666 of,
19667 nonblock,
19668 })
19669 }
19670
19671 pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
19673 self.expect_keyword(Keyword::LOCK)?;
19674
19675 if self.peek_keyword(Keyword::TABLES) {
19676 return self.expected_ref("TABLE or a table name", self.peek_token_ref());
19677 }
19678
19679 let _ = self.parse_keyword(Keyword::TABLE);
19680 let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
19681 let lock_mode = if self.parse_keyword(Keyword::IN) {
19682 let lock_mode = self.parse_lock_table_mode()?;
19683 self.expect_keyword(Keyword::MODE)?;
19684 Some(lock_mode)
19685 } else {
19686 None
19687 };
19688 let nowait = self.parse_keyword(Keyword::NOWAIT);
19689
19690 Ok(Lock {
19691 tables,
19692 lock_mode,
19693 nowait,
19694 })
19695 }
19696
    /// Parses one table target of a `LOCK TABLE` statement:
    /// `[ONLY] name [*]` (PostgreSQL-style descendant-table syntax).
    fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
        let only = self.parse_keyword(Keyword::ONLY);
        let name = self.parse_object_name(false)?;
        // Optional trailing `*` after the table name.
        let has_asterisk = self.consume_token(&Token::Mul);

        Ok(LockTableTarget {
            name,
            only,
            has_asterisk,
        })
    }
19708
19709 fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
19710 if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
19711 Ok(LockTableMode::AccessShare)
19712 } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
19713 Ok(LockTableMode::AccessExclusive)
19714 } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
19715 Ok(LockTableMode::RowShare)
19716 } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
19717 Ok(LockTableMode::RowExclusive)
19718 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
19719 Ok(LockTableMode::ShareUpdateExclusive)
19720 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
19721 Ok(LockTableMode::ShareRowExclusive)
19722 } else if self.parse_keyword(Keyword::SHARE) {
19723 Ok(LockTableMode::Share)
19724 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19725 Ok(LockTableMode::Exclusive)
19726 } else {
19727 self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
19728 }
19729 }
19730
    /// Parses the row lists of a `VALUES` clause (keyword already consumed;
    /// `value_keyword` is stored verbatim on the result). When `allow_empty`
    /// is set, `()` is accepted as an empty row.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        let mut explicit_row = false;

        // Each row may be prefixed with `ROW`; seeing it on any row marks the
        // whole VALUES clause as using explicit rows.
        let rows = self.parse_comma_separated(|parser| {
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Empty row: `()`.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
19760
19761 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19763 self.expect_keyword_is(Keyword::TRANSACTION)?;
19764 Ok(Statement::StartTransaction {
19765 modes: self.parse_transaction_modes()?,
19766 begin: false,
19767 transaction: Some(BeginTransactionKind::Transaction),
19768 modifier: None,
19769 statements: vec![],
19770 exception: None,
19771 has_end_keyword: false,
19772 })
19773 }
19774
19775 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19777 if !self.dialect.supports_start_transaction_modifier() {
19778 None
19779 } else if self.parse_keyword(Keyword::DEFERRED) {
19780 Some(TransactionModifier::Deferred)
19781 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19782 Some(TransactionModifier::Immediate)
19783 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19784 Some(TransactionModifier::Exclusive)
19785 } else if self.parse_keyword(Keyword::TRY) {
19786 Some(TransactionModifier::Try)
19787 } else if self.parse_keyword(Keyword::CATCH) {
19788 Some(TransactionModifier::Catch)
19789 } else {
19790 None
19791 }
19792 }
19793
19794 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19796 let modifier = self.parse_transaction_modifier();
19797 let transaction =
19798 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19799 {
19800 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19801 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19802 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19803 _ => None,
19804 };
19805 Ok(Statement::StartTransaction {
19806 modes: self.parse_transaction_modes()?,
19807 begin: true,
19808 transaction,
19809 modifier,
19810 statements: vec![],
19811 exception: None,
19812 has_end_keyword: false,
19813 })
19814 }
19815
    /// Parses a procedural `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block (e.g. BigQuery-style); the `BEGIN` keyword is already consumed.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        // Statements up to EXCEPTION or END.
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler: `WHEN ident [OR ident ...] THEN <statements>`.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // Identifiers separated by `OR`, terminated by `THEN`.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // `maybe_parse` rewinds when no `OR` follows.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19861
19862 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19864 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19865 None
19866 } else if self.parse_keyword(Keyword::TRY) {
19867 Some(TransactionModifier::Try)
19868 } else if self.parse_keyword(Keyword::CATCH) {
19869 Some(TransactionModifier::Catch)
19870 } else {
19871 None
19872 };
19873 Ok(Statement::Commit {
19874 chain: self.parse_commit_rollback_chain()?,
19875 end: true,
19876 modifier,
19877 })
19878 }
19879
    /// Parses a possibly-empty comma-separated list of transaction modes:
    /// `ISOLATION LEVEL ...`, `READ ONLY`, or `READ WRITE`.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        // After a comma a further mode is mandatory; otherwise the list may
        // simply end.
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19918
19919 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19921 Ok(Statement::Commit {
19922 chain: self.parse_commit_rollback_chain()?,
19923 end: false,
19924 modifier: None,
19925 })
19926 }
19927
19928 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19930 let chain = self.parse_commit_rollback_chain()?;
19931 let savepoint = self.parse_rollback_savepoint()?;
19932
19933 Ok(Statement::Rollback { chain, savepoint })
19934 }
19935
19936 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19938 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19939 if self.parse_keyword(Keyword::AND) {
19940 let chain = !self.parse_keyword(Keyword::NO);
19941 self.expect_keyword_is(Keyword::CHAIN)?;
19942 Ok(chain)
19943 } else {
19944 Ok(false)
19945 }
19946 }
19947
19948 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19950 if self.parse_keyword(Keyword::TO) {
19951 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19952 let savepoint = self.parse_identifier()?;
19953
19954 Ok(Some(savepoint))
19955 } else {
19956 Ok(None)
19957 }
19958 }
19959
19960 pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
19962 self.expect_token(&Token::LParen)?;
19963 let message = Box::new(self.parse_expr()?);
19964 self.expect_token(&Token::Comma)?;
19965 let severity = Box::new(self.parse_expr()?);
19966 self.expect_token(&Token::Comma)?;
19967 let state = Box::new(self.parse_expr()?);
19968 let arguments = if self.consume_token(&Token::Comma) {
19969 self.parse_comma_separated(Parser::parse_expr)?
19970 } else {
19971 vec![]
19972 };
19973 self.expect_token(&Token::RParen)?;
19974 let options = if self.parse_keyword(Keyword::WITH) {
19975 self.parse_comma_separated(Parser::parse_raiserror_option)?
19976 } else {
19977 vec![]
19978 };
19979 Ok(Statement::RaisError {
19980 message,
19981 severity,
19982 state,
19983 arguments,
19984 options,
19985 })
19986 }
19987
19988 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
19990 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
19991 Keyword::LOG => Ok(RaisErrorOption::Log),
19992 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
19993 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
19994 _ => self.expected_ref(
19995 "LOG, NOWAIT OR SETERROR raiserror option",
19996 self.peek_token_ref(),
19997 ),
19998 }
19999 }
20000
20001 pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
20005 self.expect_keyword_is(Keyword::THROW)?;
20006
20007 let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
20008 let (message, state) = if error_number.is_some() {
20009 self.expect_token(&Token::Comma)?;
20010 let message = Box::new(self.parse_expr()?);
20011 self.expect_token(&Token::Comma)?;
20012 let state = Box::new(self.parse_expr()?);
20013 (Some(message), Some(state))
20014 } else {
20015 (None, None)
20016 };
20017
20018 Ok(ThrowStatement {
20019 error_number,
20020 message,
20021 state,
20022 })
20023 }
20024
    /// Parses `DEALLOCATE [PREPARE] <name>` (minus the already-consumed
    /// keyword), recording whether the optional `PREPARE` keyword was present.
    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
        let prepare = self.parse_keyword(Keyword::PREPARE);
        let name = self.parse_identifier()?;
        Ok(Statement::Deallocate { name, prepare })
    }
20031
    /// Parses an `EXECUTE` statement (minus the already-consumed keyword):
    /// an optional `IMMEDIATE` modifier (dialect-gated), an optional statement
    /// name, an optional (possibly parenthesized) parameter list, and optional
    /// `INTO`, `USING`, `OUTPUT`, and `DEFAULT` clauses.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // No name is parsed for `EXECUTE IMMEDIATE ...` or when the statement
        // goes straight into a parenthesized parameter list.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Pick the token that terminates the (possibly empty) parameter list:
        // `)` when parenthesized; otherwise EOF, one of the clause keywords
        // below, or a semicolon.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        // Optional `INTO ident, ...` clause.
        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        // Optional `USING expr [AS alias], ...` clause.
        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
20091
20092 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
20094 let name = self.parse_identifier()?;
20095
20096 let mut data_types = vec![];
20097 if self.consume_token(&Token::LParen) {
20098 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
20099 self.expect_token(&Token::RParen)?;
20100 }
20101
20102 self.expect_keyword_is(Keyword::AS)?;
20103 let statement = Box::new(self.parse_statement()?);
20104 Ok(Statement::Prepare {
20105 name,
20106 data_types,
20107 statement,
20108 })
20109 }
20110
20111 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
20113 self.expect_keyword(Keyword::UNLOAD)?;
20114 self.expect_token(&Token::LParen)?;
20115 let (query, query_text) =
20116 if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
20117 (None, Some(self.parse_literal_string()?))
20118 } else {
20119 (Some(self.parse_query()?), None)
20120 };
20121 self.expect_token(&Token::RParen)?;
20122
20123 self.expect_keyword_is(Keyword::TO)?;
20124 let to = self.parse_identifier()?;
20125 let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
20126 Some(self.parse_iam_role_kind()?)
20127 } else {
20128 None
20129 };
20130 let with = self.parse_options(Keyword::WITH)?;
20131 let mut options = vec![];
20132 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
20133 options.push(opt);
20134 }
20135 Ok(Statement::Unload {
20136 query,
20137 query_text,
20138 to,
20139 auth,
20140 with,
20141 options,
20142 })
20143 }
20144
20145 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
20146 let temporary = self
20147 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
20148 .is_some();
20149 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
20150 let table = self.parse_keyword(Keyword::TABLE);
20151 let name = self.parse_object_name(false)?;
20152
20153 Ok(SelectInto {
20154 temporary,
20155 unlogged,
20156 table,
20157 name,
20158 })
20159 }
20160
20161 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
20162 let v = self.parse_value()?;
20163 match &v.value {
20164 Value::SingleQuotedString(_) => Ok(v),
20165 Value::DoubleQuotedString(_) => Ok(v),
20166 Value::Number(_, _) => Ok(v),
20167 Value::Placeholder(_) => Ok(v),
20168 _ => {
20169 self.prev_token();
20170 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
20171 }
20172 }
20173 }
20174
20175 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
20177 let name = self.parse_object_name(false)?;
20178 if self.consume_token(&Token::LParen) {
20179 let value = self.parse_pragma_value()?;
20180 self.expect_token(&Token::RParen)?;
20181 Ok(Statement::Pragma {
20182 name,
20183 value: Some(value),
20184 is_eq: false,
20185 })
20186 } else if self.consume_token(&Token::Eq) {
20187 Ok(Statement::Pragma {
20188 name,
20189 value: Some(self.parse_pragma_value()?),
20190 is_eq: true,
20191 })
20192 } else {
20193 Ok(Statement::Pragma {
20194 name,
20195 value: None,
20196 is_eq: false,
20197 })
20198 }
20199 }
20200
20201 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
20203 let extension_name = self.parse_identifier()?;
20204
20205 Ok(Statement::Install { extension_name })
20206 }
20207
    /// Parses a `LOAD` statement (minus the already-consumed keyword).
    /// For dialects that support extension loading this is
    /// `LOAD <extension_name>`; for dialects that support `LOAD DATA` it is
    /// `LOAD DATA [LOCAL] INPATH '<path>' [OVERWRITE] INTO TABLE <name>
    /// [PARTITION ...] [<table format>]`. Anything else is an error.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            // Optional trailing table-format clause.
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected_ref(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token_ref(),
            )
        }
    }
20238
    /// Parses the tail of an `OPTIMIZE [TABLE] <name>` statement with its
    /// optional clauses, in this fixed order: `ON CLUSTER`,
    /// `PARTITION {<expr> | ID <ident>}`, `FINAL`, `DEDUPLICATE [BY <expr>]`,
    /// `WHERE <predicate>`, and `ZORDER BY (<columns>)`.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // `PARTITION ID '<id>'` names a partition directly; plain
        // `PARTITION <expr>` selects by expression.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        // Bare `DEDUPLICATE` deduplicates on all columns; `DEDUPLICATE BY`
        // restricts it to an expression.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // `ZORDER BY` requires a parenthesized column-expression list.
        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
20307
20308 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
20314 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20316 let name = self.parse_object_name(false)?;
20318 let mut data_type: Option<DataType> = None;
20320 if self.parse_keywords(&[Keyword::AS]) {
20321 data_type = Some(self.parse_data_type()?)
20322 }
20323 let sequence_options = self.parse_create_sequence_options()?;
20324 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
20326 if self.parse_keywords(&[Keyword::NONE]) {
20327 Some(ObjectName::from(vec![Ident::new("NONE")]))
20328 } else {
20329 Some(self.parse_object_name(false)?)
20330 }
20331 } else {
20332 None
20333 };
20334 Ok(Statement::CreateSequence {
20335 temporary,
20336 if_not_exists,
20337 name,
20338 data_type,
20339 sequence_options,
20340 owned_by,
20341 })
20342 }
20343
    /// Parses the option clauses of `CREATE SEQUENCE`, each optional and in
    /// this fixed order: `INCREMENT [BY] n`, `[NO] MINVALUE [n]`,
    /// `[NO] MAXVALUE [n]`, `START [WITH] n`, `CACHE n`, `[NO] CYCLE`.
    ///
    /// Several variants carry a `bool` recording which keyword spelling was
    /// used: `IncrementBy(_, true)` means `BY` was written,
    /// `StartWith(_, true)` means `WITH` was written, and `Cycle(true)`
    /// means the `NO` prefix was written (i.e. `NO CYCLE`).
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // `MINVALUE n` and `NO MINVALUE` are distinct leading tokens, so the
        // order of these checks is safe.
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // Note the inverted-looking flag: `Cycle(true)` encodes `NO CYCLE`.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
20388
20389 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
20393 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20394 let name = self.parse_object_name(false)?;
20395
20396 let server_type = if self.parse_keyword(Keyword::TYPE) {
20397 Some(self.parse_identifier()?)
20398 } else {
20399 None
20400 };
20401
20402 let version = if self.parse_keyword(Keyword::VERSION) {
20403 Some(self.parse_identifier()?)
20404 } else {
20405 None
20406 };
20407
20408 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
20409 let foreign_data_wrapper = self.parse_object_name(false)?;
20410
20411 let mut options = None;
20412 if self.parse_keyword(Keyword::OPTIONS) {
20413 self.expect_token(&Token::LParen)?;
20414 options = Some(self.parse_comma_separated(|p| {
20415 let key = p.parse_identifier()?;
20416 let value = p.parse_identifier()?;
20417 Ok(CreateServerOption { key, value })
20418 })?);
20419 self.expect_token(&Token::RParen)?;
20420 }
20421
20422 Ok(Statement::CreateServer(CreateServerStatement {
20423 name,
20424 if_not_exists: ine,
20425 server_type,
20426 version,
20427 foreign_data_wrapper,
20428 options,
20429 }))
20430 }
20431
20432 pub fn parse_create_foreign_data_wrapper(
20436 &mut self,
20437 ) -> Result<CreateForeignDataWrapper, ParserError> {
20438 let name = self.parse_identifier()?;
20439
20440 let handler = if self.parse_keyword(Keyword::HANDLER) {
20441 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20442 } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
20443 Some(FdwRoutineClause::NoFunction)
20444 } else {
20445 None
20446 };
20447
20448 let validator = if self.parse_keyword(Keyword::VALIDATOR) {
20449 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20450 } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20451 Some(FdwRoutineClause::NoFunction)
20452 } else {
20453 None
20454 };
20455
20456 let options = if self.parse_keyword(Keyword::OPTIONS) {
20457 self.expect_token(&Token::LParen)?;
20458 let opts = self.parse_comma_separated(|p| {
20459 let key = p.parse_identifier()?;
20460 let value = p.parse_identifier()?;
20461 Ok(CreateServerOption { key, value })
20462 })?;
20463 self.expect_token(&Token::RParen)?;
20464 Some(opts)
20465 } else {
20466 None
20467 };
20468
20469 Ok(CreateForeignDataWrapper {
20470 name,
20471 handler,
20472 validator,
20473 options,
20474 })
20475 }
20476
20477 pub fn parse_create_foreign_table(&mut self) -> Result<CreateForeignTable, ParserError> {
20481 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20482 let name = self.parse_object_name(false)?;
20483 let (columns, _constraints) = self.parse_columns()?;
20484 self.expect_keyword_is(Keyword::SERVER)?;
20485 let server_name = self.parse_identifier()?;
20486
20487 let options = if self.parse_keyword(Keyword::OPTIONS) {
20488 self.expect_token(&Token::LParen)?;
20489 let opts = self.parse_comma_separated(|p| {
20490 let key = p.parse_identifier()?;
20491 let value = p.parse_identifier()?;
20492 Ok(CreateServerOption { key, value })
20493 })?;
20494 self.expect_token(&Token::RParen)?;
20495 Some(opts)
20496 } else {
20497 None
20498 };
20499
20500 Ok(CreateForeignTable {
20501 name,
20502 if_not_exists,
20503 columns,
20504 server_name,
20505 options,
20506 })
20507 }
20508
20509 pub fn parse_create_publication(&mut self) -> Result<CreatePublication, ParserError> {
20513 let name = self.parse_identifier()?;
20514
20515 let target = if self.parse_keyword(Keyword::FOR) {
20516 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES]) {
20517 Some(PublicationTarget::AllTables)
20518 } else if self.parse_keyword(Keyword::TABLE) {
20519 let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
20520 Some(PublicationTarget::Tables(tables))
20521 } else if self.parse_keywords(&[Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
20522 let schemas = self.parse_comma_separated(|p| p.parse_identifier())?;
20523 Some(PublicationTarget::TablesInSchema(schemas))
20524 } else {
20525 return self.expected_ref(
20526 "ALL TABLES, TABLE, or TABLES IN SCHEMA after FOR",
20527 self.peek_token_ref(),
20528 );
20529 }
20530 } else {
20531 None
20532 };
20533
20534 let with_options = self.parse_options(Keyword::WITH)?;
20535
20536 Ok(CreatePublication {
20537 name,
20538 target,
20539 with_options,
20540 })
20541 }
20542
20543 pub fn parse_create_subscription(&mut self) -> Result<CreateSubscription, ParserError> {
20547 let name = self.parse_identifier()?;
20548 self.expect_keyword_is(Keyword::CONNECTION)?;
20549 let connection = self.parse_value()?.value;
20550 self.expect_keyword_is(Keyword::PUBLICATION)?;
20551 let publications = self.parse_comma_separated(|p| p.parse_identifier())?;
20552 let with_options = self.parse_options(Keyword::WITH)?;
20553
20554 Ok(CreateSubscription {
20555 name,
20556 connection,
20557 publications,
20558 with_options,
20559 })
20560 }
20561
    /// Parses the tail of `CREATE CAST (<source> AS <target>)
    /// {WITH FUNCTION <fn>[(<args>)] | WITHOUT FUNCTION | WITH INOUT}
    /// [AS {ASSIGNMENT | IMPLICIT}]`. No `AS` clause yields an explicit cast.
    pub fn parse_create_cast(&mut self) -> Result<CreateCast, ParserError> {
        self.expect_token(&Token::LParen)?;
        let source_type = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::AS)?;
        let target_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // `WITHOUT FUNCTION` and `WITH INOUT` are checked before
        // `WITH FUNCTION` so the shared `WITH` prefix is disambiguated.
        let function_kind = if self.parse_keywords(&[Keyword::WITHOUT, Keyword::FUNCTION]) {
            CastFunctionKind::WithoutFunction
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::INOUT]) {
            CastFunctionKind::WithInout
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::FUNCTION]) {
            let function_name = self.parse_object_name(false)?;
            // The argument-type list is optional, and may be empty `()`.
            let argument_types = if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                let types = if self.peek_token_ref().token == Token::RParen {
                    vec![]
                } else {
                    self.parse_comma_separated(|p| p.parse_data_type())?
                };
                self.expect_token(&Token::RParen)?;
                types
            } else {
                vec![]
            };
            CastFunctionKind::WithFunction {
                function_name,
                argument_types,
            }
        } else {
            return self.expected_ref(
                "WITH FUNCTION, WITHOUT FUNCTION, or WITH INOUT",
                self.peek_token_ref(),
            );
        };

        let cast_context = if self.parse_keyword(Keyword::AS) {
            if self.parse_keyword(Keyword::ASSIGNMENT) {
                CastContext::Assignment
            } else if self.parse_keyword(Keyword::IMPLICIT) {
                CastContext::Implicit
            } else {
                return self.expected_ref("ASSIGNMENT or IMPLICIT after AS", self.peek_token_ref());
            }
        } else {
            CastContext::Explicit
        };

        Ok(CreateCast {
            source_type,
            target_type,
            function_kind,
            cast_context,
        })
    }
20620
20621 pub fn parse_create_conversion(
20625 &mut self,
20626 is_default: bool,
20627 ) -> Result<CreateConversion, ParserError> {
20628 let name = self.parse_object_name(false)?;
20629 self.expect_keyword_is(Keyword::FOR)?;
20630 let source_encoding = self.parse_literal_string()?;
20631 self.expect_keyword_is(Keyword::TO)?;
20632 let destination_encoding = self.parse_literal_string()?;
20633 self.expect_keyword_is(Keyword::FROM)?;
20634 let function_name = self.parse_object_name(false)?;
20635
20636 Ok(CreateConversion {
20637 name,
20638 is_default,
20639 source_encoding,
20640 destination_encoding,
20641 function_name,
20642 })
20643 }
20644
20645 pub fn parse_create_language(
20649 &mut self,
20650 or_replace: bool,
20651 trusted: bool,
20652 procedural: bool,
20653 ) -> Result<CreateLanguage, ParserError> {
20654 let name = self.parse_identifier()?;
20655
20656 let handler = if self.parse_keyword(Keyword::HANDLER) {
20657 Some(self.parse_object_name(false)?)
20658 } else {
20659 None
20660 };
20661
20662 let inline_handler = if self.parse_keyword(Keyword::INLINE) {
20663 Some(self.parse_object_name(false)?)
20664 } else {
20665 None
20666 };
20667
20668 let validator = if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20669 None
20670 } else if self.parse_keyword(Keyword::VALIDATOR) {
20671 Some(self.parse_object_name(false)?)
20672 } else {
20673 None
20674 };
20675
20676 Ok(CreateLanguage {
20677 name,
20678 or_replace,
20679 trusted,
20680 procedural,
20681 handler,
20682 inline_handler,
20683 validator,
20684 })
20685 }
20686
    /// Parses the tail of `CREATE RULE <name> AS ON
    /// {SELECT | INSERT | UPDATE | DELETE} TO <table> [WHERE <condition>]
    /// DO [INSTEAD | ALSO] {NOTHING | <statement> | (<statements>)}`.
    pub fn parse_create_rule(&mut self) -> Result<CreateRule, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;
        self.expect_keyword_is(Keyword::ON)?;

        let event = if self.parse_keyword(Keyword::SELECT) {
            RuleEvent::Select
        } else if self.parse_keyword(Keyword::INSERT) {
            RuleEvent::Insert
        } else if self.parse_keyword(Keyword::UPDATE) {
            RuleEvent::Update
        } else if self.parse_keyword(Keyword::DELETE) {
            RuleEvent::Delete
        } else {
            return self.expected_ref(
                "SELECT, INSERT, UPDATE, or DELETE after ON",
                self.peek_token_ref(),
            );
        };

        self.expect_keyword_is(Keyword::TO)?;
        let table = self.parse_object_name(false)?;

        let condition = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::DO)?;

        // `INSTEAD` and `ALSO` are mutually exclusive; `ALSO` is optional
        // noise and only consumed when `INSTEAD` is absent.
        let instead = self.parse_keyword(Keyword::INSTEAD);
        if !instead {
            let _ = self.parse_keyword(Keyword::ALSO);
        }

        let action = if self.parse_keyword(Keyword::NOTHING) {
            RuleAction::Nothing
        } else if self.peek_token_ref().token == Token::LParen {
            // Parenthesized list of semicolon-separated statements; a
            // trailing semicolon before `)` is allowed.
            self.expect_token(&Token::LParen)?;
            let mut stmts = Vec::new();
            loop {
                stmts.push(self.parse_statement()?);
                if !self.consume_token(&Token::SemiColon) {
                    break;
                }
                if self.peek_token_ref().token == Token::RParen {
                    break;
                }
            }
            self.expect_token(&Token::RParen)?;
            RuleAction::Statements(stmts)
        } else {
            // A single unparenthesized statement.
            let stmt = self.parse_statement()?;
            RuleAction::Statements(vec![stmt])
        };

        Ok(CreateRule {
            name,
            event,
            table,
            condition,
            instead,
            action,
        })
    }
20757
20758 pub fn parse_create_statistics(&mut self) -> Result<CreateStatistics, ParserError> {
20762 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20763 let name = self.parse_object_name(false)?;
20764
20765 let kinds = if self.consume_token(&Token::LParen) {
20766 let kinds = self.parse_comma_separated(|p| {
20767 let ident = p.parse_identifier()?;
20768 match ident.value.to_lowercase().as_str() {
20769 "ndistinct" => Ok(StatisticsKind::NDistinct),
20770 "dependencies" => Ok(StatisticsKind::Dependencies),
20771 "mcv" => Ok(StatisticsKind::Mcv),
20772 other => Err(ParserError::ParserError(format!(
20773 "Unknown statistics kind: {other}"
20774 ))),
20775 }
20776 })?;
20777 self.expect_token(&Token::RParen)?;
20778 kinds
20779 } else {
20780 vec![]
20781 };
20782
20783 self.expect_keyword_is(Keyword::ON)?;
20784 let on = self.parse_comma_separated(Parser::parse_expr)?;
20785 self.expect_keyword_is(Keyword::FROM)?;
20786 let from = self.parse_object_name(false)?;
20787
20788 Ok(CreateStatistics {
20789 if_not_exists,
20790 name,
20791 kinds,
20792 on,
20793 from,
20794 })
20795 }
20796
20797 pub fn parse_create_access_method(&mut self) -> Result<CreateAccessMethod, ParserError> {
20801 let name = self.parse_identifier()?;
20802 self.expect_keyword_is(Keyword::TYPE)?;
20803 let method_type = if self.parse_keyword(Keyword::INDEX) {
20804 AccessMethodType::Index
20805 } else if self.parse_keyword(Keyword::TABLE) {
20806 AccessMethodType::Table
20807 } else {
20808 return self.expected_ref("INDEX or TABLE after TYPE", self.peek_token_ref());
20809 };
20810 self.expect_keyword_is(Keyword::HANDLER)?;
20811 let handler = self.parse_object_name(false)?;
20812
20813 Ok(CreateAccessMethod {
20814 name,
20815 method_type,
20816 handler,
20817 })
20818 }
20819
    /// Parses the tail of `CREATE EVENT TRIGGER <name> ON <event>
    /// [WHEN TAG IN (<tags>)] EXECUTE {FUNCTION | PROCEDURE} <fn>()`.
    ///
    /// The event must be one of `ddl_command_start`, `ddl_command_end`,
    /// `table_rewrite`, or `sql_drop` (matched case-insensitively).
    pub fn parse_create_event_trigger(&mut self) -> Result<CreateEventTrigger, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let event_ident = self.parse_identifier()?;
        let event = match event_ident.value.to_lowercase().as_str() {
            "ddl_command_start" => EventTriggerEvent::DdlCommandStart,
            "ddl_command_end" => EventTriggerEvent::DdlCommandEnd,
            "table_rewrite" => EventTriggerEvent::TableRewrite,
            "sql_drop" => EventTriggerEvent::SqlDrop,
            other => {
                return Err(ParserError::ParserError(format!(
                    "Unknown event trigger event: {other}"
                )))
            }
        };

        // Optional `WHEN TAG IN (...)` filter; tags are parsed as values.
        let when_tags = if self.parse_keyword(Keyword::WHEN) {
            self.expect_keyword_is(Keyword::TAG)?;
            self.expect_keyword_is(Keyword::IN)?;
            self.expect_token(&Token::LParen)?;
            let tags = self.parse_comma_separated(|p| p.parse_value().map(|v| v.value))?;
            self.expect_token(&Token::RParen)?;
            Some(tags)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::EXECUTE)?;
        // Records whether PROCEDURE (vs FUNCTION) was the spelling used.
        let is_procedure = if self.parse_keyword(Keyword::FUNCTION) {
            false
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            true
        } else {
            return self.expected_ref("FUNCTION or PROCEDURE after EXECUTE", self.peek_token_ref());
        };
        let execute = self.parse_object_name(false)?;
        // The trigger function takes no arguments: a bare `()` is required.
        self.expect_token(&Token::LParen)?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateEventTrigger {
            name,
            event,
            when_tags,
            execute,
            is_procedure,
        })
    }
20870
    /// Parses the tail of `CREATE [OR REPLACE] TRANSFORM FOR <type>
    /// LANGUAGE <lang> (<element>, ...)`, where each element is
    /// `{FROM | TO} SQL WITH FUNCTION <fn>(<arg types>)`.
    pub fn parse_create_transform(
        &mut self,
        or_replace: bool,
    ) -> Result<CreateTransform, ParserError> {
        self.expect_keyword_is(Keyword::FOR)?;
        let type_name = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::LANGUAGE)?;
        let language = self.parse_identifier()?;
        self.expect_token(&Token::LParen)?;
        let elements = self.parse_comma_separated(|p| {
            // Each element starts with either FROM or TO.
            let is_from = if p.parse_keyword(Keyword::FROM) {
                true
            } else {
                p.expect_keyword_is(Keyword::TO)?;
                false
            };
            p.expect_keyword_is(Keyword::SQL)?;
            p.expect_keyword_is(Keyword::WITH)?;
            p.expect_keyword_is(Keyword::FUNCTION)?;
            let function = p.parse_object_name(false)?;
            p.expect_token(&Token::LParen)?;
            // The function's argument-type list may be empty.
            let arg_types = if p.peek_token().token == Token::RParen {
                vec![]
            } else {
                p.parse_comma_separated(|p| p.parse_data_type())?
            };
            p.expect_token(&Token::RParen)?;
            Ok(TransformElement {
                is_from,
                function,
                arg_types,
            })
        })?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateTransform {
            or_replace,
            type_name,
            language,
            elements,
        })
    }
20916
    /// Parses the tail of `SECURITY LABEL [FOR <provider>] ON <object kind>
    /// <object name> IS {'<label>' | NULL}`.
    pub fn parse_security_label(&mut self) -> Result<SecurityLabel, ParserError> {
        self.expect_keyword_is(Keyword::LABEL)?;

        // Optional `FOR <provider>` clause.
        let provider = if self.parse_keyword(Keyword::FOR) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::ON)?;

        // `MATERIALIZED VIEW` must be checked before the bare `VIEW` form.
        let object_kind = if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            SecurityLabelObjectKind::MaterializedView
        } else if self.parse_keyword(Keyword::TABLE) {
            SecurityLabelObjectKind::Table
        } else if self.parse_keyword(Keyword::COLUMN) {
            SecurityLabelObjectKind::Column
        } else if self.parse_keyword(Keyword::DATABASE) {
            SecurityLabelObjectKind::Database
        } else if self.parse_keyword(Keyword::DOMAIN) {
            SecurityLabelObjectKind::Domain
        } else if self.parse_keyword(Keyword::FUNCTION) {
            SecurityLabelObjectKind::Function
        } else if self.parse_keyword(Keyword::ROLE) {
            SecurityLabelObjectKind::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            SecurityLabelObjectKind::Schema
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            SecurityLabelObjectKind::Sequence
        } else if self.parse_keyword(Keyword::TYPE) {
            SecurityLabelObjectKind::Type
        } else if self.parse_keyword(Keyword::VIEW) {
            SecurityLabelObjectKind::View
        } else {
            return self.expected_ref(
                "TABLE, COLUMN, DATABASE, DOMAIN, FUNCTION, MATERIALIZED VIEW, ROLE, SCHEMA, SEQUENCE, TYPE, or VIEW after ON",
                self.peek_token_ref(),
            );
        };

        let object_name = self.parse_object_name(false)?;

        self.expect_keyword_is(Keyword::IS)?;

        // `IS NULL` removes the label, represented here as `None`.
        let label = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_value()?.value)
        };

        Ok(SecurityLabel {
            provider,
            object_kind,
            object_name,
            label,
        })
    }
20977
20978 pub fn parse_create_user_mapping(&mut self) -> Result<CreateUserMapping, ParserError> {
20982 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20983
20984 self.expect_keyword_is(Keyword::FOR)?;
20985
20986 let user = if self.parse_keyword(Keyword::CURRENT_ROLE) {
20987 UserMappingUser::CurrentRole
20988 } else if self.parse_keyword(Keyword::CURRENT_USER) {
20989 UserMappingUser::CurrentUser
20990 } else if self.parse_keyword(Keyword::PUBLIC) {
20991 UserMappingUser::Public
20992 } else if self.parse_keyword(Keyword::USER) {
20993 UserMappingUser::User
20994 } else {
20995 UserMappingUser::Ident(self.parse_identifier()?)
20996 };
20997
20998 self.expect_keyword_is(Keyword::SERVER)?;
20999 let server_name = self.parse_identifier()?;
21000
21001 let options = if self.parse_keyword(Keyword::OPTIONS) {
21002 self.expect_token(&Token::LParen)?;
21003 let opts = self.parse_comma_separated(|p| {
21004 let key = p.parse_identifier()?;
21005 let value = p.parse_identifier()?;
21006 Ok(CreateServerOption { key, value })
21007 })?;
21008 self.expect_token(&Token::RParen)?;
21009 Some(opts)
21010 } else {
21011 None
21012 };
21013
21014 Ok(CreateUserMapping {
21015 if_not_exists,
21016 user,
21017 server_name,
21018 options,
21019 })
21020 }
21021
21022 pub fn parse_create_tablespace(&mut self) -> Result<CreateTablespace, ParserError> {
21026 let name = self.parse_identifier()?;
21027
21028 let owner = if self.parse_keyword(Keyword::OWNER) {
21029 Some(self.parse_identifier()?)
21030 } else {
21031 None
21032 };
21033
21034 self.expect_keyword_is(Keyword::LOCATION)?;
21035 let location = self.parse_value()?.value;
21036
21037 let with_options = self.parse_options(Keyword::WITH)?;
21038
21039 Ok(CreateTablespace {
21040 name,
21041 owner,
21042 location,
21043 with_options,
21044 })
21045 }
21046
    /// Returns the parser's current position within the token stream,
    /// e.g. so a caller can record a position and rewind to it later.
    pub fn index(&self) -> usize {
        self.index
    }
21051
21052 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
21054 let ident = self.parse_identifier()?;
21055 self.expect_keyword_is(Keyword::AS)?;
21056
21057 let window_expr = if self.consume_token(&Token::LParen) {
21058 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
21059 } else if self.dialect.supports_window_clause_named_window_reference() {
21060 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
21061 } else {
21062 return self.expected_ref("(", self.peek_token_ref());
21063 };
21064
21065 Ok(NamedWindowDefinition(ident, window_expr))
21066 }
21067
21068 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
21070 let name = self.parse_object_name(false)?;
21071 let params = self.parse_optional_procedure_parameters()?;
21072
21073 let language = if self.parse_keyword(Keyword::LANGUAGE) {
21074 Some(self.parse_identifier()?)
21075 } else {
21076 None
21077 };
21078
21079 self.expect_keyword_is(Keyword::AS)?;
21080
21081 let body = self.parse_conditional_statements(&[Keyword::END])?;
21082
21083 Ok(Statement::CreateProcedure {
21084 name,
21085 or_alter,
21086 params,
21087 language,
21088 body,
21089 })
21090 }
21091
    /// Parses a window specification after `(` has been consumed, through
    /// and including the closing `)`:
    /// `[base_window_name] [PARTITION BY ...] [ORDER BY ...] [frame] )`.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // An optional base window name may open the spec. It must be a
        // non-keyword word, otherwise it would swallow PARTITION/ORDER etc.
        let window_name = match &self.peek_token_ref().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything left before `)` must be a window frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
21126
    /// Parses the remainder of a `CREATE TYPE` statement.
    ///
    /// Supported forms:
    /// * `CREATE TYPE name` — shell type (no representation)
    /// * `CREATE TYPE name (options...)` — SQL base-type definition
    /// * `CREATE TYPE name AS ENUM (...)`
    /// * `CREATE TYPE name AS RANGE (...)`
    /// * `CREATE TYPE name AS (attributes...)` — composite type
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_as = self.parse_keyword(Keyword::AS);

        if !has_as {
            if self.consume_token(&Token::LParen) {
                // Base type: `CREATE TYPE name (INPUT = ..., OUTPUT = ..., ...)`.
                let options = self.parse_create_type_sql_definition_options()?;
                self.expect_token(&Token::RParen)?;
                return Ok(Statement::CreateType {
                    name,
                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
                });
            }

            // Shell type: just a name with no representation.
            return Ok(Statement::CreateType {
                name,
                representation: None,
            });
        }

        if self.parse_keyword(Keyword::ENUM) {
            self.parse_create_type_enum(name)
        } else if self.parse_keyword(Keyword::RANGE) {
            self.parse_create_type_range(name)
        } else if self.consume_token(&Token::LParen) {
            self.parse_create_type_composite(name)
        } else {
            self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
        }
    }
21167
    /// Parses a composite type body `attr type [COLLATE c] [, ...] )`;
    /// the opening `(` has already been consumed.
    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        // `AS ()` — a composite type with no attributes.
        if self.consume_token(&Token::RParen) {
            return Ok(Statement::CreateType {
                name,
                representation: Some(UserDefinedTypeRepresentation::Composite {
                    attributes: vec![],
                }),
            });
        }

        let mut attributes = vec![];
        loop {
            let attr_name = self.parse_identifier()?;
            let attr_data_type = self.parse_data_type()?;
            // Each attribute may carry its own collation.
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });

            // Attributes are comma-separated; anything else ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
        })
    }
21208
21209 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21213 self.expect_token(&Token::LParen)?;
21214 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21215 self.expect_token(&Token::RParen)?;
21216
21217 Ok(Statement::CreateType {
21218 name,
21219 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
21220 })
21221 }
21222
21223 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21227 self.expect_token(&Token::LParen)?;
21228 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
21229 self.expect_token(&Token::RParen)?;
21230
21231 Ok(Statement::CreateType {
21232 name,
21233 representation: Some(UserDefinedTypeRepresentation::Range { options }),
21234 })
21235 }
21236
21237 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
21239 let keyword = self.parse_one_of_keywords(&[
21240 Keyword::SUBTYPE,
21241 Keyword::SUBTYPE_OPCLASS,
21242 Keyword::COLLATION,
21243 Keyword::CANONICAL,
21244 Keyword::SUBTYPE_DIFF,
21245 Keyword::MULTIRANGE_TYPE_NAME,
21246 ]);
21247
21248 match keyword {
21249 Some(Keyword::SUBTYPE) => {
21250 self.expect_token(&Token::Eq)?;
21251 let data_type = self.parse_data_type()?;
21252 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
21253 }
21254 Some(Keyword::SUBTYPE_OPCLASS) => {
21255 self.expect_token(&Token::Eq)?;
21256 let name = self.parse_object_name(false)?;
21257 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
21258 }
21259 Some(Keyword::COLLATION) => {
21260 self.expect_token(&Token::Eq)?;
21261 let name = self.parse_object_name(false)?;
21262 Ok(UserDefinedTypeRangeOption::Collation(name))
21263 }
21264 Some(Keyword::CANONICAL) => {
21265 self.expect_token(&Token::Eq)?;
21266 let name = self.parse_object_name(false)?;
21267 Ok(UserDefinedTypeRangeOption::Canonical(name))
21268 }
21269 Some(Keyword::SUBTYPE_DIFF) => {
21270 self.expect_token(&Token::Eq)?;
21271 let name = self.parse_object_name(false)?;
21272 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
21273 }
21274 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
21275 self.expect_token(&Token::Eq)?;
21276 let name = self.parse_object_name(false)?;
21277 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
21278 }
21279 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
21280 }
21281 }
21282
    /// Parses the option list of a base-type definition,
    /// `CREATE TYPE name (option [, ...])`, stopping at (but not
    /// consuming) the closing `)`. The list may be empty.
    fn parse_create_type_sql_definition_options(
        &mut self,
    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
    }
21289
    /// Parses a single option of a SQL base-type definition,
    /// `CREATE TYPE name ( OPTION [= value] [, ...] )`.
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        match keyword {
            // Support-function options: each takes `= <object name>`.
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            // `INTERNALLENGTH = VARIABLE | <integer>`.
            Some(Keyword::INTERNALLENGTH) => {
                self.expect_token(&Token::Eq)?;
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // `PASSEDBYVALUE` is a bare flag with no `= value` part.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            // `ALIGNMENT = char | int2 | int4 | double`.
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected_ref(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token_ref(),
                    ),
                }
            }
            // `STORAGE = plain | external | extended | main`.
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected_ref(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            // `CATEGORY = '<c>'` — only the first character of the literal
            // is kept; an empty literal is rejected.
            Some(Keyword::CATEGORY) => {
                self.expect_token(&Token::Eq)?;
                let category_str = self.parse_literal_string()?;
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): if neither TRUE nor FALSE follows `=`, this
                // expression yields `true` without consuming a token, leaving
                // the stray token for the surrounding list parser to reject —
                // confirm this leniency is intended.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): same TRUE/FALSE fallback-to-true behavior as
                // PREFERRED above.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
        }
    }
21470
21471 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
21472 self.expect_token(&Token::LParen)?;
21473 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21474 self.expect_token(&Token::RParen)?;
21475 Ok(idents)
21476 }
21477
21478 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
21479 if dialect_of!(self is MySqlDialect | GenericDialect) {
21480 if self.parse_keyword(Keyword::FIRST) {
21481 Ok(Some(MySQLColumnPosition::First))
21482 } else if self.parse_keyword(Keyword::AFTER) {
21483 let ident = self.parse_identifier()?;
21484 Ok(Some(MySQLColumnPosition::After(ident)))
21485 } else {
21486 Ok(None)
21487 }
21488 } else {
21489 Ok(None)
21490 }
21491 }
21492
21493 fn parse_print(&mut self) -> Result<Statement, ParserError> {
21495 Ok(Statement::Print(PrintStatement {
21496 message: Box::new(self.parse_expr()?),
21497 }))
21498 }
21499
21500 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
21504 let wait_type = if self.parse_keyword(Keyword::DELAY) {
21505 WaitForType::Delay
21506 } else if self.parse_keyword(Keyword::TIME) {
21507 WaitForType::Time
21508 } else {
21509 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
21510 };
21511 let expr = self.parse_expr()?;
21512 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
21513 }
21514
21515 fn parse_return(&mut self) -> Result<Statement, ParserError> {
21517 match self.maybe_parse(|p| p.parse_expr())? {
21518 Some(expr) => Ok(Statement::Return(ReturnStatement {
21519 value: Some(ReturnStatementValue::Expr(expr)),
21520 })),
21521 None => Ok(Statement::Return(ReturnStatement { value: None })),
21522 }
21523 }
21524
21525 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
21529 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
21530
21531 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
21532 Some(self.parse_object_name(false)?)
21533 } else {
21534 None
21535 };
21536 self.expect_keyword(Keyword::OPTIONS)?;
21537 self.expect_token(&Token::LParen)?;
21538 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
21539 self.expect_token(&Token::RParen)?;
21540 self.expect_keyword(Keyword::AS)?;
21541 let query = self.parse_query()?;
21542 Ok(Statement::ExportData(ExportData {
21543 options,
21544 query,
21545 connection,
21546 }))
21547 }
21548
    /// Parses a `VACUUM` statement:
    /// `VACUUM [FULL] [SORT ONLY] [DELETE ONLY] [REINDEX] [RECLUSTER]
    /// [table_name [TO <n> PERCENT] [BOOST]]`.
    ///
    /// The modifier keywords are consumed in the fixed order above.
    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::VACUUM)?;
        let full = self.parse_keyword(Keyword::FULL);
        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
        let reindex = self.parse_keyword(Keyword::REINDEX);
        let recluster = self.parse_keyword(Keyword::RECLUSTER);
        // Table name and its trailing modifiers are all optional.
        let (table_name, threshold, boost) =
            match self.maybe_parse(|p| p.parse_object_name(false))? {
                Some(table_name) => {
                    // Optional `TO <n> PERCENT` threshold.
                    let threshold = if self.parse_keyword(Keyword::TO) {
                        let value = self.parse_value()?;
                        self.expect_keyword(Keyword::PERCENT)?;
                        Some(value)
                    } else {
                        None
                    };
                    let boost = self.parse_keyword(Keyword::BOOST);
                    (Some(table_name), threshold, boost)
                }
                _ => (None, None, false),
            };
        Ok(Statement::Vacuum(VacuumStatement {
            full,
            sort_only,
            delete_only,
            reindex,
            recluster,
            table_name,
            threshold,
            boost,
        }))
    }
21582
    /// Consumes the parser and returns its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
21587
    /// Returns true if the next keyword begins a subquery (`SELECT` or
    /// `WITH`), without advancing the parser.
    fn peek_sub_query(&mut self) -> bool {
        self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
            .is_some()
    }
21593
21594 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
21595 let show_in;
21596 let mut filter_position = None;
21597 if self.dialect.supports_show_like_before_in() {
21598 if let Some(filter) = self.parse_show_statement_filter()? {
21599 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
21600 }
21601 show_in = self.maybe_parse_show_stmt_in()?;
21602 } else {
21603 show_in = self.maybe_parse_show_stmt_in()?;
21604 if let Some(filter) = self.parse_show_statement_filter()? {
21605 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
21606 }
21607 }
21608 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
21609 let limit = self.maybe_parse_show_stmt_limit()?;
21610 let from = self.maybe_parse_show_stmt_from()?;
21611 Ok(ShowStatementOptions {
21612 filter_position,
21613 show_in,
21614 starts_with,
21615 limit,
21616 limit_from: from,
21617 })
21618 }
21619
    /// Parses the optional `FROM`/`IN` scope clause of a SHOW statement,
    /// e.g. `SHOW TABLES IN DATABASE db`.
    ///
    /// Returns `Ok(None)` when the next token is neither `FROM` nor `IN`.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` / `IN SCHEMA` directly followed by `STARTS WITH`
            // or `LIMIT` carries no parent name: those keywords belong to the
            // enclosing SHOW statement, not to the scope clause.
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            Some(parent_kw) => {
                // An explicit parent kind, optionally followed by its name.
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected_ref(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token_ref(),
                        )
                    }
                }
            }
            None => {
                // No parent-kind keyword: a bare object name, optionally
                // followed by another `FROM`/`IN <identifier>` qualifier,
                // which is prepended as the outermost name part.
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
21689
21690 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21691 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
21692 Ok(Some(self.parse_value()?))
21693 } else {
21694 Ok(None)
21695 }
21696 }
21697
21698 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
21699 if self.parse_keyword(Keyword::LIMIT) {
21700 Ok(self.parse_limit()?)
21701 } else {
21702 Ok(None)
21703 }
21704 }
21705
21706 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21707 if self.parse_keyword(Keyword::FROM) {
21708 Ok(Some(self.parse_value()?))
21709 } else {
21710 Ok(None)
21711 }
21712 }
21713
    /// Returns true while the parser is in the `ColumnDefinition` state,
    /// i.e. currently parsing a column definition.
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
21717
21718 pub(crate) fn parse_key_value_options(
21723 &mut self,
21724 parenthesized: bool,
21725 end_words: &[Keyword],
21726 ) -> Result<KeyValueOptions, ParserError> {
21727 let mut options: Vec<KeyValueOption> = Vec::new();
21728 let mut delimiter = KeyValueOptionsDelimiter::Space;
21729 if parenthesized {
21730 self.expect_token(&Token::LParen)?;
21731 }
21732 loop {
21733 match self.next_token().token {
21734 Token::RParen => {
21735 if parenthesized {
21736 break;
21737 } else {
21738 return self.expected_ref(" another option or EOF", self.peek_token_ref());
21739 }
21740 }
21741 Token::EOF | Token::SemiColon => break,
21742 Token::Comma => {
21743 delimiter = KeyValueOptionsDelimiter::Comma;
21744 continue;
21745 }
21746 Token::Word(w) if !end_words.contains(&w.keyword) => {
21747 options.push(self.parse_key_value_option(&w)?)
21748 }
21749 Token::Word(w) if end_words.contains(&w.keyword) => {
21750 self.prev_token();
21751 break;
21752 }
21753 _ => {
21754 return self.expected_ref(
21755 "another option, EOF, SemiColon, Comma or ')'",
21756 self.peek_token_ref(),
21757 )
21758 }
21759 };
21760 }
21761
21762 Ok(KeyValueOptions { delimiter, options })
21763 }
21764
    /// Parses the value part of a single key-value option after the `key`
    /// word has been consumed: `= <string | bool | number | word | (...)>`.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            // Quoted string value, e.g. TYPE = 'CSV'.
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Boolean keyword value, e.g. OVERWRITE = TRUE.
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            // Numeric value, e.g. SIZE_LIMIT = 100.
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word is stored verbatim as a placeholder value,
            // preserving its span.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            // Parenthesized value: first try a plain value list
            // `( v1, v2, ... )`; if that fails to parse, backtrack and treat
            // it as a nested key-value option list `( k = v ... )`.
            Token::LParen => {
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
21823
21824 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
21826 if self.parse_keyword(Keyword::ALL) {
21827 return Ok(ResetStatement { reset: Reset::ALL });
21828 }
21829
21830 let obj = self.parse_object_name(false)?;
21831 Ok(ResetStatement {
21832 reset: Reset::ConfigurationParameter(obj),
21833 })
21834 }
21835}
21836
21837fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
21838 if let Some(prefix) = prefix {
21839 Expr::Prefixed {
21840 prefix,
21841 value: Box::new(expr),
21842 }
21843 } else {
21844 expr
21845 }
21846}
21847
21848impl Word {
21849 pub fn to_ident(&self, span: Span) -> Ident {
21855 Ident {
21856 value: self.value.clone(),
21857 quote_style: self.quote_style,
21858 span,
21859 }
21860 }
21861
21862 pub fn into_ident(self, span: Span) -> Ident {
21867 Ident {
21868 value: self.value,
21869 quote_style: self.quote_style,
21870 span,
21871 }
21872 }
21873}
21874
21875#[cfg(test)]
21876mod tests {
21877 use crate::test_utils::{all_dialects, TestedDialects};
21878
21879 use super::*;
21880
    #[test]
    fn test_prev_index() {
        // Exercises next_token/prev_token/peek_token bookkeeping, including
        // rewinding at and past EOF.
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
21901
    #[test]
    fn test_peek_tokens() {
        // `peek_tokens` returns the next N tokens without consuming them,
        // padding with EOF past the end of the stream.
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
21946
21947 #[cfg(test)]
21948 mod test_parse_data_type {
21949 use crate::ast::{
21950 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
21951 };
21952 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
21953 use crate::test_utils::TestedDialects;
21954
        // Asserts that `$input` parses to `$expected_type` and that the
        // parsed type displays back as the original input (round-trip).
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
21964
        #[test]
        fn test_ansii_character_string_types() {
            // ANSI character string types: CHARACTER/CHAR[ VARYING] and
            // VARCHAR, with optional length and CHARACTERS/OCTETS units.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
22092
        #[test]
        fn test_ansii_character_large_object_types() {
            // ANSI character large object types: CHARACTER/CHAR LARGE OBJECT
            // and CLOB, with an optional length.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }
22124
22125 #[test]
22126 fn test_parse_custom_types() {
22127 let dialect =
22128 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
22129
22130 test_parse_data_type!(
22131 dialect,
22132 "GEOMETRY",
22133 DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
22134 );
22135
22136 test_parse_data_type!(
22137 dialect,
22138 "GEOMETRY(POINT)",
22139 DataType::Custom(
22140 ObjectName::from(vec!["GEOMETRY".into()]),
22141 vec!["POINT".to_string()]
22142 )
22143 );
22144
22145 test_parse_data_type!(
22146 dialect,
22147 "GEOMETRY(POINT, 4326)",
22148 DataType::Custom(
22149 ObjectName::from(vec!["GEOMETRY".into()]),
22150 vec!["POINT".to_string(), "4326".to_string()]
22151 )
22152 );
22153 }
22154
22155 #[test]
22156 fn test_ansii_exact_numeric_types() {
22157 let dialect = TestedDialects::new(vec![
22159 Box::new(GenericDialect {}),
22160 Box::new(AnsiDialect {}),
22161 Box::new(PostgreSqlDialect {}),
22162 ]);
22163
22164 test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
22165
22166 test_parse_data_type!(
22167 dialect,
22168 "NUMERIC(2)",
22169 DataType::Numeric(ExactNumberInfo::Precision(2))
22170 );
22171
22172 test_parse_data_type!(
22173 dialect,
22174 "NUMERIC(2,10)",
22175 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
22176 );
22177
22178 test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
22179
22180 test_parse_data_type!(
22181 dialect,
22182 "DECIMAL(2)",
22183 DataType::Decimal(ExactNumberInfo::Precision(2))
22184 );
22185
22186 test_parse_data_type!(
22187 dialect,
22188 "DECIMAL(2,10)",
22189 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
22190 );
22191
22192 test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
22193
22194 test_parse_data_type!(
22195 dialect,
22196 "DEC(2)",
22197 DataType::Dec(ExactNumberInfo::Precision(2))
22198 );
22199
22200 test_parse_data_type!(
22201 dialect,
22202 "DEC(2,10)",
22203 DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
22204 );
22205
22206 test_parse_data_type!(
22208 dialect,
22209 "NUMERIC(10,-2)",
22210 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
22211 );
22212
22213 test_parse_data_type!(
22214 dialect,
22215 "DECIMAL(1000,-10)",
22216 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
22217 );
22218
22219 test_parse_data_type!(
22220 dialect,
22221 "DEC(5,-1000)",
22222 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
22223 );
22224
22225 test_parse_data_type!(
22226 dialect,
22227 "NUMERIC(10,-5)",
22228 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
22229 );
22230
22231 test_parse_data_type!(
22232 dialect,
22233 "DECIMAL(20,-10)",
22234 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
22235 );
22236
22237 test_parse_data_type!(
22238 dialect,
22239 "DEC(5,-2)",
22240 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
22241 );
22242
22243 dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
22244 let data_type = parser.parse_data_type().unwrap();
22245 assert_eq!(
22246 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
22247 data_type
22248 );
22249 assert_eq!("NUMERIC(10,5)", data_type.to_string());
22251 });
22252 }
22253
22254 #[test]
22255 fn test_ansii_date_type() {
22256 let dialect =
22258 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
22259
22260 test_parse_data_type!(dialect, "DATE", DataType::Date);
22261
22262 test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
22263
22264 test_parse_data_type!(
22265 dialect,
22266 "TIME(6)",
22267 DataType::Time(Some(6), TimezoneInfo::None)
22268 );
22269
22270 test_parse_data_type!(
22271 dialect,
22272 "TIME WITH TIME ZONE",
22273 DataType::Time(None, TimezoneInfo::WithTimeZone)
22274 );
22275
22276 test_parse_data_type!(
22277 dialect,
22278 "TIME(6) WITH TIME ZONE",
22279 DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
22280 );
22281
22282 test_parse_data_type!(
22283 dialect,
22284 "TIME WITHOUT TIME ZONE",
22285 DataType::Time(None, TimezoneInfo::WithoutTimeZone)
22286 );
22287
22288 test_parse_data_type!(
22289 dialect,
22290 "TIME(6) WITHOUT TIME ZONE",
22291 DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
22292 );
22293
22294 test_parse_data_type!(
22295 dialect,
22296 "TIMESTAMP",
22297 DataType::Timestamp(None, TimezoneInfo::None)
22298 );
22299
22300 test_parse_data_type!(
22301 dialect,
22302 "TIMESTAMP(22)",
22303 DataType::Timestamp(Some(22), TimezoneInfo::None)
22304 );
22305
22306 test_parse_data_type!(
22307 dialect,
22308 "TIMESTAMP(22) WITH TIME ZONE",
22309 DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
22310 );
22311
22312 test_parse_data_type!(
22313 dialect,
22314 "TIMESTAMP(33) WITHOUT TIME ZONE",
22315 DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
22316 );
22317 }
22318 }
22319
/// `parse_schema_name` handles the three ANSI forms: a plain name, a bare
/// `AUTHORIZATION <ident>`, and a name combined with an authorization.
#[test]
fn test_parse_schema_name() {
    // Round-trips an input through `parse_schema_name`, checking both the
    // resulting AST node and its re-serialization to SQL text.
    macro_rules! assert_schema_name {
        ($input:expr, $expected_name:expr $(,)?) => {{
            all_dialects().run_parser_method(&*$input, |parser| {
                let schema_name = parser.parse_schema_name().unwrap();
                assert_eq!(schema_name, $expected_name);
                assert_eq!(schema_name.to_string(), $input.to_string());
            });
        }};
    }

    let name = ObjectName::from(vec![Ident::new("dummy_name")]);
    let authorization = Ident::new("dummy_authorization");

    assert_schema_name!(format!("{name}"), SchemaName::Simple(name.clone()));
    assert_schema_name!(
        format!("AUTHORIZATION {authorization}"),
        SchemaName::UnnamedAuthorization(authorization.clone()),
    );
    assert_schema_name!(
        format!("{name} AUTHORIZATION {authorization}"),
        SchemaName::NamedAuthorization(name.clone(), authorization.clone()),
    );
}
22352
/// MySQL `INDEX`/`KEY` table constraints parse with an optional name and an
/// optional `USING BTREE|HASH` index type, and round-trip back to the same
/// SQL text.
#[test]
fn mysql_parse_index_table_constraint() {
    // Round-trips an input through `parse_optional_table_constraint`,
    // checking both the resulting constraint and its re-serialization.
    macro_rules! test_parse_table_constraint {
        ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
            $dialect.run_parser_method(&*$input, |parser| {
                let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                assert_eq!(constraint, $expected);
                assert_eq!(constraint.to_string(), $input.to_string());
            });
        }};
    }

    // Expected `IndexColumn` for a plain column reference: no ordering
    // options and no operator class.
    fn mk_expected_col(name: &str) -> IndexColumn {
        IndexColumn {
            column: OrderByExpr {
                expr: Expr::Identifier(name.into()),
                options: OrderByOptions {
                    asc: None,
                    nulls_first: None,
                },
                with_fill: None,
            },
            operator_class: None,
        }
    }

    let dialect =
        TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

    // `INDEX` and its `KEY` synonym; the AST records which spelling was used
    // via `display_as_key`.
    test_parse_table_constraint!(
        dialect,
        "INDEX (c1)",
        IndexConstraint {
            display_as_key: false,
            name: None,
            index_type: None,
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    test_parse_table_constraint!(
        dialect,
        "KEY (c1)",
        IndexConstraint {
            display_as_key: true,
            name: None,
            index_type: None,
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    // A quoted index name with multiple columns. (Uses `.into()` like every
    // other case here instead of an explicit `TableConstraint::Index(..)`
    // wrapper, for consistency.)
    test_parse_table_constraint!(
        dialect,
        "INDEX 'index' (c1, c2)",
        IndexConstraint {
            display_as_key: false,
            name: Some(Ident::with_quote('\'', "index")),
            index_type: None,
            columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
            index_options: vec![],
        }
        .into()
    );

    // Anonymous indexes with an explicit index type.
    test_parse_table_constraint!(
        dialect,
        "INDEX USING BTREE (c1)",
        IndexConstraint {
            display_as_key: false,
            name: None,
            index_type: Some(IndexType::BTree),
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    test_parse_table_constraint!(
        dialect,
        "INDEX USING HASH (c1)",
        IndexConstraint {
            display_as_key: false,
            name: None,
            index_type: Some(IndexType::Hash),
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    // Named indexes with an explicit index type.
    test_parse_table_constraint!(
        dialect,
        "INDEX idx_name USING BTREE (c1)",
        IndexConstraint {
            display_as_key: false,
            name: Some(Ident::new("idx_name")),
            index_type: Some(IndexType::BTree),
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );

    test_parse_table_constraint!(
        dialect,
        "INDEX idx_name USING HASH (c1)",
        IndexConstraint {
            display_as_key: false,
            name: Some(Ident::new("idx_name")),
            index_type: Some(IndexType::Hash),
            columns: vec![mk_expected_col("c1")],
            index_options: vec![],
        }
        .into()
    );
}
22474
/// An unterminated string literal is a tokenizer-level failure and must
/// report the line/column where the literal started.
#[test]
fn test_tokenizer_error_loc() {
    let result = Parser::parse_sql(&GenericDialect, "foo '");
    let expected = ParserError::TokenizerError(
        "Unterminated string literal at Line: 1, Column: 5".to_string(),
    );
    assert_eq!(result, Err(expected));
}
22486
/// A grammar-level failure reports the offending token and its location.
#[test]
fn test_parser_error_loc() {
    let result = Parser::parse_sql(&GenericDialect, "SELECT this is a syntax error");
    let expected = ParserError::ParserError(
        "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
            .to_string(),
    );
    assert_eq!(result, Err(expected));
}
22499
/// `EXPLAIN` cannot wrap another `EXPLAIN`; the parser rejects the nesting
/// with a dedicated error message.
#[test]
fn test_nested_explain_error() {
    let result = Parser::parse_sql(&GenericDialect, "EXPLAIN EXPLAIN SELECT 1");
    let expected = ParserError::ParserError("Explain must be root of the plan".to_string());
    assert_eq!(result, Err(expected));
}
22511
/// `parse_multipart_identifier` splits dotted names into their parts,
/// honoring quoting (including doubled-quote escapes) and tolerating
/// whitespace around the separators.
#[test]
fn test_parse_multipart_identifier_positive() {
    let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

    // Builds an unquoted identifier part with an empty span.
    let plain = |value: &str| Ident {
        value: value.to_string(),
        quote_style: None,
        span: Span::empty(),
    };

    // A double-quoted part may contain a doubled-quote escape and even a
    // period without terminating the part.
    dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
        let actual = parser.parse_multipart_identifier().unwrap();
        assert_eq!(
            actual,
            vec![
                plain("CATALOG"),
                Ident {
                    value: "F(o)o. \"bar".to_string(),
                    quote_style: Some('"'),
                    span: Span::empty(),
                },
                plain("table"),
            ]
        );
    });

    // Whitespace around the period separator is tolerated.
    dialect.run_parser_method("CATALOG . table", |parser| {
        let actual = parser.parse_multipart_identifier().unwrap();
        assert_eq!(actual, vec![plain("CATALOG"), plain("table")]);
    });
}
22557
/// Malformed multipart identifiers must fail with a descriptive error.
#[test]
fn test_parse_multipart_identifier_negative() {
    // (input, expected error message) pairs.
    let cases = [
        ("", "sql parser error: Empty input when parsing identifier"),
        (
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        ),
        (
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        ),
        (
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        ),
        (
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        ),
    ];

    for (input, expected_err) in cases {
        all_dialects().run_parser_method(input, |parser| {
            let actual_err = parser.parse_multipart_identifier().unwrap_err();
            assert_eq!(actual_err.to_string(), expected_err);
        });
    }
}
22594
/// MySQL allows restricting a query to specific partitions with
/// `FROM t PARTITION (p0, p2)`; the partition names must reach the AST.
///
/// Fixes a vacuous-pass hazard: the original inner `if let`s had no `else`
/// arms, so if the AST shape ever changed the assertions were silently
/// skipped and the test still passed. Every non-matching shape now panics.
#[test]
fn test_mysql_partition_selection() {
    let sql = "SELECT * FROM employees PARTITION (p0, p2)";
    let expected = vec!["p0", "p2"];

    let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    assert_eq!(ast.len(), 1);
    if let Statement::Query(v) = &ast[0] {
        if let SetExpr::Select(select) = &*v.body {
            assert_eq!(select.from.len(), 1);
            let from: &TableWithJoins = &select.from[0];
            let table_factor = &from.relation;
            if let TableFactor::Table { partitions, .. } = table_factor {
                let actual: Vec<&str> = partitions
                    .iter()
                    .map(|ident| ident.value.as_str())
                    .collect();
                assert_eq!(expected, actual);
            } else {
                panic!("expected a plain table factor, got: {table_factor:?}");
            }
        } else {
            panic!("expected a SELECT body, got: {:?}", v.body);
        }
    } else {
        panic!("fail to parse mysql partition selection");
    }
}
22619
/// `&a` is not a recognized placeholder token, so the statement is rejected.
#[test]
fn test_replace_into_placeholders() {
    let result = Parser::parse_sql(&GenericDialect {}, "REPLACE INTO t (a) VALUES (&a)");
    assert!(result.is_err());
}
22626
/// `REPLACE INTO ... SET` must be followed by assignments, not a bare
/// placeholder, so the statement is rejected.
#[test]
fn test_replace_into_set_placeholder() {
    let result = Parser::parse_sql(&GenericDialect {}, "REPLACE INTO t SET ?");
    assert!(result.is_err());
}
22633
/// A bare `REPLACE` keyword with no target is a parse error.
#[test]
fn test_replace_incomplete() {
    let result = Parser::parse_sql(&MySqlDialect {}, "REPLACE");
    assert!(result.is_err());
}
22640
/// A named placeholder must immediately follow the colon; intervening
/// whitespace or a comment makes the statement invalid.
#[test]
fn test_placeholder_invalid_whitespace() {
    for separator in [" ", "/*invalid*/"] {
        let sql = format!("\nSELECT\n :{separator}fooBar");
        assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
    }
}
22648}