1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while parsing SQL.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could begin; payload is the
    /// rendered tokenizer error message (see `From<TokenizerError>` below).
    TokenizerError(String),
    /// A syntax error detected by the parser itself.
    ParserError(String),
    /// The configured nesting budget was exhausted (see the `recursion`
    /// module and `Parser::with_recursion_limit`).
    RecursionLimitExceeded,
}
60
// Builds an `Err(ParserError::ParserError(..))` whose message is `$MSG`
// immediately followed by the `Display` form of the location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
// Additional statement parsers (ALTER / MERGE) live in submodules.
mod alter;
mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks how many more levels of nesting the parser may descend into.
    ///
    /// The remaining depth lives in an `Rc<Cell<usize>>` that is shared with
    /// every [`DepthGuard`] handed out, so a consumed level is refunded
    /// automatically when the guard is dropped.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter allowing `remaining_depth` nested levels.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Spends one level of depth.
        ///
        /// Returns a guard that refunds the level on drop, or
        /// `ParserError::RecursionLimitExceeded` when the budget is gone.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get() {
                0 => Err(ParserError::RecursionLimitExceeded),
                depth => {
                    self.remaining_depth.set(depth - 1);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// RAII handle that gives one unit of depth back when dropped.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            // Refund the level consumed by `try_decrease`.
            self.remaining_depth.set(self.remaining_depth.get() + 1);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op recursion tracker for `no_std` builds: without `Rc`/`Cell`
    /// there is no shared counter, so every decrease succeeds.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The requested depth is ignored in this configuration.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; the returned guard carries no state.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Stateless stand-in for the `std` depth guard.
    pub struct DepthGuard {}
}
158
/// Whether a syntactic element is required or may be omitted.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    /// The element may be absent.
    Optional,
    /// The element must be present.
    Mandatory,
}
167
/// Whether a derived table was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// Result of parsing a select-item position: a regular expression, a
/// qualified wildcard (`alias.*`), or a bare `*`.
pub enum WildcardExpr {
    /// An ordinary expression.
    Expr(Expr),
    /// `name.*` — a wildcard qualified by an object name.
    QualifiedWildcard(ObjectName),
    /// A bare `*`.
    Wildcard,
}
185
186impl From<TokenizerError> for ParserError {
187 fn from(e: TokenizerError) -> Self {
188 ParserError::TokenizerError(e.to_string())
189 }
190}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// `ParserError` carries its message via `Display`; no underlying `source`.
impl core::error::Error for ParserError {}
207
// Default nesting budget used by `Parser::new`; override it with
// `Parser::with_recursion_limit`.
const DEFAULT_REMAINING_DEPTH: usize = 50;

// Sentinel returned when the parser reads past the end of the token
// stream; the 0/0 span is a dummy location.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
/// Boolean newtype recording whether a trailing closing bracket was
/// already consumed while parsing a nested construct, so the caller knows
/// not to expect another one.
// NOTE(review): exact usage site is outside this chunk — confirm against
// the data-type parser.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        MatchedTrailingBracket(value)
    }
}
239
/// Options that control parser behavior independently of the dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept a trailing comma at the end of comma-separated lists
    /// (default `false`).
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization (default `true`).
    pub unescape: bool,
    /// Require a semicolon between consecutive statements
    /// (default `true`; see `Parser::parse_statements`).
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Creates options with default values.
    pub fn new() -> Self {
        Default::default()
    }

    /// Sets whether trailing commas are accepted (builder style).
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Sets whether string literals are unescaped during tokenization
    /// (builder style).
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }

    /// Sets whether statements must be separated by semicolons (builder
    /// style). Added for consistency: the other two fields already had
    /// `with_*` builders while this one could only be set via the public
    /// field.
    pub fn with_require_semicolon_stmt_delimiter(
        mut self,
        require_semicolon_stmt_delimiter: bool,
    ) -> Self {
        self.require_semicolon_stmt_delimiter = require_semicolon_stmt_delimiter;
        self
    }
}
292
/// Parsing mode: certain constructs temporarily change how tokens are
/// interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Inside a `CONNECT BY` clause.
    // NOTE(review): the special handling for this state happens at usage
    // sites outside this chunk — confirm there.
    ConnectBy,
    /// Inside a column definition; `parse_subexpr` skips the `COLLATE`
    /// suffix in this state so it can be attached to the column instead.
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream and produces AST statements and
/// expressions.
pub struct Parser<'a> {
    /// The tokens being parsed (whitespace/comment tokens included — see
    /// `into_comments`).
    tokens: Vec<TokenWithSpan>,
    /// Index of the first unprocessed token in `tokens`.
    index: usize,
    /// Current parsing mode (see [`ParserState`]).
    state: ParserState,
    /// Dialect that customizes keyword and precedence handling.
    dialect: &'a dyn Dialect,
    /// Behavior switches (trailing commas, unescaping, ...).
    options: ParserOptions,
    /// Guards against runaway recursion on deeply nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
365 pub fn new(dialect: &'a dyn Dialect) -> Self {
381 Self {
382 tokens: vec![],
383 index: 0,
384 state: ParserState::Normal,
385 dialect,
386 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
387 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
388 }
389 }
390
391 pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
414 self.recursion_counter = RecursionCounter::new(recursion_limit);
415 self
416 }
417
418 pub fn with_options(mut self, options: ParserOptions) -> Self {
441 self.options = options;
442 self
443 }
444
445 pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
447 self.tokens = tokens;
448 self.index = 0;
449 self
450 }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
465 pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
472 debug!("Parsing sql '{sql}'...");
473 let tokens = Tokenizer::new(self.dialect, sql)
474 .with_unescape(self.options.unescape)
475 .tokenize_with_location()?;
476 Ok(self.with_tokens_with_locations(tokens))
477 }
478
    /// Parses zero or more semicolon-separated statements until EOF, or
    /// until an `END` keyword is seen where a delimiter was expected
    /// (end of an enclosing block).
    ///
    /// Returns `expected "end of statement"` if two statements are not
    /// separated by a semicolon while `require_semicolon_stmt_delimiter`
    /// is set.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any number of consecutive semicolons.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Dialect-configurable: statements need not be ';'-separated.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // Stop before a block-closing `END` — it belongs to the
                // caller, so it must not be consumed here.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
530 pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
546 Parser::new(dialect).try_with_sql(sql)?.parse_statements()
547 }
548
549 pub fn parse_sql_with_comments(
554 dialect: &'a dyn Dialect,
555 sql: &str,
556 ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
557 let mut p = Parser::new(dialect).try_with_sql(sql)?;
558 p.parse_statements().map(|stmts| (stmts, p.into_comments()))
559 }
560
561 fn into_comments(self) -> comments::Comments {
563 let mut comments = comments::Comments::default();
564 for t in self.tokens.into_iter() {
565 match t.token {
566 Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
567 comments.offer(comments::CommentWithSpan {
568 comment: comments::Comment::SingleLine {
569 content: comment,
570 prefix,
571 },
572 span: t.span,
573 });
574 }
575 Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
576 comments.offer(comments::CommentWithSpan {
577 comment: comments::Comment::MultiLine(comment),
578 span: t.span,
579 });
580 }
581 _ => {}
582 }
583 }
584 comments
585 }
586
    /// Parses a single top-level statement, dispatching on the first token.
    ///
    /// The dialect gets first refusal via `dialect.parse_statement`.
    /// Several arms call `prev_token()` before delegating because the
    /// corresponding sub-parser expects to consume the keyword itself.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Guard against stack overflow on deeply nested statements.
        let _guard = self.recursion_counter.try_decrease()?;

        // Dialect-specific statement syntax takes priority.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                // All four keywords can begin a query.
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                // These sub-parsers take the already-consumed keyword token
                // so they can attach it to the AST.
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // LISTEN/UNLISTEN/NOTIFY are gated on dialect support.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                Keyword::SECURITY => self.parse_security_label().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A parenthesized query, e.g. `(SELECT ...)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
732
    /// Parses the statement form of `CASE`:
    /// `CASE [<operand>] WHEN ... [ELSE ...] END [CASE]`.
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // No operand before the first WHEN means the "searched" form
        // (each WHEN carries its own condition).
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the attached end token is the
        // last keyword actually consumed.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
769
    /// Parses an `IF` statement:
    /// `IF <cond> THEN ... [ELSEIF ...]* [ELSE ...] END IF`.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF branches, each with its own condition.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The statement closes with `END IF`; the attached token is `IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
809
810 fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
814 self.expect_keyword_is(Keyword::WHILE)?;
815 let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
816
817 Ok(WhileStatement { while_block })
818 }
819
820 fn parse_conditional_statement_block(
828 &mut self,
829 terminal_keywords: &[Keyword],
830 ) -> Result<ConditionalStatementBlock, ParserError> {
831 let start_token = self.get_current_token().clone(); let mut then_token = None;
833
834 let condition = match &start_token.token {
835 Token::Word(w) if w.keyword == Keyword::ELSE => None,
836 Token::Word(w) if w.keyword == Keyword::WHILE => {
837 let expr = self.parse_expr()?;
838 Some(expr)
839 }
840 _ => {
841 let expr = self.parse_expr()?;
842 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
843 Some(expr)
844 }
845 };
846
847 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
848
849 Ok(ConditionalStatementBlock {
850 start_token: AttachedToken(start_token),
851 condition,
852 then_token,
853 conditional_statements,
854 })
855 }
856
857 pub(crate) fn parse_conditional_statements(
860 &mut self,
861 terminal_keywords: &[Keyword],
862 ) -> Result<ConditionalStatements, ParserError> {
863 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
864 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
865 let statements = self.parse_statement_list(terminal_keywords)?;
866 let end_token = self.expect_keyword(Keyword::END)?;
867
868 ConditionalStatements::BeginEnd(BeginEndStatements {
869 begin_token: AttachedToken(begin_token),
870 statements,
871 end_token: AttachedToken(end_token),
872 })
873 } else {
874 ConditionalStatements::Sequence {
875 statements: self.parse_statement_list(terminal_keywords)?,
876 }
877 };
878 Ok(conditional_statements)
879 }
880
881 pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
885 self.expect_keyword_is(Keyword::RAISE)?;
886
887 let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
888 self.expect_token(&Token::Eq)?;
889 Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
890 } else {
891 self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
892 };
893
894 Ok(RaiseStatement { value })
895 }
    /// Parses `COMMENT [IF EXISTS] ON <object_type> <name> IS
    /// {'<text>' | NULL}` (the `COMMENT` keyword is already consumed).
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        // Map the object-type keyword to a `CommentObject`; all variants
        // then parse the (possibly qualified) object name.
        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLLATION => {
                (CommentObject::Collation, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DOMAIN => {
                (CommentObject::Domain, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::FUNCTION => {
                (CommentObject::Function, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::INDEX => {
                (CommentObject::Index, self.parse_object_name(false)?)
            }
            // Two-keyword object type: MATERIALIZED VIEW.
            Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
                self.expect_keyword_is(Keyword::VIEW)?;
                (
                    CommentObject::MaterializedView,
                    self.parse_object_name(false)?,
                )
            }
            Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
                (CommentObject::Procedure, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
                (CommentObject::Sequence, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TYPE => {
                (CommentObject::Type, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::VIEW => {
                (CommentObject::View, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` removes the comment (represented as `None`).
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }
974
    /// Parses a MySQL-style `FLUSH` statement (the `FLUSH` keyword is
    /// already consumed). Rejected for dialects other than MySQL/Generic.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!(
                "Unsupported statement FLUSH",
                self.peek_token_ref().span.start
            );
        }

        // Optional location modifier.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // One flush target; multi-keyword targets are tried before their
        // single-keyword prefixes (e.g. `BINARY LOGS` before `LOGS`).
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // `RELAY LOGS [FOR CHANNEL <channel>]`
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // `TABLES [t1, t2, ...] [WITH READ LOCK | FOR EXPORT]`.
            // The loop consumes word tokens until a non-word (EOF, ';')
            // breaks it; unknown keywords are skipped silently.
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A plain identifier starts the table list.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected_ref(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
1065
1066 pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
1068 let repair = self.parse_keyword(Keyword::REPAIR);
1069 self.expect_keyword_is(Keyword::TABLE)?;
1070 let table_name = self.parse_object_name(false)?;
1071 let partition_action = self
1072 .maybe_parse(|parser| {
1073 let pa = match parser.parse_one_of_keywords(&[
1074 Keyword::ADD,
1075 Keyword::DROP,
1076 Keyword::SYNC,
1077 ]) {
1078 Some(Keyword::ADD) => Some(AddDropSync::ADD),
1079 Some(Keyword::DROP) => Some(AddDropSync::DROP),
1080 Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
1081 _ => None,
1082 };
1083 parser.expect_keyword_is(Keyword::PARTITIONS)?;
1084 Ok(pa)
1085 })?
1086 .unwrap_or_default();
1087 Ok(Msck {
1088 repair,
1089 table_name,
1090 partition_action,
1091 })
1092 }
1093
    /// Parses a `TRUNCATE` statement (keyword already consumed):
    /// `TRUNCATE [TABLE] [IF EXISTS] <names> [PARTITION (..)]
    /// [RESTART|CONTINUE IDENTITY] [CASCADE|RESTRICT] [ON CLUSTER ..]`.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        // Each target may carry `ONLY` and a trailing `*` (Postgres
        // inheritance syntax).
        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are Postgres-specific.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1144
1145 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1146 if self.parse_keyword(Keyword::CASCADE) {
1147 Some(CascadeOption::Cascade)
1148 } else if self.parse_keyword(Keyword::RESTRICT) {
1149 Some(CascadeOption::Restrict)
1150 } else {
1151 None
1152 }
1153 }
1154
    /// Parses the optional parenthesized option list of a DuckDB `ATTACH`:
    /// `(READ_ONLY [TRUE|FALSE], TYPE <ident>, ...)`.
    /// Returns an empty list when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean argument is optional (bare READ_ONLY).
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // `)` ends the list; `,` continues it.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1191
1192 pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1194 let database = self.parse_keyword(Keyword::DATABASE);
1195 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1196 let database_path = self.parse_identifier()?;
1197 let database_alias = if self.parse_keyword(Keyword::AS) {
1198 Some(self.parse_identifier()?)
1199 } else {
1200 None
1201 };
1202
1203 let attach_options = self.parse_attach_duckdb_database_options()?;
1204 Ok(Statement::AttachDuckDBDatabase {
1205 if_not_exists,
1206 database,
1207 database_path,
1208 database_alias,
1209 attach_options,
1210 })
1211 }
1212
1213 pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1215 let database = self.parse_keyword(Keyword::DATABASE);
1216 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1217 let database_alias = self.parse_identifier()?;
1218 Ok(Statement::DetachDuckDBDatabase {
1219 if_exists,
1220 database,
1221 database_alias,
1222 })
1223 }
1224
1225 pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1227 let database = self.parse_keyword(Keyword::DATABASE);
1228 let database_file_name = self.parse_expr()?;
1229 self.expect_keyword_is(Keyword::AS)?;
1230 let schema_name = self.parse_identifier()?;
1231 Ok(Statement::AttachDatabase {
1232 database,
1233 schema_name,
1234 database_file_name,
1235 })
1236 }
1237
    /// Parses an `ANALYZE` statement (keyword already consumed):
    /// `ANALYZE [TABLE] [<name> [(cols)]] [PARTITION (..)] [FOR COLUMNS ..]
    /// [CACHE METADATA] [NOSCAN] [COMPUTE STATISTICS]`, clauses in any
    /// order.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        // The table name itself is optional (e.g. bare `ANALYZE`).
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // Optional parenthesized column list directly after the name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // Accept the remaining clauses in any order until none matches.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // `FOR COLUMNS` may list columns or stand alone.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1302
    /// Parses an expression that may be a wildcard (`*`), a qualified
    /// wildcard (`a.b.*`), or a regular expression. On any non-wildcard
    /// outcome the token index is restored and `parse_expr` takes over.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the position so we can rewind if no wildcard matches.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token_ref().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: the outer pattern only admits the
                        // two variants handled above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    // Collect `.part` segments until `.*` or a non-segment.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Placeholder(s) => {
                                id_parts.push(Ident::new(s))
                            }
                            Token::Mul => {
                                // `prefix.*` — a qualified wildcard.
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            Token::LParen => {
                // `(*)` is treated as a bare wildcard.
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse as a normal expression.
        self.index = index;
        self.parse_expr()
    }
1366
    /// Parses a new expression starting from the dialect's lowest
    /// ("unknown") precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1371
    /// Parses an expression with an optional alias and an optional
    /// `ASC`/`DESC` ordering suffix.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject ASC/DESC/GROUP as *implicit* aliases so they can be read
        // as the ordering/next clause; explicit `AS` aliases still win.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            // No NULLS FIRST/LAST handling here.
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1391
    /// Parses a sub-expression with `precedence` as the binding floor:
    /// parse a prefix, then greedily fold infix operators whose precedence
    /// is strictly higher than `precedence` (precedence-climbing).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Depth guard: nested expressions recurse through here.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        // Extend the prefix with any compound suffix (see
        // `parse_compound_expr`).
        expr = self.parse_compound_expr(expr, vec![])?;

        // COLLATE binds directly after the expression — except inside a
        // column definition, where COLLATE belongs to the column instead.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            // Operator binds no tighter than our floor: let the caller
            // handle it.
            if precedence >= next_precedence {
                break;
            }

            // A '.' here starts compound access, not an infix operator.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1430
1431 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1433 let condition = self.parse_expr()?;
1434 let message = if self.parse_keyword(Keyword::AS) {
1435 Some(self.parse_expr()?)
1436 } else {
1437 None
1438 };
1439
1440 Ok(Statement::Assert { condition, message })
1441 }
1442
1443 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1445 let name = self.parse_identifier()?;
1446 Ok(Statement::Savepoint { name })
1447 }
1448
1449 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1451 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1452 let name = self.parse_identifier()?;
1453
1454 Ok(Statement::ReleaseSavepoint { name })
1455 }
1456
1457 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1459 let channel = self.parse_identifier()?;
1460 Ok(Statement::LISTEN { channel })
1461 }
1462
1463 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1465 let channel = if self.consume_token(&Token::Mul) {
1466 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1467 } else {
1468 match self.parse_identifier() {
1469 Ok(expr) => expr,
1470 _ => {
1471 self.prev_token();
1472 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1473 }
1474 }
1475 };
1476 Ok(Statement::UNLISTEN { channel })
1477 }
1478
1479 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1481 let channel = self.parse_identifier()?;
1482 let payload = if self.consume_token(&Token::Comma) {
1483 Some(self.parse_literal_string()?)
1484 } else {
1485 None
1486 };
1487 Ok(Statement::NOTIFY { channel, payload })
1488 }
1489
1490 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1492 if self.peek_keyword(Keyword::TABLE) {
1493 self.expect_keyword(Keyword::TABLE)?;
1494 let rename_tables = self.parse_comma_separated(|parser| {
1495 let old_name = parser.parse_object_name(false)?;
1496 parser.expect_keyword(Keyword::TO)?;
1497 let new_name = parser.parse_object_name(false)?;
1498
1499 Ok(RenameTable { old_name, new_name })
1500 })?;
1501 Ok(rename_tables.into())
1502 } else {
1503 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1504 }
1505 }
1506
    /// Parse an expression prefix that begins with a keyword that is reserved
    /// (or behaves specially) in the current dialect. Returns `Ok(None)` when
    /// the keyword has no special prefix handling, so the caller can fall
    /// back to treating the word as an identifier.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // TRUE/FALSE/NULL: step back and re-parse as value literals.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Niladic "functions" that take no parentheses in Postgres-like
            // dialects (CURRENT_USER etc.).
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Date/time functions that may appear with or without parens.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // In the Databricks dialect EXISTS is only treated as a subquery
            // test when SELECT/WITH follows; otherwise fall through.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                // parse_substring re-reads the keyword itself.
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[1, 2]` — bracketed array literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(SELECT ...)` — subquery-to-array function form.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // PRIOR is only special inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric type keywords introduce typed string literals when
            // the dialect supports geometric types.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1633
1634 fn parse_expr_prefix_by_unreserved_word(
1636 &mut self,
1637 w: &Word,
1638 w_span: Span,
1639 ) -> Result<Expr, ParserError> {
1640 let is_outer_join = self.peek_outer_join_operator();
1641 match &self.peek_token_ref().token {
1642 Token::LParen if !is_outer_join => {
1643 let id_parts = vec![w.to_ident(w_span)];
1644 self.parse_function(ObjectName::from(id_parts))
1645 }
1646 Token::SingleQuotedString(_)
1648 | Token::DoubleQuotedString(_)
1649 | Token::HexStringLiteral(_)
1650 if w.value.starts_with('_') =>
1651 {
1652 Ok(Expr::Prefixed {
1653 prefix: w.to_ident(w_span),
1654 value: self.parse_introduced_string_expr()?.into(),
1655 })
1656 }
1657 Token::SingleQuotedString(_)
1659 | Token::DoubleQuotedString(_)
1660 | Token::HexStringLiteral(_)
1661 if w.value.starts_with('_') =>
1662 {
1663 Ok(Expr::Prefixed {
1664 prefix: w.to_ident(w_span),
1665 value: self.parse_introduced_string_expr()?.into(),
1666 })
1667 }
1668 Token::Arrow if self.dialect.supports_lambda_functions() => {
1672 self.expect_token(&Token::Arrow)?;
1673 Ok(Expr::Lambda(LambdaFunction {
1674 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1675 name: w.to_ident(w_span),
1676 data_type: None,
1677 }),
1678 body: Box::new(self.parse_expr()?),
1679 syntax: LambdaSyntax::Arrow,
1680 }))
1681 }
1682 Token::Word(_)
1686 if self.dialect.supports_lambda_functions()
1687 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1688 {
1689 let data_type = self.parse_data_type()?;
1690 self.expect_token(&Token::Arrow)?;
1691 Ok(Expr::Lambda(LambdaFunction {
1692 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1693 name: w.to_ident(w_span),
1694 data_type: Some(data_type),
1695 }),
1696 body: Box::new(self.parse_expr()?),
1697 syntax: LambdaSyntax::Arrow,
1698 }))
1699 }
1700 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1701 }
1702 }
1703
1704 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1707 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1708 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1709 } else {
1710 false
1711 }
1712 }
1713
1714 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1716 if let Some(prefix) = self.dialect.parse_prefix(self) {
1718 return prefix;
1719 }
1720
1721 let loc = self.peek_token_ref().span.start;
1738 let opt_expr = self.maybe_parse(|parser| {
1739 match parser.parse_data_type()? {
1740 DataType::Interval { .. } => parser.parse_interval(),
1741 DataType::Custom(ref name, ref modifiers)
1752 if modifiers.is_empty()
1753 && Self::is_simple_unquoted_object_name(name, "xml")
1754 && parser.dialect.supports_xml_expressions() =>
1755 {
1756 Ok(Expr::TypedString(TypedString {
1757 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1758 value: parser.parse_value()?,
1759 uses_odbc_syntax: false,
1760 }))
1761 }
1762 DataType::Custom(..) => parser_err!("dummy", loc),
1763 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1765 Ok(Expr::Cast {
1766 kind: CastKind::Cast,
1767 expr: Box::new(parser.parse_expr()?),
1768 data_type: DataType::Binary(None),
1769 array: false,
1770 format: None,
1771 })
1772 }
1773 data_type => Ok(Expr::TypedString(TypedString {
1774 data_type,
1775 value: parser.parse_value()?,
1776 uses_odbc_syntax: false,
1777 })),
1778 }
1779 })?;
1780
1781 if let Some(expr) = opt_expr {
1782 return Ok(expr);
1783 }
1784
1785 let dialect = self.dialect;
1789
1790 self.advance_token();
1791 let next_token_index = self.get_current_index();
1792 let next_token = self.get_current_token();
1793 let span = next_token.span;
1794 let expr = match &next_token.token {
1795 Token::Word(w) => {
1796 let w = w.clone();
1805 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1806 Ok(Some(expr)) => Ok(expr),
1808
1809 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1811
1812 Err(e) => {
1819 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1820 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1821 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1822 }) {
1823 return Ok(expr);
1824 }
1825 }
1826 return Err(e);
1827 }
1828 }
1829 } Token::LBracket => self.parse_array_expr(false),
1832 tok @ Token::Minus | tok @ Token::Plus => {
1833 let op = if *tok == Token::Plus {
1834 UnaryOperator::Plus
1835 } else {
1836 UnaryOperator::Minus
1837 };
1838 Ok(Expr::UnaryOp {
1839 op,
1840 expr: Box::new(
1841 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1842 ),
1843 })
1844 }
1845 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1846 op: UnaryOperator::BangNot,
1847 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1848 }),
1849 tok @ Token::DoubleExclamationMark
1850 | tok @ Token::PGSquareRoot
1851 | tok @ Token::PGCubeRoot
1852 | tok @ Token::AtSign
1853 if dialect_is!(dialect is PostgreSqlDialect) =>
1854 {
1855 let op = match tok {
1856 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1857 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1858 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1859 Token::AtSign => UnaryOperator::PGAbs,
1860 _ => {
1861 return Err(ParserError::ParserError(
1862 "Internal parser error: unexpected unary operator token".to_string(),
1863 ))
1864 }
1865 };
1866 Ok(Expr::UnaryOp {
1867 op,
1868 expr: Box::new(
1869 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1870 ),
1871 })
1872 }
1873 Token::Tilde => Ok(Expr::UnaryOp {
1874 op: UnaryOperator::BitwiseNot,
1875 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1876 }),
1877 tok @ Token::Sharp
1878 | tok @ Token::AtDashAt
1879 | tok @ Token::AtAt
1880 | tok @ Token::QuestionMarkDash
1881 | tok @ Token::QuestionPipe
1882 if self.dialect.supports_geometric_types() =>
1883 {
1884 let op = match tok {
1885 Token::Sharp => UnaryOperator::Hash,
1886 Token::AtDashAt => UnaryOperator::AtDashAt,
1887 Token::AtAt => UnaryOperator::DoubleAt,
1888 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1889 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1890 _ => {
1891 return Err(ParserError::ParserError(format!(
1892 "Unexpected token in unary operator parsing: {tok:?}"
1893 )))
1894 }
1895 };
1896 Ok(Expr::UnaryOp {
1897 op,
1898 expr: Box::new(
1899 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1900 ),
1901 })
1902 }
1903 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1904 {
1905 self.prev_token();
1906 Ok(Expr::Value(self.parse_value()?))
1907 }
1908 Token::UnicodeStringLiteral(_) => {
1909 self.prev_token();
1910 Ok(Expr::Value(self.parse_value()?))
1911 }
1912 Token::Number(_, _)
1913 | Token::SingleQuotedString(_)
1914 | Token::DoubleQuotedString(_)
1915 | Token::TripleSingleQuotedString(_)
1916 | Token::TripleDoubleQuotedString(_)
1917 | Token::DollarQuotedString(_)
1918 | Token::SingleQuotedByteStringLiteral(_)
1919 | Token::DoubleQuotedByteStringLiteral(_)
1920 | Token::TripleSingleQuotedByteStringLiteral(_)
1921 | Token::TripleDoubleQuotedByteStringLiteral(_)
1922 | Token::SingleQuotedRawStringLiteral(_)
1923 | Token::DoubleQuotedRawStringLiteral(_)
1924 | Token::TripleSingleQuotedRawStringLiteral(_)
1925 | Token::TripleDoubleQuotedRawStringLiteral(_)
1926 | Token::NationalStringLiteral(_)
1927 | Token::QuoteDelimitedStringLiteral(_)
1928 | Token::NationalQuoteDelimitedStringLiteral(_)
1929 | Token::HexStringLiteral(_) => {
1930 self.prev_token();
1931 Ok(Expr::Value(self.parse_value()?))
1932 }
1933 Token::LParen => {
1934 let expr =
1935 if let Some(expr) = self.try_parse_expr_sub_query()? {
1936 expr
1937 } else if let Some(lambda) = self.try_parse_lambda()? {
1938 return Ok(lambda);
1939 } else {
1940 let exprs = self.with_state(ParserState::Normal, |p| {
1951 p.parse_comma_separated(Parser::parse_expr)
1952 })?;
1953 match exprs.len() {
1954 0 => return Err(ParserError::ParserError(
1955 "Internal parser error: parse_comma_separated returned empty list"
1956 .to_string(),
1957 )),
1958 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1959 _ => Expr::Tuple(exprs),
1960 }
1961 };
1962 self.expect_token(&Token::RParen)?;
1963 Ok(expr)
1964 }
1965 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1966 self.prev_token();
1967 Ok(Expr::Value(self.parse_value()?))
1968 }
1969 Token::LBrace => {
1970 self.prev_token();
1971 self.parse_lbrace_expr()
1972 }
1973 _ => self.expected_at("an expression", next_token_index),
1974 }?;
1975
1976 Ok(expr)
1977 }
1978
1979 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1980 Ok(Expr::TypedString(TypedString {
1981 data_type: DataType::GeometricType(kind),
1982 value: self.parse_value()?,
1983 uses_odbc_syntax: false,
1984 }))
1985 }
1986
    /// Parse compound access following a prefix expression: dot-separated
    /// member access (`a.b.c`, possibly ending in `.*`) and/or bracket
    /// subscripts (`a[1][2]`), combining everything into a single expression.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // Only PostgreSQL treats a trailing `.*` as part of
                        // the expression; other dialects put the `.` back and
                        // let the caller deal with it.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => {
                        // Speculatively parse a full subexpression after the
                        // dot (covers function calls and nested compound
                        // access); only certain result shapes are accepted.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested compound results into our chain
                            // instead of nesting access expressions.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Speculative parse failed; require a plain
                                // identifier so the error message is precise.
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` — every element before the `*` must be an identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // Oracle-style `(+)` outer-join marker after a column reference.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2116
    /// Combine a root expression and its access chain into the most specific
    /// AST form: a compound identifier, a function with a compound name, an
    /// outer-join column, or a generic compound field access.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        // Pure dotted identifiers collapse to a CompoundIdentifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `schema.table.func(...)`: identifiers followed by a trailing
        // function call fold into a single function with a compound name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // Every element except the trailing function call.
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root + chain identifiers to the function's name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // `a.b(+)`: a single trailing outer-join marker attaches the whole
        // dotted name to the OuterJoin node.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Anything else stays a generic field-access chain.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2209
2210 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2211 match k {
2212 Keyword::LOCAL => Some(ContextModifier::Local),
2213 Keyword::GLOBAL => Some(ContextModifier::Global),
2214 Keyword::SESSION => Some(ContextModifier::Session),
2215 _ => None,
2216 }
2217 }
2218
2219 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2221 if !matches!(root, Expr::Identifier(_)) {
2222 return false;
2223 }
2224 fields
2225 .iter()
2226 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2227 }
2228
2229 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2231 let mut idents = vec![];
2232 if let Expr::Identifier(root) = root {
2233 idents.push(root);
2234 for x in fields {
2235 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2236 idents.push(ident);
2237 } else {
2238 return parser_err!(
2239 format!("Expected identifier, found: {}", x),
2240 x.span().start
2241 );
2242 }
2243 }
2244 Ok(idents)
2245 } else {
2246 parser_err!(
2247 format!("Expected identifier, found: {}", root),
2248 root.span().start
2249 )
2250 }
2251 }
2252
2253 fn peek_outer_join_operator(&mut self) -> bool {
2255 if !self.dialect.supports_outer_join_operator() {
2256 return false;
2257 }
2258
2259 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2260 Token::LParen == maybe_lparen.token
2261 && Token::Plus == maybe_plus.token
2262 && Token::RParen == maybe_rparen.token
2263 }
2264
2265 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2268 self.dialect.supports_outer_join_operator()
2269 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2270 }
2271
2272 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2274 self.expect_token(&Token::LParen)?;
2275 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2276 self.expect_token(&Token::RParen)?;
2277
2278 Ok(options)
2279 }
2280
2281 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2282 let name = self.parse_identifier()?;
2283
2284 let next_token = self.peek_token_ref();
2285 if next_token == &Token::Comma || next_token == &Token::RParen {
2286 return Ok(UtilityOption { name, arg: None });
2287 }
2288 let arg = self.parse_expr()?;
2289
2290 Ok(UtilityOption {
2291 name,
2292 arg: Some(arg),
2293 })
2294 }
2295
2296 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2297 if !self.peek_sub_query() {
2298 return Ok(None);
2299 }
2300
2301 Ok(Some(Expr::Subquery(self.parse_query()?)))
2302 }
2303
2304 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2305 if !self.dialect.supports_lambda_functions() {
2306 return Ok(None);
2307 }
2308 self.maybe_parse(|p| {
2309 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2310 p.expect_token(&Token::RParen)?;
2311 p.expect_token(&Token::Arrow)?;
2312 let expr = p.parse_expr()?;
2313 Ok(Expr::Lambda(LambdaFunction {
2314 params: OneOrManyWithParens::Many(params),
2315 body: Box::new(expr),
2316 syntax: LambdaSyntax::Arrow,
2317 }))
2318 })
2319 }
2320
2321 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2331 let params = self.parse_lambda_function_parameters()?;
2333 self.expect_token(&Token::Colon)?;
2335 let body = self.parse_expr()?;
2337 Ok(Expr::Lambda(LambdaFunction {
2338 params,
2339 body: Box::new(body),
2340 syntax: LambdaSyntax::LambdaKeyword,
2341 }))
2342 }
2343
2344 fn parse_lambda_function_parameters(
2346 &mut self,
2347 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2348 let params = if self.consume_token(&Token::LParen) {
2350 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2352 self.expect_token(&Token::RParen)?;
2353 OneOrManyWithParens::Many(params)
2354 } else {
2355 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2357 if params.len() == 1 {
2358 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2359 } else {
2360 OneOrManyWithParens::Many(params)
2361 }
2362 };
2363 Ok(params)
2364 }
2365
2366 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2368 let name = self.parse_identifier()?;
2369 let data_type = match &self.peek_token_ref().token {
2370 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2371 _ => None,
2372 };
2373 Ok(LambdaFunctionParameter { name, data_type })
2374 }
2375
2376 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2383 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2385 return Ok(Some(expr));
2386 }
2387 self.maybe_parse_odbc_body_datetime()
2389 }
2390
2391 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2402 self.maybe_parse(|p| {
2403 let token = p.next_token().clone();
2404 let word_string = token.token.to_string();
2405 let data_type = match word_string.as_str() {
2406 "t" => DataType::Time(None, TimezoneInfo::None),
2407 "d" => DataType::Date,
2408 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2409 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2410 };
2411 let value = p.parse_value()?;
2412 Ok(Expr::TypedString(TypedString {
2413 data_type,
2414 value,
2415 uses_odbc_syntax: true,
2416 }))
2417 })
2418 }
2419
2420 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2429 self.maybe_parse(|p| {
2430 p.expect_keyword(Keyword::FN)?;
2431 let fn_name = p.parse_object_name(false)?;
2432 let mut fn_call = p.parse_function_call(fn_name)?;
2433 fn_call.uses_odbc_syntax = true;
2434 Ok(Expr::Function(fn_call))
2435 })
2436 }
2437
2438 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2440 self.parse_function_call(name).map(Expr::Function)
2441 }
2442
    /// Parse the argument list and trailing clauses (WITHIN GROUP, FILTER,
    /// null treatment, OVER) of a function call whose name has already been
    /// parsed; the opening `(` is consumed here.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Some dialects permit a bare subquery as the sole argument,
        // e.g. `fn(SELECT ...)`.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style `fn(params)(args)`: a second parenthesized list
        // promotes the first list to parameters.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // `WITHIN GROUP (ORDER BY ...)` for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE ...)` when the dialect supports aggregate filters.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for RESPECT/IGNORE NULLS here if it wasn't already
        // supplied inside the argument list.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (...)` or `OVER window_name` for window functions.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2530
2531 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2533 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2534 Some(keyword) => {
2535 self.expect_keyword_is(Keyword::NULLS)?;
2536
2537 Ok(match keyword {
2538 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2539 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2540 _ => None,
2541 })
2542 }
2543 None => Ok(None),
2544 }
2545 }
2546
2547 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2549 let args = if self.consume_token(&Token::LParen) {
2550 FunctionArguments::List(self.parse_function_argument_list()?)
2551 } else {
2552 FunctionArguments::None
2553 };
2554 Ok(Expr::Function(Function {
2555 name,
2556 uses_odbc_syntax: false,
2557 parameters: FunctionArguments::None,
2558 args,
2559 filter: None,
2560 over: None,
2561 null_treatment: None,
2562 within_group: vec![],
2563 }))
2564 }
2565
2566 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2568 let next_token = self.next_token();
2569 match &next_token.token {
2570 Token::Word(w) => match w.keyword {
2571 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2572 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2573 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2574 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2575 },
2576 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2577 }
2578 }
2579
    /// Parses a window frame specification:
    /// `<units> BETWEEN <bound> AND <bound>` or `<units> <bound>`.
    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
        let units = self.parse_window_frame_units()?;
        // Without BETWEEN only a start bound is given; end_bound stays None.
        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
            let start_bound = self.parse_window_frame_bound()?;
            self.expect_keyword_is(Keyword::AND)?;
            let end_bound = Some(self.parse_window_frame_bound()?);
            (start_bound, end_bound)
        } else {
            (self.parse_window_frame_bound()?, None)
        };
        Ok(WindowFrame {
            units,
            start_bound,
            end_bound,
        })
    }
2597
    /// Parses one window frame bound: `CURRENT ROW`,
    /// `UNBOUNDED PRECEDING|FOLLOWING`, or `<offset> PRECEDING|FOLLOWING`.
    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
            Ok(WindowFrameBound::CurrentRow)
        } else {
            // UNBOUNDED carries no offset expression; otherwise parse one.
            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
                None
            } else {
                Some(Box::new(match &self.peek_token_ref().token {
                    // A quoted string offset is parsed as an interval
                    // (e.g. `'1 day' PRECEDING`).
                    Token::SingleQuotedString(_) => self.parse_interval()?,
                    _ => self.parse_expr()?,
                }))
            };
            if self.parse_keyword(Keyword::PRECEDING) {
                Ok(WindowFrameBound::Preceding(rows))
            } else if self.parse_keyword(Keyword::FOLLOWING) {
                Ok(WindowFrameBound::Following(rows))
            } else {
                self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
            }
        }
    }
2620
2621 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2623 if self.dialect.supports_group_by_expr() {
2624 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2625 self.expect_token(&Token::LParen)?;
2626 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2627 self.expect_token(&Token::RParen)?;
2628 Ok(Expr::GroupingSets(result))
2629 } else if self.parse_keyword(Keyword::CUBE) {
2630 self.expect_token(&Token::LParen)?;
2631 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2632 self.expect_token(&Token::RParen)?;
2633 Ok(Expr::Cube(result))
2634 } else if self.parse_keyword(Keyword::ROLLUP) {
2635 self.expect_token(&Token::LParen)?;
2636 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2637 self.expect_token(&Token::RParen)?;
2638 Ok(Expr::Rollup(result))
2639 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2640 Ok(Expr::Tuple(vec![]))
2644 } else {
2645 self.parse_expr()
2646 }
2647 } else {
2648 self.parse_expr()
2650 }
2651 }
2652
2653 fn parse_tuple(
2657 &mut self,
2658 lift_singleton: bool,
2659 allow_empty: bool,
2660 ) -> Result<Vec<Expr>, ParserError> {
2661 if lift_singleton {
2662 if self.consume_token(&Token::LParen) {
2663 let result = if allow_empty && self.consume_token(&Token::RParen) {
2664 vec![]
2665 } else {
2666 let result = self.parse_comma_separated(Parser::parse_expr)?;
2667 self.expect_token(&Token::RParen)?;
2668 result
2669 };
2670 Ok(result)
2671 } else {
2672 Ok(vec![self.parse_expr()?])
2673 }
2674 } else {
2675 self.expect_token(&Token::LParen)?;
2676 let result = if allow_empty && self.consume_token(&Token::RParen) {
2677 vec![]
2678 } else {
2679 let result = self.parse_comma_separated(Parser::parse_expr)?;
2680 self.expect_token(&Token::RParen)?;
2681 result
2682 };
2683 Ok(result)
2684 }
2685 }
2686
    /// Parses a `CASE ... END` expression. The `CASE` keyword is the
    /// current token when this is called.
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        // Keep the CASE token so the AST can carry its source span.
        let case_token = AttachedToken(self.get_current_token().clone());
        let mut operand = None;
        // No immediate WHEN means the "simple" form: `CASE <operand> WHEN ...`.
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword_is(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        // One iteration per WHEN ... THEN ... arm; the leading WHEN of each
        // arm was consumed above or at the end of the previous iteration.
        loop {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::THEN)?;
            let result = self.parse_expr()?;
            conditions.push(CaseWhen { condition, result });
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        // The END token is also attached for span round-tripping.
        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
        Ok(Expr::Case {
            case_token,
            end_token,
            operand,
            conditions,
            else_result,
        })
    }
2719
2720 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2722 if self.parse_keyword(Keyword::FORMAT) {
2723 let value = self.parse_value()?;
2724 match self.parse_optional_time_zone()? {
2725 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2726 None => Ok(Some(CastFormat::Value(value))),
2727 }
2728 } else {
2729 Ok(None)
2730 }
2731 }
2732
2733 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2735 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2736 self.parse_value().map(Some)
2737 } else {
2738 Ok(None)
2739 }
2740 }
2741
    /// Parses MSSQL-style `CONVERT(data_type, expr [, style, ...])`,
    /// where the target type precedes the value being converted.
    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let data_type = self.parse_data_type()?;
        self.expect_token(&Token::Comma)?;
        let expr = self.parse_expr()?;
        // Optional trailing style arguments, e.g. CONVERT(VARCHAR, x, 120).
        let styles = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            Default::default()
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset: None,
            target_before_value: true,
            styles,
        })
    }
2763
    /// Parses a `CONVERT` expression in one of three shapes:
    /// the MSSQL type-first form, `CONVERT(expr USING charset)`, or
    /// `CONVERT(expr, data_type [CHARACTER SET charset])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `CONVERT(expr USING charset)` form.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        // `CONVERT(expr, data_type [CHARACTER SET charset])` form.
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2803
    /// Parses `(expr AS data_type [ARRAY] [FORMAT ...])` after a cast
    /// keyword was consumed; `kind` records which cast variant it was.
    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let data_type = self.parse_data_type()?;
        // Optional trailing ARRAY marker (e.g. `CAST(x AS INT ARRAY)`).
        let array = self.parse_keyword(Keyword::ARRAY);
        let format = self.parse_optional_cast_format()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Cast {
            kind,
            expr: Box::new(expr),
            data_type,
            array,
            format,
        })
    }
2821
2822 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2824 self.expect_token(&Token::LParen)?;
2825 let exists_node = Expr::Exists {
2826 negated,
2827 subquery: self.parse_query()?,
2828 };
2829 self.expect_token(&Token::RParen)?;
2830 Ok(exists_node)
2831 }
2832
    /// Parses the argument list of `EXTRACT(<field> FROM <expr>)`, also
    /// accepting `EXTRACT(<field>, <expr>)` when the dialect allows it.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        // Record which separator syntax was used for AST round-tripping.
        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
        {
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2857
    /// Parses the argument list of `CEIL(...)`/`FLOOR(...)`, which may carry
    /// either a `TO <datetime field>` suffix or a `, <scale>` suffix.
    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `CEIL(expr TO DAY)`: round to a datetime field.
        let field = if self.parse_keyword(Keyword::TO) {
            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
        } else if self.consume_token(&Token::Comma) {
            // `CEIL(expr, scale)`: the scale argument must be numeric.
            let v = self.parse_value()?;
            if matches!(v.value, Value::Number(_, _)) {
                CeilFloorKind::Scale(v)
            } else {
                return Err(ParserError::ParserError(
                    "Scale field can only be of number type".to_string(),
                ));
            }
        } else {
            // Plain `CEIL(expr)` — no field or scale.
            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
        };
        self.expect_token(&Token::RParen)?;
        if is_ceil {
            Ok(Expr::Ceil {
                expr: Box::new(expr),
                field,
            })
        } else {
            Ok(Expr::Floor {
                expr: Box::new(expr),
                field,
            })
        }
    }
2892
    /// Parses `POSITION(<expr> IN <expr>)`; if that shape does not match,
    /// backtracks and parses a regular function call named `ident` instead.
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        // Parse the needle below BETWEEN precedence so the `IN` keyword is
        // left for this rule rather than consumed by the sub-expression.
        let between_prec = self.dialect.prec_value(Precedence::Between);
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword_is(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            // e.g. `POSITION(x)` is treated as an ordinary function call.
            None => self.parse_function(ObjectName::from(vec![ident])),
        }
    }
2917
    /// Parses `SUBSTR`/`SUBSTRING` call syntax, supporting both the
    /// standard `FROM ... FOR ...` form and the comma-separated form.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        // `shorthand` records whether SUBSTR (vs SUBSTRING) was written so
        // the AST can be printed back the same way.
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            _ => {
                self.prev_token();
                return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // `special` marks the non-standard comma-separated argument form.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2950
2951 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2955 self.expect_token(&Token::LParen)?;
2957 let expr = self.parse_expr()?;
2958 self.expect_keyword_is(Keyword::PLACING)?;
2959 let what_expr = self.parse_expr()?;
2960 self.expect_keyword_is(Keyword::FROM)?;
2961 let from_expr = self.parse_expr()?;
2962 let mut for_expr = None;
2963 if self.parse_keyword(Keyword::FOR) {
2964 for_expr = Some(self.parse_expr()?);
2965 }
2966 self.expect_token(&Token::RParen)?;
2967
2968 Ok(Expr::Overlay {
2969 expr: Box::new(expr),
2970 overlay_what: Box::new(what_expr),
2971 overlay_from: Box::new(from_expr),
2972 overlay_for: for_expr.map(Box::new),
2973 })
2974 }
2975
    /// Parses the argument list of `TRIM(...)`:
    /// `TRIM([BOTH|LEADING|TRAILING] [<what> FROM] <expr>)`, or — for
    /// dialects that allow it — the comma form `TRIM(<expr>, <chars>, ...)`.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional BOTH/LEADING/TRAILING qualifier.
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // A following FROM means the first expression was actually the
            // removal set: `TRIM(<what> FROM <expr>)`.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3020
    /// Parses the BOTH/LEADING/TRAILING qualifier of a TRIM expression.
    /// Consumes one token and errors on anything else.
    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::BOTH => Ok(TrimWhereField::Both),
                Keyword::LEADING => Ok(TrimWhereField::Leading),
                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
                _ => self.expected("trim_where field", next_token)?,
            },
            _ => self.expected("trim_where field", next_token),
        }
    }
3036
    /// Parses the remainder of an array literal after the opening `[`;
    /// `named` is true for the keyword form (`ARRAY[...]`).
    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
        self.expect_token(&Token::RBracket)?;
        Ok(Expr::Array(Array { elem: exprs, named }))
    }
3044
    /// Parses the optional `ON OVERFLOW ...` clause of LISTAGG:
    /// either `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler>] WITH|WITHOUT COUNT`.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler is optional: WITH/WITHOUT next means it was
                // omitted; a string-literal token means it was supplied.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH/WITHOUT must precede COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3083
    /// Parses a date/time field keyword (YEAR, MONTH, ..., TIMEZONE_REGION)
    /// as used by EXTRACT, CEIL/FLOOR `TO`, and INTERVAL qualifiers.
    /// Dialect hooks allow custom identifiers and single-quoted fields.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery/Generic allow `WEEK(<weekday>)`, e.g. WEEK(MONDAY).
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Dialects may allow arbitrary identifiers as custom fields;
                // back up so the word is re-read as an identifier.
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            // Some dialects also allow a single-quoted custom field.
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3162
3163 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3167 match &self.peek_token_ref().token {
3168 Token::Word(w) => match w.keyword {
3169 Keyword::EXISTS => {
3170 let negated = true;
3171 let _ = self.parse_keyword(Keyword::EXISTS);
3172 self.parse_exists_expr(negated)
3173 }
3174 _ => Ok(Expr::UnaryOp {
3175 op: UnaryOperator::Not,
3176 expr: Box::new(
3177 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3178 ),
3179 }),
3180 },
3181 _ => Ok(Expr::UnaryOp {
3182 op: UnaryOperator::Not,
3183 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3184 }),
3185 }
3186 }
3187
3188 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3198 let token = self.expect_token(&Token::LBrace)?;
3199
3200 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3201 self.expect_token(&Token::RBrace)?;
3202 return Ok(fn_expr);
3203 }
3204
3205 if self.dialect.supports_dictionary_syntax() {
3206 self.prev_token(); return self.parse_dictionary();
3208 }
3209
3210 self.expected("an expression", token)
3211 }
3212
    /// Parses MySQL full-text search syntax
    /// `(col, ...) AGAINST (<value> [search modifier])`, assuming the
    /// leading `MATCH` keyword was already consumed.
    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;

        self.expect_keyword_is(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        // Optional search modifier; NATURAL LANGUAGE MODE may additionally
        // be followed by WITH QUERY EXPANSION.
        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }
3261
    /// Parses the body of an `INTERVAL` expression (the keyword itself has
    /// already been consumed): a value, an optional leading unit, optional
    /// precision(s), and an optional `TO <unit>` range.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects requiring a unit qualifier accept a full expression as
        // the value; otherwise only a prefix expression is consumed here.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            // The qualifier is mandatory for this dialect but missing.
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        // `SECOND` may carry a (leading, fractional) precision pair; other
        // units take one optional precision and may be followed by
        // `TO <unit> [(fractional precision)]`.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3344
    /// Peeks at the next token and reports whether it is a keyword naming a
    /// temporal unit usable as an INTERVAL qualifier (YEAR, MONTH, ...).
    /// Does not advance the parser.
    pub fn next_token_is_temporal_unit(&mut self) -> bool {
        if let Token::Word(word) = &self.peek_token_ref().token {
            matches!(
                word.keyword,
                Keyword::YEAR
                    | Keyword::YEARS
                    | Keyword::MONTH
                    | Keyword::MONTHS
                    | Keyword::WEEK
                    | Keyword::WEEKS
                    | Keyword::DAY
                    | Keyword::DAYS
                    | Keyword::HOUR
                    | Keyword::HOURS
                    | Keyword::MINUTE
                    | Keyword::MINUTES
                    | Keyword::SECOND
                    | Keyword::SECONDS
                    | Keyword::CENTURY
                    | Keyword::DECADE
                    | Keyword::DOW
                    | Keyword::DOY
                    | Keyword::EPOCH
                    | Keyword::ISODOW
                    | Keyword::ISOYEAR
                    | Keyword::JULIAN
                    | Keyword::MICROSECOND
                    | Keyword::MICROSECONDS
                    | Keyword::MILLENIUM
                    | Keyword::MILLENNIUM
                    | Keyword::MILLISECOND
                    | Keyword::MILLISECONDS
                    | Keyword::NANOSECOND
                    | Keyword::NANOSECONDS
                    | Keyword::QUARTER
                    | Keyword::TIMEZONE
                    | Keyword::TIMEZONE_HOUR
                    | Keyword::TIMEZONE_MINUTE
            )
        } else {
            false
        }
    }
3390
    /// Parses a typed or untyped STRUCT literal, e.g. `STRUCT<a INT64>(1)`
    /// or `STRUCT(1 AS a)`. Called with the STRUCT keyword already
    /// consumed, hence the initial backup.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // Step back so parse_struct_type_def can re-read the STRUCT keyword.
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        // A `>>` that also closed an enclosing list must not escape the
        // literal's own type definition.
        if trailing_bracket.0 {
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token_ref().span.start
            );
        }

        self.expect_token(&Token::LParen)?;
        // When an explicit (non-empty) typed field list was given, `AS name`
        // aliases inside the value list are rejected.
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3418
    /// Parses one value of a STRUCT literal, optionally aliased with
    /// `AS <name>`. Aliases are rejected when `typed_syntax` is set because
    /// the field names were already given in the type definition.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // Back up so the error span points at the AS keyword.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token_ref().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3450
    /// Parses `STRUCT< field, ... >` using `elem_parser` for each field.
    /// Returns the parsed fields plus a flag saying whether a `>>` token
    /// closed this list and an enclosing one simultaneously. A bare
    /// `STRUCT` with no `<` yields an empty field list (the untyped form).
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // No `<`: untyped STRUCT — the field list stays empty.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // Stop when a field already consumed the closing bracket(s),
            // or when there is no further comma.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3493
    /// Parses a DuckDB `STRUCT(name type, ...)` type definition.
    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
        self.expect_keyword_is(Keyword::STRUCT)?;
        self.expect_token(&Token::LParen)?;
        let struct_body = self.parse_comma_separated(|parser| {
            let field_name = parser.parse_identifier()?;
            let field_type = parser.parse_data_type()?;

            Ok(StructField {
                field_name: Some(field_name),
                field_type,
                options: None,
            })
        });
        // NOTE(review): the field-list Result is only returned after the
        // closing paren check, so when both fail the RParen error wins.
        self.expect_token(&Token::RParen)?;
        struct_body
    }
3511
    /// Parses one `[name[:]] type [OPTIONS ...]` entry of a STRUCT/Tuple
    /// type definition, reporting whether the type's closing `>` (or `>>`)
    /// was already consumed while parsing the field type.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // Two consecutive words (or word followed by colon) mean the field
        // is named; a single word would be a bare type.
        let is_named_field = matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
        );

        let field_name = if is_named_field {
            let name = self.parse_identifier()?;
            // The `name: type` separator colon is optional.
            let _ = self.consume_token(&Token::Colon);
            Some(name)
        } else {
            None
        };

        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3553
3554 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3564 self.expect_keyword_is(Keyword::UNION)?;
3565
3566 self.expect_token(&Token::LParen)?;
3567
3568 let fields = self.parse_comma_separated(|p| {
3569 Ok(UnionField {
3570 field_name: p.parse_identifier()?,
3571 field_type: p.parse_data_type()?,
3572 })
3573 })?;
3574
3575 self.expect_token(&Token::RParen)?;
3576
3577 Ok(fields)
3578 }
3579
3580 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3591 self.expect_token(&Token::LBrace)?;
3592
3593 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3594
3595 self.expect_token(&Token::RBrace)?;
3596
3597 Ok(Expr::Dictionary(fields))
3598 }
3599
3600 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3611 let key = self.parse_identifier()?;
3612
3613 self.expect_token(&Token::Colon)?;
3614
3615 let expr = self.parse_expr()?;
3616
3617 Ok(DictionaryField {
3618 key,
3619 value: Box::new(expr),
3620 })
3621 }
3622
3623 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3633 self.expect_token(&Token::LBrace)?;
3634 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3635 self.expect_token(&Token::RBrace)?;
3636 Ok(Expr::Map(Map { entries: fields }))
3637 }
3638
3639 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3649 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3651
3652 self.expect_token(&Token::Colon)?;
3653
3654 let value = self.parse_expr()?;
3655
3656 Ok(MapEntry {
3657 key: Box::new(key),
3658 value: Box::new(value),
3659 })
3660 }
3661
3662 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3672 self.expect_keyword_is(Keyword::MAP)?;
3673 self.expect_token(&Token::LParen)?;
3674 let key_data_type = self.parse_data_type()?;
3675 self.expect_token(&Token::Comma)?;
3676 let value_data_type = self.parse_data_type()?;
3677 self.expect_token(&Token::RParen)?;
3678
3679 Ok((key_data_type, value_data_type))
3680 }
3681
3682 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3692 self.expect_keyword_is(Keyword::TUPLE)?;
3693 self.expect_token(&Token::LParen)?;
3694 let mut field_defs = vec![];
3695 loop {
3696 let (def, _) = self.parse_struct_field_def()?;
3697 field_defs.push(def);
3698 if !self.consume_token(&Token::Comma) {
3699 break;
3700 }
3701 }
3702 self.expect_token(&Token::RParen)?;
3703
3704 Ok(field_defs)
3705 }
3706
    /// Consumes the `>` closing an angle-bracketed type list, unless
    /// `trailing_bracket` says it was already consumed. A `>>` token closes
    /// both this list and the enclosing one; the returned flag tells the
    /// caller the outer bracket was matched too.
    fn expect_closing_angle_bracket(
        &mut self,
        trailing_bracket: MatchedTrailingBracket,
    ) -> Result<MatchedTrailingBracket, ParserError> {
        let trailing_bracket = if !trailing_bracket.0 {
            match &self.peek_token_ref().token {
                Token::Gt => {
                    self.next_token();
                    false.into()
                }
                Token::ShiftRight => {
                    // `>>`: also closes the enclosing list.
                    self.next_token();
                    true.into()
                }
                _ => return self.expected_ref(">", self.peek_token_ref()),
            }
        } else {
            false.into()
        };

        Ok(trailing_bracket)
    }
3733
3734 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3736 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3738 return infix;
3739 }
3740
3741 let dialect = self.dialect;
3742
3743 self.advance_token();
3744 let tok = self.get_current_token();
3745 debug!("infix: {tok:?}");
3746 let tok_index = self.get_current_index();
3747 let span = tok.span;
3748 let regular_binary_operator = match &tok.token {
3749 Token::Spaceship => Some(BinaryOperator::Spaceship),
3750 Token::DoubleEq => Some(BinaryOperator::Eq),
3751 Token::Assignment => Some(BinaryOperator::Assignment),
3752 Token::Eq => Some(BinaryOperator::Eq),
3753 Token::Neq => Some(BinaryOperator::NotEq),
3754 Token::Gt => Some(BinaryOperator::Gt),
3755 Token::GtEq => Some(BinaryOperator::GtEq),
3756 Token::Lt => Some(BinaryOperator::Lt),
3757 Token::LtEq => Some(BinaryOperator::LtEq),
3758 Token::Plus => Some(BinaryOperator::Plus),
3759 Token::Minus => Some(BinaryOperator::Minus),
3760 Token::Mul => Some(BinaryOperator::Multiply),
3761 Token::Mod => Some(BinaryOperator::Modulo),
3762 Token::StringConcat => Some(BinaryOperator::StringConcat),
3763 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3764 Token::Caret => {
3765 if dialect_is!(dialect is PostgreSqlDialect) {
3768 Some(BinaryOperator::PGExp)
3769 } else {
3770 Some(BinaryOperator::BitwiseXor)
3771 }
3772 }
3773 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3774 Token::Div => Some(BinaryOperator::Divide),
3775 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3776 Some(BinaryOperator::DuckIntegerDivide)
3777 }
3778 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3779 Some(BinaryOperator::PGBitwiseShiftLeft)
3780 }
3781 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3782 Some(BinaryOperator::PGBitwiseShiftRight)
3783 }
3784 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3785 Some(BinaryOperator::PGBitwiseXor)
3786 }
3787 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3788 Some(BinaryOperator::PGOverlap)
3789 }
3790 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3791 Some(BinaryOperator::PGOverlap)
3792 }
3793 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3794 Some(BinaryOperator::And)
3795 }
3796 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3797 Some(BinaryOperator::PGStartsWith)
3798 }
3799 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3800 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3801 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3802 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3803 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3804 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3805 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3806 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3807 Token::Arrow => Some(BinaryOperator::Arrow),
3808 Token::LongArrow => Some(BinaryOperator::LongArrow),
3809 Token::HashArrow => Some(BinaryOperator::HashArrow),
3810 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3811 Token::AtArrow => Some(BinaryOperator::AtArrow),
3812 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3813 Token::HashMinus => Some(BinaryOperator::HashMinus),
3814 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3815 Token::AtAt => Some(BinaryOperator::AtAt),
3816 Token::Question => Some(BinaryOperator::Question),
3817 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3818 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3819 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3820 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3821 Some(BinaryOperator::DoubleHash)
3822 }
3823
3824 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3825 Some(BinaryOperator::AndLt)
3826 }
3827 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3828 Some(BinaryOperator::AndGt)
3829 }
3830 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3831 Some(BinaryOperator::QuestionDash)
3832 }
3833 Token::AmpersandLeftAngleBracketVerticalBar
3834 if self.dialect.supports_geometric_types() =>
3835 {
3836 Some(BinaryOperator::AndLtPipe)
3837 }
3838 Token::VerticalBarAmpersandRightAngleBracket
3839 if self.dialect.supports_geometric_types() =>
3840 {
3841 Some(BinaryOperator::PipeAndGt)
3842 }
3843 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3844 Some(BinaryOperator::LtDashGt)
3845 }
3846 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3847 Some(BinaryOperator::LtCaret)
3848 }
3849 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3850 Some(BinaryOperator::GtCaret)
3851 }
3852 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3853 Some(BinaryOperator::QuestionHash)
3854 }
3855 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3856 Some(BinaryOperator::QuestionDoublePipe)
3857 }
3858 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3859 Some(BinaryOperator::QuestionDashPipe)
3860 }
3861 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3862 Some(BinaryOperator::TildeEq)
3863 }
3864 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3865 Some(BinaryOperator::LtLtPipe)
3866 }
3867 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3868 Some(BinaryOperator::PipeGtGt)
3869 }
3870 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3871
3872 Token::Word(w) => match w.keyword {
3873 Keyword::AND => Some(BinaryOperator::And),
3874 Keyword::OR => Some(BinaryOperator::Or),
3875 Keyword::XOR => Some(BinaryOperator::Xor),
3876 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3877 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3878 self.expect_token(&Token::LParen)?;
3879 let mut idents = vec![];
3884 loop {
3885 self.advance_token();
3886 idents.push(self.get_current_token().to_string());
3887 if !self.consume_token(&Token::Period) {
3888 break;
3889 }
3890 }
3891 self.expect_token(&Token::RParen)?;
3892 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3893 }
3894 _ => None,
3895 },
3896 _ => None,
3897 };
3898
3899 let tok = self.token_at(tok_index);
3900 if let Some(op) = regular_binary_operator {
3901 if let Some(keyword) =
3902 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3903 {
3904 self.expect_token(&Token::LParen)?;
3905 let right = if self.peek_sub_query() {
3906 self.prev_token(); self.parse_subexpr(precedence)?
3910 } else {
3911 let right = self.parse_subexpr(precedence)?;
3913 self.expect_token(&Token::RParen)?;
3914 right
3915 };
3916
3917 if !matches!(
3918 op,
3919 BinaryOperator::Gt
3920 | BinaryOperator::Lt
3921 | BinaryOperator::GtEq
3922 | BinaryOperator::LtEq
3923 | BinaryOperator::Eq
3924 | BinaryOperator::NotEq
3925 | BinaryOperator::PGRegexMatch
3926 | BinaryOperator::PGRegexIMatch
3927 | BinaryOperator::PGRegexNotMatch
3928 | BinaryOperator::PGRegexNotIMatch
3929 | BinaryOperator::PGLikeMatch
3930 | BinaryOperator::PGILikeMatch
3931 | BinaryOperator::PGNotLikeMatch
3932 | BinaryOperator::PGNotILikeMatch
3933 ) {
3934 return parser_err!(
3935 format!(
3936 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3937 ),
3938 span.start
3939 );
3940 };
3941
3942 Ok(match keyword {
3943 Keyword::ALL => Expr::AllOp {
3944 left: Box::new(expr),
3945 compare_op: op,
3946 right: Box::new(right),
3947 },
3948 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3949 left: Box::new(expr),
3950 compare_op: op,
3951 right: Box::new(right),
3952 is_some: keyword == Keyword::SOME,
3953 },
3954 unexpected_keyword => return Err(ParserError::ParserError(
3955 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3956 )),
3957 })
3958 } else {
3959 Ok(Expr::BinaryOp {
3960 left: Box::new(expr),
3961 op,
3962 right: Box::new(self.parse_subexpr(precedence)?),
3963 })
3964 }
3965 } else if let Token::Word(w) = &tok.token {
3966 match w.keyword {
3967 Keyword::IS => {
3968 if self.parse_keyword(Keyword::NULL) {
3969 Ok(Expr::IsNull(Box::new(expr)))
3970 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3971 Ok(Expr::IsNotNull(Box::new(expr)))
3972 } else if self.parse_keywords(&[Keyword::TRUE]) {
3973 Ok(Expr::IsTrue(Box::new(expr)))
3974 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3975 Ok(Expr::IsNotTrue(Box::new(expr)))
3976 } else if self.parse_keywords(&[Keyword::FALSE]) {
3977 Ok(Expr::IsFalse(Box::new(expr)))
3978 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3979 Ok(Expr::IsNotFalse(Box::new(expr)))
3980 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3981 Ok(Expr::IsUnknown(Box::new(expr)))
3982 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3983 Ok(Expr::IsNotUnknown(Box::new(expr)))
3984 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3985 let expr2 = self.parse_expr()?;
3986 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3987 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3988 {
3989 let expr2 = self.parse_expr()?;
3990 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3991 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3992 Ok(is_normalized)
3993 } else {
3994 self.expected_ref(
3995 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3996 self.peek_token_ref(),
3997 )
3998 }
3999 }
4000 Keyword::AT => {
4001 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4002 Ok(Expr::AtTimeZone {
4003 timestamp: Box::new(expr),
4004 time_zone: Box::new(self.parse_subexpr(precedence)?),
4005 })
4006 }
4007 Keyword::NOT
4008 | Keyword::IN
4009 | Keyword::BETWEEN
4010 | Keyword::LIKE
4011 | Keyword::ILIKE
4012 | Keyword::SIMILAR
4013 | Keyword::REGEXP
4014 | Keyword::RLIKE => {
4015 self.prev_token();
4016 let negated = self.parse_keyword(Keyword::NOT);
4017 let regexp = self.parse_keyword(Keyword::REGEXP);
4018 let rlike = self.parse_keyword(Keyword::RLIKE);
4019 let null = if !self.in_column_definition_state() {
4020 self.parse_keyword(Keyword::NULL)
4021 } else {
4022 false
4023 };
4024 if regexp || rlike {
4025 Ok(Expr::RLike {
4026 negated,
4027 expr: Box::new(expr),
4028 pattern: Box::new(
4029 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4030 ),
4031 regexp,
4032 })
4033 } else if negated && null {
4034 Ok(Expr::IsNotNull(Box::new(expr)))
4035 } else if self.parse_keyword(Keyword::IN) {
4036 self.parse_in(expr, negated)
4037 } else if self.parse_keyword(Keyword::BETWEEN) {
4038 self.parse_between(expr, negated)
4039 } else if self.parse_keyword(Keyword::LIKE) {
4040 Ok(Expr::Like {
4041 negated,
4042 any: self.parse_keyword(Keyword::ANY),
4043 expr: Box::new(expr),
4044 pattern: Box::new(
4045 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4046 ),
4047 escape_char: self.parse_escape_char()?,
4048 })
4049 } else if self.parse_keyword(Keyword::ILIKE) {
4050 Ok(Expr::ILike {
4051 negated,
4052 any: self.parse_keyword(Keyword::ANY),
4053 expr: Box::new(expr),
4054 pattern: Box::new(
4055 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4056 ),
4057 escape_char: self.parse_escape_char()?,
4058 })
4059 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4060 Ok(Expr::SimilarTo {
4061 negated,
4062 expr: Box::new(expr),
4063 pattern: Box::new(
4064 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4065 ),
4066 escape_char: self.parse_escape_char()?,
4067 })
4068 } else {
4069 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4070 }
4071 }
4072 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4073 Ok(Expr::IsNotNull(Box::new(expr)))
4074 }
4075 Keyword::MEMBER => {
4076 if self.parse_keyword(Keyword::OF) {
4077 self.expect_token(&Token::LParen)?;
4078 let array = self.parse_expr()?;
4079 self.expect_token(&Token::RParen)?;
4080 Ok(Expr::MemberOf(MemberOf {
4081 value: Box::new(expr),
4082 array: Box::new(array),
4083 }))
4084 } else {
4085 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4086 }
4087 }
4088 _ => parser_err!(
4090 format!("No infix parser for token {:?}", tok.token),
4091 tok.span.start
4092 ),
4093 }
4094 } else if Token::DoubleColon == *tok {
4095 Ok(Expr::Cast {
4096 kind: CastKind::DoubleColon,
4097 expr: Box::new(expr),
4098 data_type: self.parse_data_type()?,
4099 array: false,
4100 format: None,
4101 })
4102 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4103 Ok(Expr::UnaryOp {
4104 op: UnaryOperator::PGPostfixFactorial,
4105 expr: Box::new(expr),
4106 })
4107 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4108 || (Token::Colon == *tok)
4109 {
4110 self.prev_token();
4111 self.parse_json_access(expr)
4112 } else {
4113 parser_err!(
4115 format!("No infix parser for token {:?}", tok.token),
4116 tok.span.start
4117 )
4118 }
4119 }
4120
4121 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4123 if self.parse_keyword(Keyword::ESCAPE) {
4124 Ok(Some(self.parse_value()?))
4125 } else {
4126 Ok(None)
4127 }
4128 }
4129
    /// Parse the interior of a `[...]` subscript after the opening bracket
    /// has been consumed: either a single index (`[i]`) or a slice with
    /// optional lower bound, upper bound and stride (`[a:b:c]`).
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the slice has no lower bound.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[expr]` is a plain index; `[:` directly followed by `]` is a
        // fully unbounded slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If a lower bound was parsed, the separating `:` has not been
        // consumed yet; require it before looking for the upper bound.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        let upper_bound = if self.consume_token(&Token::RBracket) {
            // `[a:]` — no upper bound and no stride.
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[a:b]` — slice without a stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // A second `:` introduces the optional stride (`[a:b:c]` or `[a:b:]`).
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // The `]` was already consumed above when the stride was omitted.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4204
4205 pub fn parse_multi_dim_subscript(
4207 &mut self,
4208 chain: &mut Vec<AccessExpr>,
4209 ) -> Result<(), ParserError> {
4210 while self.consume_token(&Token::LBracket) {
4211 self.parse_subscript(chain)?;
4212 }
4213 Ok(())
4214 }
4215
4216 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4220 let subscript = self.parse_subscript_inner()?;
4221 chain.push(AccessExpr::Subscript(subscript));
4222 Ok(())
4223 }
4224
    /// Parse a single object key of a JSON path (the component after `.`
    /// or `:`).
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            Token::Word(Word {
                value,
                // Only unquoted, double-quoted or backtick-quoted
                // identifiers are accepted as object keys here.
                quote_style: quote_style @ (Some('"') | Some('`') | None),
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            // A double-quoted string literal also serves as a quoted key.
            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
4249
4250 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4251 let path = self.parse_json_path()?;
4252 Ok(Expr::JsonAccess {
4253 value: Box::new(expr),
4254 path,
4255 })
4256 }
4257
    /// Parse a JSON access path: a sequence of `.key`, `:key`, `[expr]`
    /// and — as the first element only — `:[expr]` components.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // Leading `:[expr]` variant access.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // `:` may only introduce the first path element...
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // ...whereas `.` may only continue an existing path.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; put the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers invoke this only after seeing a path-introducing token,
        // so at least one element must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4290
    /// Parse the right-hand side of `[NOT] IN`, which may be
    /// `UNNEST(array_expr)`, a parenthesized subquery, or a parenthesized
    /// expression list.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `expr [NOT] IN UNNEST(array_expr)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure the parser is rewound and we
        // fall back to parsing an expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects allow `IN ()` with no elements.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4325
4326 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4328 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4331 self.expect_keyword_is(Keyword::AND)?;
4332 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4333 Ok(Expr::Between {
4334 expr: Box::new(expr),
4335 negated,
4336 low: Box::new(low),
4337 high: Box::new(high),
4338 })
4339 }
4340
4341 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4343 Ok(Expr::Cast {
4344 kind: CastKind::DoubleColon,
4345 expr: Box::new(expr),
4346 data_type: self.parse_data_type()?,
4347 array: false,
4348 format: None,
4349 })
4350 }
4351
    /// Return the precedence of the next (unconsumed) token, delegating to
    /// the dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4356
4357 pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4360 self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4361 }
4362
    /// Return a clone of the next non-whitespace token without advancing.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4370
    /// Return a reference to the next non-whitespace token without
    /// advancing (avoids the clone done by [`Self::peek_token`]).
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4376
    /// Peek the next `N` non-whitespace tokens without advancing, cloned
    /// and with their spans dropped.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
4403
    /// Peek the next `N` non-whitespace tokens (with spans, cloned)
    /// without advancing; positions past the end yield EOF tokens.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
        // Walk a private cursor forward so `self.index` is untouched.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Skip whitespace between significant tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.cloned().unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            });
        })
    }
4426
    /// Peek references to the next `N` non-whitespace tokens without
    /// advancing; positions past the end yield the shared EOF token.
    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
        // Walk a private cursor forward so `self.index` is untouched.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Skip whitespace between significant tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.unwrap_or(&EOF_TOKEN);
        })
    }
4446
    /// Return a clone of the `n`th upcoming non-whitespace token (0-based).
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4451
    /// Return a reference to the `n`th upcoming non-whitespace token
    /// (0-based) without advancing; EOF when past the end of the stream.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts toward `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        // `None` means we ran off the end of the stream.
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4471
    /// Return a clone of the next raw token — whitespace included —
    /// without advancing.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4477
4478 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4480 self.tokens
4481 .get(self.index + n)
4482 .cloned()
4483 .unwrap_or(TokenWithSpan {
4484 token: Token::EOF,
4485 span: Span::empty(),
4486 })
4487 }
4488
    /// Reference to the raw token `n` positions ahead — whitespace is NOT
    /// skipped — or the shared EOF token when past the end.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4493
4494 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4498 let index = self.index;
4499 let matched = self.parse_keywords(expected);
4500 self.index = index;
4501 matched
4502 }
4503
    /// Consume the next significant token (skipping whitespace) and
    /// return a clone of it.
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4512
    /// Index of the most recently consumed token (the one returned by
    /// [`Self::get_current_token`]); 0 when nothing has been consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4520
4521 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4523 self.index += 1;
4524 self.tokens.get(self.index - 1)
4525 }
4526
4527 pub fn advance_token(&mut self) {
4531 loop {
4532 self.index += 1;
4533 match self.tokens.get(self.index - 1) {
4534 Some(TokenWithSpan {
4535 token: Token::Whitespace(_),
4536 span: _,
4537 }) => continue,
4538 _ => break,
4539 }
4540 }
4541 }
4542
    /// Return the token that was most recently consumed.
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4549
    /// Return the token consumed immediately before the current one.
    /// NOTE(review): this is a raw index lookup, so it can land on a
    /// whitespace token — confirm callers expect that.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4556
    /// Return the next unconsumed raw token (whitespace is not skipped).
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4563
    /// Step the cursor back one significant token, skipping over any
    /// whitespace tokens in between.
    ///
    /// Panics (via `assert!`) if the cursor is already at the start.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            // Continue stepping back over whitespace tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
4584
    /// Build an `Expected: ..., found: ...` parse error from an owned
    /// token, anchored at that token's start location.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4592
    /// Like [`Self::expected`], but borrows the offending token instead of
    /// taking it by value.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4600
    /// Build an `Expected: ...` error for the token at an explicit stream
    /// index (EOF when the index is out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4609
4610 #[must_use]
4613 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4614 if self.peek_keyword(expected) {
4615 self.advance_token();
4616 true
4617 } else {
4618 false
4619 }
4620 }
4621
    /// True if the next significant token is the keyword `expected`
    /// (nothing is consumed).
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4629
    /// If the stream continues with `expected` followed exactly by
    /// `tokens`, consume the whole sequence and return true.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4640
    /// Like [`Self::parse_keyword_with_tokens`], but consumes nothing.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4648
4649 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4650 match &self.peek_token_ref().token {
4651 Token::Word(w) if expected == w.keyword => {
4652 for (idx, token) in tokens.iter().enumerate() {
4653 if self.peek_nth_token_ref(idx + 1).token != *token {
4654 return false;
4655 }
4656 }
4657
4658 if consume {
4659 for _ in 0..(tokens.len() + 1) {
4660 self.advance_token();
4661 }
4662 }
4663
4664 true
4665 }
4666 _ => false,
4667 }
4668 }
4669
    /// Consume the exact keyword sequence `keywords`, or consume nothing
    /// and return false.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4677
    /// Like [`Self::parse_keywords`], but on success return the token
    /// index of the first keyword in the matched sequence.
    #[must_use]
    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
        let start_index = self.index;
        let mut first_keyword_index = None;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // Unwind any partial match so nothing is consumed on failure.
                self.index = start_index;
                return None;
            }
            if first_keyword_index.is_none() {
                first_keyword_index = Some(self.index.saturating_sub(1));
            }
        }
        first_keyword_index
    }
4695
4696 #[must_use]
4699 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4700 for keyword in keywords {
4701 if self.peek_keyword(*keyword) {
4702 return Some(*keyword);
4703 }
4704 }
4705 None
4706 }
4707
    /// If the next token is one of `keywords`, consume it and return which
    /// one matched; otherwise consume nothing and return `None`.
    #[must_use]
    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
        match &self.peek_token_ref().token {
            Token::Word(w) => {
                keywords
                    .iter()
                    .find(|keyword| **keyword == w.keyword)
                    .map(|keyword| {
                        // Only consume on a successful match.
                        self.advance_token();
                        *keyword
                    })
            }
            _ => None,
        }
    }
4726
    /// Consume one of `keywords`, or return an error naming all the
    /// accepted alternatives.
    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
            Ok(keyword)
        } else {
            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
            self.expected_ref(
                &format!("one of {}", keywords.join(" or ")),
                self.peek_token_ref(),
            )
        }
    }
4740
    /// Consume the keyword `expected` and return its token (cloned), or
    /// error if the next token differs.
    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
        if self.parse_keyword(expected) {
            Ok(self.get_current_token().clone())
        } else {
            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
        }
    }
4752
    /// Consume the keyword `expected`, discarding the token; like
    /// [`Self::expect_keyword`] but avoids the clone when the token is
    /// not needed.
    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
        if self.parse_keyword(expected) {
            Ok(())
        } else {
            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
        }
    }
4765
4766 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4769 for &kw in expected {
4770 self.expect_keyword_is(kw)?;
4771 }
4772 Ok(())
4773 }
4774
4775 #[must_use]
4779 pub fn consume_token(&mut self, expected: &Token) -> bool {
4780 if self.peek_token_ref() == expected {
4781 self.advance_token();
4782 true
4783 } else {
4784 false
4785 }
4786 }
4787
4788 #[must_use]
4792 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4793 let index = self.index;
4794 for token in tokens {
4795 if !self.consume_token(token) {
4796 self.index = index;
4797 return false;
4798 }
4799 }
4800 true
4801 }
4802
    /// Consume the token `expected` and return it (cloned), or error if
    /// the next token differs.
    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
        if self.peek_token_ref() == expected {
            Ok(self.next_token())
        } else {
            self.expected_ref(&expected.to_string(), self.peek_token_ref())
        }
    }
4811
    /// Parse `s` into a `T` via [`FromStr`], mapping failures to a
    /// [`ParserError`] that reports the string, the target type name and
    /// the source location.
    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
    where
        <T as FromStr>::Err: Display,
    {
        s.parse::<T>().map_err(|e| {
            ParserError::ParserError(format!(
                "Could not parse '{s}' as {}: {e}{loc}",
                core::any::type_name::<T>()
            ))
        })
    }
4823
4824 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4826 let trailing_commas =
4832 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4833
4834 self.parse_comma_separated_with_trailing_commas(
4835 |p| p.parse_select_item(),
4836 trailing_commas,
4837 Self::is_reserved_for_column_alias,
4838 )
4839 }
4840
    /// Parse the comma-separated list of permissions in a `GRANT`
    /// statement, tolerating a trailing comma when the parser options
    /// allow it.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // After a comma, a closing delimiter (or the ON keyword that
                // follows the action list) means the comma was trailing
                // rather than separating.
                match &self.peek_token_ref().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4864
4865 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4867 let trailing_commas = self.dialect.supports_from_trailing_commas();
4868
4869 self.parse_comma_separated_with_trailing_commas(
4870 Parser::parse_table_and_joins,
4871 trailing_commas,
4872 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4873 )
4874 }
4875
    /// Decide whether a comma-separated list has ended. Consumes a
    /// separating comma when present; with `trailing_commas` enabled, a
    /// comma followed by a terminating keyword or closing delimiter also
    /// ends the list (the terminator itself is left unconsumed).
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Inspect the token after the comma, then step back so it is
            // not ultimately consumed.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4908
    /// [`Self::is_parse_comma_separated_end_with_trailing_commas`] using
    /// the configured trailing-comma option and the default column-alias
    /// terminator check.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4917
    /// Parse one or more items produced by `f`, separated by commas, using
    /// the parser's configured trailing-comma behavior.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4929
    /// Parse one or more items produced by `f`, separated by commas.
    /// `trailing_commas` controls whether a dangling comma before a
    /// terminator is tolerated; `is_reserved_keyword` identifies keywords
    /// that terminate the list after such a comma.
    fn parse_comma_separated_with_trailing_commas<T, F, R>(
        &mut self,
        mut f: F,
        trailing_commas: bool,
        is_reserved_keyword: R,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        let mut values = vec![];
        loop {
            values.push(f(self)?);
            // The end-check also consumes the separating comma when the
            // list continues.
            if self.is_parse_comma_separated_end_with_trailing_commas(
                trailing_commas,
                &is_reserved_keyword,
            ) {
                break;
            }
        }
        Ok(values)
    }
4956
4957 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4959 where
4960 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4961 {
4962 let mut values = vec![];
4963 loop {
4964 values.push(f(self)?);
4965 if !self.consume_token(&Token::Period) {
4966 break;
4967 }
4968 }
4969 Ok(values)
4970 }
4971
4972 pub fn parse_keyword_separated<T, F>(
4974 &mut self,
4975 keyword: Keyword,
4976 mut f: F,
4977 ) -> Result<Vec<T>, ParserError>
4978 where
4979 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4980 {
4981 let mut values = vec![];
4982 loop {
4983 values.push(f(self)?);
4984 if !self.parse_keyword(keyword) {
4985 break;
4986 }
4987 }
4988 Ok(values)
4989 }
4990
    /// Parse `( <f> )` and return the result of `f`.
    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.expect_token(&Token::LParen)?;
        let res = f(self)?;
        self.expect_token(&Token::RParen)?;
        Ok(res)
    }
5001
    /// Like [`Self::parse_comma_separated`], but allows an empty list: the
    /// list ends at `end_token` (which is NOT consumed), and — when
    /// trailing commas are enabled — a lone trailing comma before
    /// `end_token` also yields an empty list.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // Immediate terminator: empty list.
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        // `, <end>`: a single trailing comma in an otherwise empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5023
    /// Parse semicolon-terminated statements until EOF or until one of
    /// `terminal_keywords` appears (unquoted) at statement position; the
    /// terminator itself is not consumed.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // Quoted identifiers are never treated as terminators.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            // Every statement in the list must end with a semicolon.
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5048
    /// Default list-terminator check: a keyword ends a comma-separated list
    /// when the dialect does not permit it as a column alias.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5055
    /// Run `f`, converting failure into `Ok(None)` (with the parser
    /// rewound by [`Self::try_parse`]). Recursion-limit errors are still
    /// propagated, since they must abort the whole parse.
    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        match self.try_parse(f) {
            Ok(t) => Ok(Some(t)),
            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
            // Any other error simply means "this alternative didn't apply".
            _ => Ok(None),
        }
    }
5069
5070 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
5072 where
5073 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5074 {
5075 let index = self.index;
5076 match f(self) {
5077 Ok(t) => Ok(t),
5078 Err(e) => {
5079 self.index = index;
5081 Err(e)
5082 }
5083 }
5084 }
5085
    /// Parse an optional `ALL` / `DISTINCT` / `DISTINCT ON (...)` clause,
    /// rejecting contradictory combinations such as `ALL DISTINCT`.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember the location for error reporting before consuming.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            // Unreachable in practice: parse_one_of_keywords only returns
            // the requested keywords, but the match must be exhaustive.
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only DISTINCT may be followed by an ON (...) column list.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // `DISTINCT ON ()` yields an empty column list; step back so the
        // closing paren is re-checked by the expect below.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5124
    /// Parse a SQL `CREATE` statement, dispatching on the object-type keyword
    /// that follows (TABLE, VIEW, FUNCTION, INDEX, SCHEMA, ...).
    ///
    /// Common prefix modifiers — `OR REPLACE`, `OR ALTER`, `LOCAL`/`GLOBAL`,
    /// `TRANSIENT`, `TEMP`/`TEMPORARY`, DuckDB's `PERSISTENT`, and
    /// view-specific parameters — are consumed first and passed down to the
    /// object-specific sub-parsers.
    ///
    /// NOTE(review): branch order matters. Multi-keyword forms (e.g.
    /// `UNIQUE INDEX`, `DEFAULT CONVERSION`, `CONSTRAINT TRIGGER`) rely on
    /// their single-keyword counterparts failing to consume the first token.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Fold GLOBAL/LOCAL into one tri-state flag: Some(true) = GLOBAL,
        // Some(false) = LOCAL, None = neither.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT (for secrets) is only recognized in the DuckDB dialect.
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            if self.parse_keyword(Keyword::MAPPING) {
                self.parse_create_user_mapping().map(Into::into)
            } else {
                self.parse_create_user(or_replace).map(Into::into)
            }
        } else if self.parse_keyword(Keyword::AGGREGATE) {
            self.parse_create_aggregate(or_replace).map(Into::into)
        } else if self.peek_keyword(Keyword::TRUSTED)
            || self.peek_keyword(Keyword::PROCEDURAL)
            || self.peek_keyword(Keyword::LANGUAGE)
        {
            // `CREATE [TRUSTED] [PROCEDURAL] LANGUAGE ...`; the two modifier
            // keywords are optional but must precede LANGUAGE.
            let trusted = self.parse_keyword(Keyword::TRUSTED);
            let procedural = self.parse_keyword(Keyword::PROCEDURAL);
            if self.parse_keyword(Keyword::LANGUAGE) {
                self.parse_create_language(or_replace, trusted, procedural)
                    .map(Into::into)
            } else {
                self.expected_ref(
                    "LANGUAGE after TRUSTED or PROCEDURAL",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keyword(Keyword::TRANSFORM) {
            self.parse_create_transform(or_replace).map(Into::into)
        } else if or_replace {
            // OR REPLACE is only valid for the object types handled above.
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::CAST) {
            self.parse_create_cast().map(Into::into)
        } else if self.parse_keyword(Keyword::CONVERSION) {
            self.parse_create_conversion(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CONVERSION]) {
            self.parse_create_conversion(true).map(Into::into)
        } else if self.parse_keyword(Keyword::RULE) {
            self.parse_create_rule().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // CREATE OPERATOR [FAMILY | CLASS] ...
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else if self.parse_keyword(Keyword::FOREIGN) {
            // CREATE FOREIGN {DATA WRAPPER | TABLE} ...
            if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
                self.parse_create_foreign_data_wrapper().map(Into::into)
            } else if self.parse_keyword(Keyword::TABLE) {
                self.parse_create_foreign_table().map(Into::into)
            } else {
                self.expected_ref(
                    "DATA WRAPPER or TABLE after CREATE FOREIGN",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
            self.parse_create_text_search()
        } else if self.parse_keyword(Keyword::PUBLICATION) {
            self.parse_create_publication().map(Into::into)
        } else if self.parse_keyword(Keyword::SUBSCRIPTION) {
            self.parse_create_subscription().map(Into::into)
        } else if self.parse_keyword(Keyword::STATISTICS) {
            self.parse_create_statistics().map(Into::into)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::METHOD]) {
            self.parse_create_access_method().map(Into::into)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::TRIGGER]) {
            self.parse_create_event_trigger().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLESPACE) {
            self.parse_create_tablespace().map(Into::into)
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5277
5278 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5279 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5280 let name = self.parse_identifier()?;
5281 let options = self
5282 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5283 .options;
5284 let with_tags = self.parse_keyword(Keyword::WITH);
5285 let tags = if self.parse_keyword(Keyword::TAG) {
5286 self.parse_key_value_options(true, &[])?.options
5287 } else {
5288 vec![]
5289 };
5290 Ok(CreateUser {
5291 or_replace,
5292 if_not_exists,
5293 name,
5294 options: KeyValueOptions {
5295 options,
5296 delimiter: KeyValueOptionsDelimiter::Space,
5297 },
5298 with_tags,
5299 tags: KeyValueOptions {
5300 options: tags,
5301 delimiter: KeyValueOptionsDelimiter::Comma,
5302 },
5303 })
5304 }
5305
    /// Parse a DuckDB `CREATE SECRET` statement:
    /// `CREATE [OR REPLACE] [TEMPORARY | PERSISTENT] SECRET [IF NOT EXISTS]
    /// [<name>] [IN <storage>] ( TYPE <type> [, <key> <value>]... )`.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        // Both the name and the `IN <storage>` clause are optional, in that
        // order; anything before the opening paren is one (or both) of them.
        // parse_identifier errors are deliberately swallowed with .ok() —
        // absence of either part is legal.
        if self.peek_token_ref().token != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // After a name, an `IN <storage>` clause may still follow.
            if storage_specifier.is_none()
                && self.peek_token_ref().token != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        // The parenthesized body always starts with `TYPE <identifier>`.
        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        // Remaining comma-separated `key value` option pairs.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
5364
5365 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5367 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5368 if self.parse_keyword(Keyword::TABLE) {
5369 let table_name = self.parse_object_name(false)?;
5370 if self.peek_token_ref().token != Token::EOF {
5371 if let Token::Word(word) = &self.peek_token_ref().token {
5372 if word.keyword == Keyword::OPTIONS {
5373 options = self.parse_options(Keyword::OPTIONS)?
5374 }
5375 };
5376
5377 if self.peek_token_ref().token != Token::EOF {
5378 let (a, q) = self.parse_as_query()?;
5379 has_as = a;
5380 query = Some(q);
5381 }
5382
5383 Ok(Statement::Cache {
5384 table_flag,
5385 table_name,
5386 has_as,
5387 options,
5388 query,
5389 })
5390 } else {
5391 Ok(Statement::Cache {
5392 table_flag,
5393 table_name,
5394 has_as,
5395 options,
5396 query,
5397 })
5398 }
5399 } else {
5400 table_flag = Some(self.parse_object_name(false)?);
5401 if self.parse_keyword(Keyword::TABLE) {
5402 let table_name = self.parse_object_name(false)?;
5403 if self.peek_token_ref().token != Token::EOF {
5404 if let Token::Word(word) = &self.peek_token_ref().token {
5405 if word.keyword == Keyword::OPTIONS {
5406 options = self.parse_options(Keyword::OPTIONS)?
5407 }
5408 };
5409
5410 if self.peek_token_ref().token != Token::EOF {
5411 let (a, q) = self.parse_as_query()?;
5412 has_as = a;
5413 query = Some(q);
5414 }
5415
5416 Ok(Statement::Cache {
5417 table_flag,
5418 table_name,
5419 has_as,
5420 options,
5421 query,
5422 })
5423 } else {
5424 Ok(Statement::Cache {
5425 table_flag,
5426 table_name,
5427 has_as,
5428 options,
5429 query,
5430 })
5431 }
5432 } else {
5433 if self.peek_token_ref().token == Token::EOF {
5434 self.prev_token();
5435 }
5436 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5437 }
5438 }
5439 }
5440
5441 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5443 match &self.peek_token_ref().token {
5444 Token::Word(word) => match word.keyword {
5445 Keyword::AS => {
5446 self.next_token();
5447 Ok((true, self.parse_query()?))
5448 }
5449 _ => Ok((false, self.parse_query()?)),
5450 },
5451 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5452 }
5453 }
5454
5455 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5457 self.expect_keyword_is(Keyword::TABLE)?;
5458 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5459 let table_name = self.parse_object_name(false)?;
5460 Ok(Statement::UNCache {
5461 table_name,
5462 if_exists,
5463 })
5464 }
5465
5466 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5468 self.expect_keyword_is(Keyword::TABLE)?;
5469 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5470 let table_name = self.parse_object_name(false)?;
5471 self.expect_keyword_is(Keyword::USING)?;
5472 let module_name = self.parse_identifier()?;
5473 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5478 Ok(Statement::CreateVirtualTable {
5479 name: table_name,
5480 if_not_exists,
5481 module_name,
5482 module_args,
5483 })
5484 }
5485
5486 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5488 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5489
5490 let schema_name = self.parse_schema_name()?;
5491
5492 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5493 Some(self.parse_expr()?)
5494 } else {
5495 None
5496 };
5497
5498 let with = if self.peek_keyword(Keyword::WITH) {
5499 Some(self.parse_options(Keyword::WITH)?)
5500 } else {
5501 None
5502 };
5503
5504 let options = if self.peek_keyword(Keyword::OPTIONS) {
5505 Some(self.parse_options(Keyword::OPTIONS)?)
5506 } else {
5507 None
5508 };
5509
5510 let clone = if self.parse_keyword(Keyword::CLONE) {
5511 Some(self.parse_object_name(false)?)
5512 } else {
5513 None
5514 };
5515
5516 Ok(Statement::CreateSchema {
5517 schema_name,
5518 if_not_exists,
5519 with,
5520 options,
5521 default_collate_spec,
5522 clone,
5523 })
5524 }
5525
5526 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5527 if self.parse_keyword(Keyword::AUTHORIZATION) {
5528 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5529 } else {
5530 let name = self.parse_object_name(false)?;
5531
5532 if self.parse_keyword(Keyword::AUTHORIZATION) {
5533 Ok(SchemaName::NamedAuthorization(
5534 name,
5535 self.parse_identifier()?,
5536 ))
5537 } else {
5538 Ok(SchemaName::Simple(name))
5539 }
5540 }
5541 }
5542
5543 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5545 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5546 let db_name = self.parse_object_name(false)?;
5547 let mut location = None;
5548 let mut managed_location = None;
5549 loop {
5550 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5551 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5552 Some(Keyword::MANAGEDLOCATION) => {
5553 managed_location = Some(self.parse_literal_string()?)
5554 }
5555 _ => break,
5556 }
5557 }
5558 let clone = if self.parse_keyword(Keyword::CLONE) {
5559 Some(self.parse_object_name(false)?)
5560 } else {
5561 None
5562 };
5563
5564 let mut default_charset = None;
5572 let mut default_collation = None;
5573 loop {
5574 let has_default = self.parse_keyword(Keyword::DEFAULT);
5575 if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
5576 || self.parse_keyword(Keyword::CHARSET)
5577 {
5578 let _ = self.consume_token(&Token::Eq);
5579 default_charset = Some(self.parse_identifier()?.value);
5580 } else if self.parse_keyword(Keyword::COLLATE) {
5581 let _ = self.consume_token(&Token::Eq);
5582 default_collation = Some(self.parse_identifier()?.value);
5583 } else if has_default {
5584 self.prev_token();
5586 break;
5587 } else {
5588 break;
5589 }
5590 }
5591
5592 Ok(Statement::CreateDatabase {
5593 db_name,
5594 if_not_exists: ine,
5595 location,
5596 managed_location,
5597 or_replace: false,
5598 transient: false,
5599 clone,
5600 data_retention_time_in_days: None,
5601 max_data_extension_time_in_days: None,
5602 external_volume: None,
5603 catalog: None,
5604 replace_invalid_characters: None,
5605 default_ddl_collation: None,
5606 storage_serialization_policy: None,
5607 comment: None,
5608 default_charset,
5609 default_collation,
5610 catalog_sync: None,
5611 catalog_sync_namespace_mode: None,
5612 catalog_sync_namespace_flatten_delimiter: None,
5613 with_tags: None,
5614 with_contacts: None,
5615 })
5616 }
5617
5618 pub fn parse_optional_create_function_using(
5620 &mut self,
5621 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5622 if !self.parse_keyword(Keyword::USING) {
5623 return Ok(None);
5624 };
5625 let keyword =
5626 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5627
5628 let uri = self.parse_literal_string()?;
5629
5630 match keyword {
5631 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5632 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5633 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5634 _ => self.expected(
5635 "JAR, FILE or ARCHIVE, got {:?}",
5636 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5637 ),
5638 }
5639 }
5640
    /// Parse the body of a `CREATE FUNCTION` statement, delegating to the
    /// dialect-specific grammar. `or_alter`, `or_replace`, and `temporary`
    /// reflect modifiers already consumed by the caller.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB functions are parsed through the macro grammar.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
                .map(Into::into)
        } else {
            // Unsupported dialect: rewind past FUNCTION so the error points
            // at the object-type keyword after CREATE.
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5667
    /// Parse a PostgreSQL-style `CREATE FUNCTION`: name, parenthesized
    /// argument list, optional `RETURNS` clause, then a sequence of attribute
    /// clauses (AS, LANGUAGE, volatility, null-input behavior, PARALLEL,
    /// SECURITY, SET, RETURN) that may appear in any order but — except for
    /// SET — at most once each.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        // Argument list; `()` yields an empty vec.
        self.expect_token(&Token::LParen)?;
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Accumulates the at-most-once attribute clauses as they are seen.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a clause that was already supplied in an earlier
            // iteration of this loop.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // SET clauses may repeat; each one configures a parameter
                // either FROM CURRENT, to DEFAULT, or to an explicit list.
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // SQL-standard body form; mutually exclusive with AS.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                break;
            }
        }

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5822
5823 fn parse_hive_create_function(
5827 &mut self,
5828 or_replace: bool,
5829 temporary: bool,
5830 ) -> Result<CreateFunction, ParserError> {
5831 let name = self.parse_object_name(false)?;
5832 self.expect_keyword_is(Keyword::AS)?;
5833
5834 let body = self.parse_create_function_body_string()?;
5835 let using = self.parse_optional_create_function_using()?;
5836
5837 Ok(CreateFunction {
5838 or_alter: false,
5839 or_replace,
5840 temporary,
5841 name,
5842 function_body: Some(body),
5843 using,
5844 if_not_exists: false,
5845 args: None,
5846 return_type: None,
5847 behavior: None,
5848 called_on_null: None,
5849 parallel: None,
5850 security: None,
5851 set_params: vec![],
5852 language: None,
5853 determinism_specifier: None,
5854 options: None,
5855 remote_connection: None,
5856 })
5857 }
5858
    /// Parse a BigQuery-style `CREATE FUNCTION`: optional `IF NOT EXISTS`,
    /// name and parameter list, then optional `RETURNS`, determinism,
    /// `LANGUAGE`, `REMOTE WITH CONNECTION`, `OPTIONS(...)` and `AS` clauses.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        // Remote functions delegate execution through a connection resource.
        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS(...) may appear either before or after the AS body; try
        // the "before" position first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions have no local body; otherwise `AS <expr>` is
        // required. Which CreateFunctionBody variant is used records where
        // OPTIONS appeared relative to AS, so the statement round-trips.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5938
    /// Parse an MsSql `CREATE [OR ALTER] FUNCTION`. Handles both table-valued
    /// functions (`RETURNS <var> TABLE (...)`) and scalar functions, with
    /// bodies given either as a `BEGIN ... END` block or as
    /// `RETURN <subquery | expr | SELECT>`.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Speculatively parse `<var> TABLE (...)` for a table-valued
        // function; maybe_parse rewinds if the RETURNS clause turns out to be
        // a plain data type instead.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            // Require TABLE after the variable name, then step back so
            // parse_data_type can consume it as part of the table type.
            p.expect_keyword_is(Keyword::TABLE)?;
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // AS is optional before the body.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            // Multi-statement body: BEGIN <statements> END.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            // Single-expression body: parenthesized expression or bare SELECT.
            if self.peek_token_ref().token == Token::LParen {
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
6029
6030 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
6031 if self.parse_keyword(Keyword::SETOF) {
6032 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
6033 } else {
6034 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
6035 }
6036 }
6037
6038 fn parse_create_function_name_and_params(
6039 &mut self,
6040 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
6041 let name = self.parse_object_name(false)?;
6042 let parse_function_param =
6043 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6044 let name = parser.parse_identifier()?;
6045 let data_type = parser.parse_data_type()?;
6046 let default_expr = if parser.consume_token(&Token::Eq) {
6047 Some(parser.parse_expr()?)
6048 } else {
6049 None
6050 };
6051
6052 Ok(OperateFunctionArg {
6053 mode: None,
6054 name: Some(name),
6055 data_type,
6056 default_expr,
6057 })
6058 };
6059 self.expect_token(&Token::LParen)?;
6060 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6061 self.expect_token(&Token::RParen)?;
6062 Ok((name, args))
6063 }
6064
    /// Parse one `CREATE FUNCTION` argument:
    /// `[IN | OUT | INOUT | VARIADIC] [name] type [DEFAULT expr | = expr]`.
    ///
    /// The argument name is optional, so the first thing parsed might be
    /// either the name or the type. We parse a data type, then speculatively
    /// try to parse a second one; if that succeeds, the first parse was
    /// really the name.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Index of the last token consumed by the first parse — the
        // candidate name token if a second data type follows.
        let data_type_idx = self.get_current_index();

        // DEFAULT must not be mistaken for a type name during the
        // speculative second parse.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // An argument name must be a single word token.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            // Reinterpret the first parse as the name; the second parse is
            // the actual type.
            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6125
    /// Parse one argument inside a `CREATE AGGREGATE` signature:
    /// `[IN] [name] type`.
    ///
    /// OUT/INOUT/VARIADIC modes and DEFAULT values are rejected here. Like
    /// [`Parser::parse_function_arg`], the optional name is disambiguated by
    /// speculatively parsing a second data type.
    fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else {
            // Only IN is legal in an aggregate signature; reject the other
            // modes explicitly with a targeted error.
            if self
                .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
                .is_some()
            {
                return self.expected_ref(
                    "IN or argument type in aggregate signature",
                    self.peek_token_ref(),
                );
            }
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        // Index of the last token of the first parse — the candidate name
        // token if a second data type follows.
        let data_type_idx = self.get_current_index();

        // Tokens that can follow an argument (DEFAULT, ORDER, ',' or ')')
        // must not be mistaken for the start of a second type.
        fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT)
                || parser.peek_keyword(Keyword::ORDER)
                || parser.peek_token_ref().token == Token::Comma
                || parser.peek_token_ref().token == Token::RParen
            {
                parser_err!(
                    "The current token cannot start an aggregate argument type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
            let token = self.token_at(data_type_idx);
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            // Reinterpret the first parse as the argument name.
            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Aggregate arguments cannot carry defaults.
        if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
            return self.expected_ref(
                "',' or ')' or ORDER BY after aggregate argument type",
                self.peek_token_ref(),
            );
        }

        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr: None,
        })
    }
6188
6189 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6195 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6196 {
6197 self.prev_token();
6198 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6199 }
6200 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6201 let trigger_name = self.parse_object_name(false)?;
6202 let table_name = if self.parse_keyword(Keyword::ON) {
6203 Some(self.parse_object_name(false)?)
6204 } else {
6205 None
6206 };
6207 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6208 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6209 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6210 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6211 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6212 )),
6213 None => None,
6214 };
6215 Ok(DropTrigger {
6216 if_exists,
6217 trigger_name,
6218 table_name,
6219 option,
6220 })
6221 }
6222
    /// Parse a `CREATE TRIGGER` statement. The caller has already consumed
    /// `CREATE`, any `OR ALTER` / `OR REPLACE` / `TEMPORARY` / `CONSTRAINT`
    /// modifiers (passed in as flags), and the `TRIGGER` keyword.
    pub fn parse_create_trigger(
        &mut self,
        temporary: bool,
        or_alter: bool,
        or_replace: bool,
        is_constraint: bool,
    ) -> Result<CreateTrigger, ParserError> {
        // CREATE TRIGGER is only recognized for these dialects; otherwise
        // back up and report the generic "object type after CREATE" error.
        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
        {
            self.prev_token();
            return self.expected_ref("an object type after CREATE", self.peek_token_ref());
        }

        let name = self.parse_object_name(false)?;
        // The trigger period (FOR/BEFORE/AFTER/INSTEAD OF) is optional here;
        // maybe_parse backtracks if it is absent.
        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;

        // One or more events separated by OR, e.g. `INSERT OR UPDATE`.
        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `FROM referenced_table` (constraint triggers). Parse
        // failures here are deliberately swallowed via `.ok()`.
        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
            self.parse_object_name(true).ok()
        } else {
            None
        };

        let characteristics = self.parse_constraint_characteristics()?;

        // `REFERENCING OLD/NEW TABLE [AS] name` transition relations, zero
        // or more of them.
        let mut referencing = vec![];
        if self.parse_keyword(Keyword::REFERENCING) {
            while let Some(refer) = self.parse_trigger_referencing()? {
                referencing.push(refer);
            }
        }

        // `FOR [EACH] ROW | STATEMENT` granularity clause.
        let trigger_object = if self.parse_keyword(Keyword::FOR) {
            let include_each = self.parse_keyword(Keyword::EACH);
            let trigger_object =
                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
                    Keyword::ROW => TriggerObject::Row,
                    Keyword::STATEMENT => TriggerObject::Statement,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
                    )),
                };

            Some(if include_each {
                TriggerObjectKind::ForEach(trigger_object)
            } else {
                TriggerObjectKind::For(trigger_object)
            })
        } else {
            // NOTE(review): `FOR` already failed to match in the `if` above,
            // so this second attempt is a no-op — presumably leftover;
            // confirm against upstream before removing.
            let _ = self.parse_keyword(Keyword::FOR);

            None
        };

        // Optional `WHEN <condition>` guard expression.
        let condition = self
            .parse_keyword(Keyword::WHEN)
            .then(|| self.parse_expr())
            .transpose()?;

        // Trigger body: either `EXECUTE FUNCTION/PROCEDURE ...` or an inline
        // statement list terminated by END.
        let mut exec_body = None;
        let mut statements = None;
        if self.parse_keyword(Keyword::EXECUTE) {
            exec_body = Some(self.parse_trigger_exec_body()?);
        } else {
            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
        }

        Ok(CreateTrigger {
            or_alter,
            temporary,
            or_replace,
            is_constraint,
            name,
            period,
            period_before_table: true,
            events,
            table_name,
            referenced_table_name,
            referencing,
            trigger_object,
            condition,
            exec_body,
            statements_as: false,
            statements,
            characteristics,
        })
    }
6314
6315 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6317 Ok(
6318 match self.expect_one_of_keywords(&[
6319 Keyword::FOR,
6320 Keyword::BEFORE,
6321 Keyword::AFTER,
6322 Keyword::INSTEAD,
6323 ])? {
6324 Keyword::FOR => TriggerPeriod::For,
6325 Keyword::BEFORE => TriggerPeriod::Before,
6326 Keyword::AFTER => TriggerPeriod::After,
6327 Keyword::INSTEAD => self
6328 .expect_keyword_is(Keyword::OF)
6329 .map(|_| TriggerPeriod::InsteadOf)?,
6330 unexpected_keyword => return Err(ParserError::ParserError(
6331 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6332 )),
6333 },
6334 )
6335 }
6336
6337 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6339 Ok(
6340 match self.expect_one_of_keywords(&[
6341 Keyword::INSERT,
6342 Keyword::UPDATE,
6343 Keyword::DELETE,
6344 Keyword::TRUNCATE,
6345 ])? {
6346 Keyword::INSERT => TriggerEvent::Insert,
6347 Keyword::UPDATE => {
6348 if self.parse_keyword(Keyword::OF) {
6349 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6350 TriggerEvent::Update(cols)
6351 } else {
6352 TriggerEvent::Update(vec![])
6353 }
6354 }
6355 Keyword::DELETE => TriggerEvent::Delete,
6356 Keyword::TRUNCATE => TriggerEvent::Truncate,
6357 unexpected_keyword => return Err(ParserError::ParserError(
6358 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6359 )),
6360 },
6361 )
6362 }
6363
6364 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6366 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6367 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6368 TriggerReferencingType::OldTable
6369 }
6370 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6371 TriggerReferencingType::NewTable
6372 }
6373 _ => {
6374 return Ok(None);
6375 }
6376 };
6377
6378 let is_as = self.parse_keyword(Keyword::AS);
6379 let transition_relation_name = self.parse_object_name(false)?;
6380 Ok(Some(TriggerReferencing {
6381 refer_type,
6382 is_as,
6383 transition_relation_name,
6384 }))
6385 }
6386
6387 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6394 let exec_type = match self
6395 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6396 {
6397 Keyword::FUNCTION => TriggerExecBodyType::Function,
6398 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6399 unexpected_keyword => {
6400 return Err(ParserError::ParserError(format!(
6401 "Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"
6402 )))
6403 }
6404 };
6405
6406 let func_name = self.parse_object_name(false)?;
6407
6408 let args = if self.consume_token(&Token::LParen) {
6409 if self.consume_token(&Token::RParen) {
6410 Some(vec![])
6411 } else {
6412 let exprs = self.parse_comma_separated(Parser::parse_expr)?;
6413 self.expect_token(&Token::RParen)?;
6414 Some(exprs)
6415 }
6416 } else {
6417 None
6418 };
6419
6420 Ok(TriggerExecBody {
6421 exec_type,
6422 func_name,
6423 args,
6424 })
6425 }
6426
6427 pub fn parse_create_macro(
6429 &mut self,
6430 or_replace: bool,
6431 temporary: bool,
6432 ) -> Result<Statement, ParserError> {
6433 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6434 let name = self.parse_object_name(false)?;
6435 self.expect_token(&Token::LParen)?;
6436 let args = if self.consume_token(&Token::RParen) {
6437 self.prev_token();
6438 None
6439 } else {
6440 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6441 };
6442
6443 self.expect_token(&Token::RParen)?;
6444 self.expect_keyword_is(Keyword::AS)?;
6445
6446 Ok(Statement::CreateMacro {
6447 or_replace,
6448 temporary,
6449 name,
6450 args,
6451 definition: if self.parse_keyword(Keyword::TABLE) {
6452 MacroDefinition::Table(self.parse_query()?)
6453 } else {
6454 MacroDefinition::Expr(self.parse_expr()?)
6455 },
6456 })
6457 } else {
6458 self.prev_token();
6459 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6460 }
6461 }
6462
6463 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6464 let name = self.parse_identifier()?;
6465
6466 let default_expr =
6467 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6468 Some(self.parse_expr()?)
6469 } else {
6470 None
6471 };
6472 Ok(MacroArg { name, default_expr })
6473 }
6474
6475 pub fn parse_create_external_table(
6477 &mut self,
6478 or_replace: bool,
6479 ) -> Result<CreateTable, ParserError> {
6480 self.expect_keyword_is(Keyword::TABLE)?;
6481 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6482 let table_name = self.parse_object_name(false)?;
6483 let (columns, constraints) = self.parse_columns()?;
6484
6485 let hive_distribution = self.parse_hive_distribution()?;
6486 let hive_formats = self.parse_hive_formats()?;
6487
6488 let file_format = if let Some(ref hf) = hive_formats {
6489 if let Some(ref ff) = hf.storage {
6490 match ff {
6491 HiveIOFormat::FileFormat { format } => Some(*format),
6492 _ => None,
6493 }
6494 } else {
6495 None
6496 }
6497 } else {
6498 None
6499 };
6500 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6501 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6502 let table_options = if !table_properties.is_empty() {
6503 CreateTableOptions::TableProperties(table_properties)
6504 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6505 CreateTableOptions::Options(options)
6506 } else {
6507 CreateTableOptions::None
6508 };
6509 Ok(CreateTableBuilder::new(table_name)
6510 .columns(columns)
6511 .constraints(constraints)
6512 .hive_distribution(hive_distribution)
6513 .hive_formats(hive_formats)
6514 .table_options(table_options)
6515 .or_replace(or_replace)
6516 .if_not_exists(if_not_exists)
6517 .external(true)
6518 .file_format(file_format)
6519 .location(location)
6520 .build())
6521 }
6522
6523 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6527 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6528 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6529 let table_name = self.parse_object_name(true)?;
6530
6531 self.expect_keyword_is(Keyword::CLONE)?;
6532 let clone = Some(self.parse_object_name(true)?);
6533
6534 let version =
6535 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6536 {
6537 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6538 } else {
6539 None
6540 };
6541
6542 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6543 CreateTableOptions::Options(options)
6544 } else {
6545 CreateTableOptions::None
6546 };
6547
6548 Ok(CreateTableBuilder::new(table_name)
6549 .snapshot(true)
6550 .if_not_exists(if_not_exists)
6551 .clone_clause(clone)
6552 .version(version)
6553 .table_options(table_options)
6554 .build())
6555 }
6556
6557 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6559 let next_token = self.next_token();
6560 match &next_token.token {
6561 Token::Word(w) => match w.keyword {
6562 Keyword::AVRO => Ok(FileFormat::AVRO),
6563 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6564 Keyword::ORC => Ok(FileFormat::ORC),
6565 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6566 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6567 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6568 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6569 _ => self.expected("fileformat", next_token),
6570 },
6571 _ => self.expected("fileformat", next_token),
6572 }
6573 }
6574
6575 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6576 if self.consume_token(&Token::Eq) {
6577 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6578 } else {
6579 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6580 }
6581 }
6582
6583 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6585 let next_token = self.next_token();
6586 match &next_token.token {
6587 Token::Word(w) => match w.keyword {
6588 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6589 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6590 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6591 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6592 _ => self.expected("fileformat", next_token),
6593 },
6594 _ => self.expected("fileformat", next_token),
6595 }
6596 }
6597
    /// Parse a `CREATE [OR ALTER] [OR REPLACE] [TEMPORARY] [SECURE]
    /// [MATERIALIZED] VIEW` statement. `CREATE` and the leading modifiers
    /// (passed in as flags/params) have already been consumed by the caller.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery allows unquoted hyphens in (project-qualified) names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` may appear before or after the view name; record
        // which position was used so the statement can round-trip.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        // `WITH (...)` options are read first; a later BigQuery-style
        // `OPTIONS(...)` clause (below) may overwrite `options`.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse `TO table` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '...'` where the dialect supports it.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift-style trailing `WITH NO SCHEMA BINDING` clause.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        // Materialized views may end in `WITH [NO] DATA`.
        let with_data = if materialized && self.parse_keyword(Keyword::WITH) {
            if self.parse_keyword(Keyword::NO) {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(false)
            } else {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(true)
            }
        } else {
            None
        };

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
            with_data,
        })
    }
6707
    /// Parse the optional MySQL view parameters that may appear between
    /// `CREATE` and `VIEW`: `ALGORITHM = ...`, `DEFINER = ...`, and
    /// `SQL SECURITY ...`. Returns `None` when none of them are present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Defensive: expect_one_of_keywords should only return a
                    // listed keyword. Re-read the offending token to build
                    // the error.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Defensive fallback, same rationale as above.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only wrap in Some(..) when at least one parameter was given.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6765
    /// Parse a `CREATE ROLE` statement (the `CREATE ROLE` keywords are
    /// already consumed).
    ///
    /// Which option keywords are recognized depends on the dialect: MSSQL
    /// accepts only `AUTHORIZATION`, PostgreSQL accepts the full role-option
    /// list, and all other dialects accept no options at all.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // An optional WITH may precede the option list.
        let _ = self.parse_keyword(Keyword::WITH);
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Collected option values. Each option may be given at most once;
        // a repeated option produces an error anchored at the repeated
        // keyword's location (`loc` below).
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        // Each match arm evaluates to Result<(), _>; the trailing `?` after
        // the match propagates duplicate-option errors.
        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, for error reporting.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN ROLE ...` and `IN GROUP ...` are distinct options.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Any keyword outside the option list ends the loop.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
6990
6991 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6993 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6994 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6995 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6996 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6997 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6998 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6999 )),
7000 None => {
7001 match self.parse_identifier() {
7002 Ok(ident) => Owner::Ident(ident),
7003 Err(e) => {
7004 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
7005 }
7006 }
7007 }
7008 };
7009 Ok(owner)
7010 }
7011
7012 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
7014 let name = self.parse_object_name(false)?;
7015 self.expect_keyword_is(Keyword::AS)?;
7016 let data_type = self.parse_data_type()?;
7017 let collation = if self.parse_keyword(Keyword::COLLATE) {
7018 Some(self.parse_identifier()?)
7019 } else {
7020 None
7021 };
7022 let default = if self.parse_keyword(Keyword::DEFAULT) {
7023 Some(self.parse_expr()?)
7024 } else {
7025 None
7026 };
7027 let mut constraints = Vec::new();
7028 while let Some(constraint) = self.parse_optional_table_constraint()? {
7029 constraints.push(constraint);
7030 }
7031
7032 Ok(CreateDomain {
7033 name,
7034 data_type,
7035 collation,
7036 default,
7037 constraints,
7038 })
7039 }
7040
7041 pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
7051 let name = self.parse_identifier()?;
7052 self.expect_keyword_is(Keyword::ON)?;
7053 let table_name = self.parse_object_name(false)?;
7054
7055 let policy_type = if self.parse_keyword(Keyword::AS) {
7056 let keyword =
7057 self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
7058 Some(match keyword {
7059 Keyword::PERMISSIVE => CreatePolicyType::Permissive,
7060 Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
7061 unexpected_keyword => return Err(ParserError::ParserError(
7062 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
7063 )),
7064 })
7065 } else {
7066 None
7067 };
7068
7069 let command = if self.parse_keyword(Keyword::FOR) {
7070 let keyword = self.expect_one_of_keywords(&[
7071 Keyword::ALL,
7072 Keyword::SELECT,
7073 Keyword::INSERT,
7074 Keyword::UPDATE,
7075 Keyword::DELETE,
7076 ])?;
7077 Some(match keyword {
7078 Keyword::ALL => CreatePolicyCommand::All,
7079 Keyword::SELECT => CreatePolicyCommand::Select,
7080 Keyword::INSERT => CreatePolicyCommand::Insert,
7081 Keyword::UPDATE => CreatePolicyCommand::Update,
7082 Keyword::DELETE => CreatePolicyCommand::Delete,
7083 unexpected_keyword => return Err(ParserError::ParserError(
7084 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
7085 )),
7086 })
7087 } else {
7088 None
7089 };
7090
7091 let to = if self.parse_keyword(Keyword::TO) {
7092 Some(self.parse_comma_separated(|p| p.parse_owner())?)
7093 } else {
7094 None
7095 };
7096
7097 let using = if self.parse_keyword(Keyword::USING) {
7098 self.expect_token(&Token::LParen)?;
7099 let expr = self.parse_expr()?;
7100 self.expect_token(&Token::RParen)?;
7101 Some(expr)
7102 } else {
7103 None
7104 };
7105
7106 let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
7107 self.expect_token(&Token::LParen)?;
7108 let expr = self.parse_expr()?;
7109 self.expect_token(&Token::RParen)?;
7110 Some(expr)
7111 } else {
7112 None
7113 };
7114
7115 Ok(CreatePolicy {
7116 name,
7117 table_name,
7118 policy_type,
7119 command,
7120 to,
7121 using,
7122 with_check,
7123 })
7124 }
7125
7126 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7136 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7137 let name = self.parse_identifier()?;
7138
7139 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7140 Some(self.parse_literal_string()?)
7141 } else {
7142 None
7143 };
7144
7145 let url = if self.parse_keyword(Keyword::URL) {
7146 Some(self.parse_literal_string()?)
7147 } else {
7148 None
7149 };
7150
7151 let comment = self.parse_optional_inline_comment()?;
7152
7153 let with_dcproperties =
7154 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7155 properties if !properties.is_empty() => Some(properties),
7156 _ => None,
7157 };
7158
7159 Ok(CreateConnector {
7160 name,
7161 if_not_exists,
7162 connector_type,
7163 url,
7164 comment,
7165 with_dcproperties,
7166 })
7167 }
7168
7169 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7175 let mut parts = vec![];
7176 loop {
7177 parts.push(ObjectNamePart::Identifier(Ident::new(
7178 self.next_token().to_string(),
7179 )));
7180 if !self.consume_token(&Token::Period) {
7181 break;
7182 }
7183 }
7184 Ok(ObjectName(parts))
7185 }
7186
    /// Parse the body of a `CREATE OPERATOR name ( param = value, ... )`
    /// definition (PostgreSQL syntax). Each parameter may appear at most
    /// once; `FUNCTION` (or `PROCEDURE`) is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        // Comma-separated parameter list. Every arm below is guarded by a
        // "not yet seen" check; a duplicate parameter fails the guard and
        // falls through to the error arm at the bottom of the match.
        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Bare boolean flags (no `= value`).
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE share one slot; remember which
                // spelling was used for round-tripping.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or
                // the `OPERATOR(op)` wrapper syntax.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                // RESTRICT / JOIN name estimator support functions.
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION/PROCEDURE is the only mandatory parameter.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7308
7309 pub fn parse_create_aggregate(
7313 &mut self,
7314 or_replace: bool,
7315 ) -> Result<CreateAggregate, ParserError> {
7316 let name = self.parse_object_name(false)?;
7317
7318 self.expect_token(&Token::LParen)?;
7320 let args = if self.consume_token(&Token::Mul) {
7321 vec![]
7323 } else if self.consume_token(&Token::RParen) {
7324 self.prev_token();
7325 vec![]
7326 } else {
7327 let parsed = self.parse_comma_separated(|p| p.parse_data_type())?;
7328 parsed
7329 };
7330 self.expect_token(&Token::RParen)?;
7331
7332 self.expect_token(&Token::LParen)?;
7334 let mut options: Vec<CreateAggregateOption> = Vec::new();
7335 loop {
7336 let token = self.next_token();
7337 match &token.token {
7338 Token::RParen => break,
7339 Token::Comma => continue,
7340 Token::Word(word) => {
7341 let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
7342 options.push(option);
7343 }
7344 other => {
7345 return Err(ParserError::ParserError(format!(
7346 "Unexpected token in CREATE AGGREGATE options: {other:?}"
7347 )));
7348 }
7349 }
7350 }
7351
7352 Ok(CreateAggregate {
7353 or_replace,
7354 name,
7355 args,
7356 options,
7357 })
7358 }
7359
    /// Parse a single option from a `CREATE AGGREGATE ( ... )` option list.
    ///
    /// `key` is the option name, already upper-cased by the caller.
    /// Flag-style options (`FINALFUNC_EXTRA`, `MFINALFUNC_EXTRA`,
    /// `HYPOTHETICAL`) take no value; every other option expects `= <value>`.
    fn parse_create_aggregate_option(
        &mut self,
        key: &str,
    ) -> Result<CreateAggregateOption, ParserError> {
        match key {
            // --- Plain (non-moving) aggregate options ---
            "SFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "STYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
            }
            "SSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Sspace(size))
            }
            "FINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Finalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` follows.
            "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
            "FINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::FinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "COMBINEFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Combinefunc(
                    self.parse_object_name(false)?,
                ))
            }
            "SERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Serialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "DESERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Deserialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "INITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
            }
            // --- `M`-prefixed options for the moving-aggregate variant ---
            "MSFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Msfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MINVFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minvfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MSTYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
            }
            "MSSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Msspace(size))
            }
            "MFINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mfinalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` follows.
            "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
            "MFINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::MfinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "MINITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
            }
            // --- Misc options ---
            "SORTOP" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sortop(
                    self.parse_object_name(false)?,
                ))
            }
            "PARALLEL" => {
                self.expect_token(&Token::Eq)?;
                let parallel = match self.expect_one_of_keywords(&[
                    Keyword::SAFE,
                    Keyword::RESTRICTED,
                    Keyword::UNSAFE,
                ])? {
                    Keyword::SAFE => FunctionParallel::Safe,
                    Keyword::RESTRICTED => FunctionParallel::Restricted,
                    Keyword::UNSAFE => FunctionParallel::Unsafe,
                    // expect_one_of_keywords only returns a listed keyword
                    _ => unreachable!(),
                };
                Ok(CreateAggregateOption::Parallel(parallel))
            }
            // Bare flag: no `= value` follows.
            "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
            other => Err(ParserError::ParserError(format!(
                "Unknown CREATE AGGREGATE option: {other}"
            ))),
        }
    }
7479
7480 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7481 let token = self.next_token();
7482 match &token.token {
7483 Token::Word(word) => match word.value.to_uppercase().as_str() {
7484 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7485 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7486 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7487 other => Err(ParserError::ParserError(format!(
7488 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7489 ))),
7490 },
7491 other => Err(ParserError::ParserError(format!(
7492 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7493 ))),
7494 }
7495 }
7496
7497 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7501 let name = self.parse_object_name(false)?;
7502 self.expect_keyword(Keyword::USING)?;
7503 let using = self.parse_identifier()?;
7504
7505 Ok(CreateOperatorFamily { name, using })
7506 }
7507
    /// Parse `CREATE OPERATOR CLASS name [DEFAULT] FOR TYPE t USING method
    /// [FAMILY f] AS item [, ...]` (PostgreSQL), with the parser positioned
    /// just after `CLASS`.
    ///
    /// Items are `OPERATOR`, `FUNCTION`, or `STORAGE` entries; the item loop
    /// ends at the first token that starts none of those.
    pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
        let name = self.parse_object_name(false)?;
        let default = self.parse_keyword(Keyword::DEFAULT);
        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
        let for_type = self.parse_data_type()?;
        self.expect_keyword(Keyword::USING)?;
        let using = self.parse_identifier()?;

        let family = if self.parse_keyword(Keyword::FAMILY) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        self.expect_keyword(Keyword::AS)?;

        let mut items = vec![];
        loop {
            if self.parse_keyword(Keyword::OPERATOR) {
                let strategy_number = self.parse_literal_uint()?;
                let operator_name = self.parse_operator_name()?;

                // Optional `(left_type, right_type)` pair for the operator.
                let op_types = if self.consume_token(&Token::LParen) {
                    let left = self.parse_data_type()?;
                    self.expect_token(&Token::Comma)?;
                    let right = self.parse_data_type()?;
                    self.expect_token(&Token::RParen)?;
                    Some(OperatorArgTypes { left, right })
                } else {
                    None
                };

                // Optional `FOR SEARCH` / `FOR ORDER BY sort_family` clause.
                let purpose = if self.parse_keyword(Keyword::FOR) {
                    if self.parse_keyword(Keyword::SEARCH) {
                        Some(OperatorPurpose::ForSearch)
                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                        let sort_family = self.parse_object_name(false)?;
                        Some(OperatorPurpose::ForOrderBy { sort_family })
                    } else {
                        return self
                            .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
                    }
                } else {
                    None
                };

                items.push(OperatorClassItem::Operator {
                    strategy_number,
                    operator_name,
                    op_types,
                    purpose,
                });
            } else if self.parse_keyword(Keyword::FUNCTION) {
                let support_number = self.parse_literal_uint()?;

                // A parenthesized list *before* the function name is the
                // optional operand-type list; `()` means an explicitly
                // empty list, absent parens mean no list at all.
                let op_types = if self.consume_token(&Token::LParen)
                    && self.peek_token_ref().token != Token::RParen
                {
                    let mut types = vec![];
                    loop {
                        types.push(self.parse_data_type()?);
                        if !self.consume_token(&Token::Comma) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    Some(types)
                } else if self.consume_token(&Token::LParen) {
                    // NOTE: reached when the first branch consumed `(` and
                    // the next token is `)` — this consumes that `)`, not a
                    // second `(`.
                    self.expect_token(&Token::RParen)?;
                    Some(vec![])
                } else {
                    None
                };

                let function_name = self.parse_object_name(false)?;

                // Parenthesized argument-type list *after* the name.
                let argument_types = if self.consume_token(&Token::LParen) {
                    let mut types = vec![];
                    loop {
                        if self.peek_token_ref().token == Token::RParen {
                            break;
                        }
                        types.push(self.parse_data_type()?);
                        if !self.consume_token(&Token::Comma) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    types
                } else {
                    vec![]
                };

                items.push(OperatorClassItem::Function {
                    support_number,
                    op_types,
                    function_name,
                    argument_types,
                });
            } else if self.parse_keyword(Keyword::STORAGE) {
                let storage_type = self.parse_data_type()?;
                items.push(OperatorClassItem::Storage { storage_type });
            } else {
                break;
            }

            // Items are comma-separated; a missing comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(CreateOperatorClass {
            name,
            default,
            for_type,
            using,
            family,
            items,
        })
    }
7636
    /// Parse a `DROP ...` statement, dispatching on the object-type keyword.
    ///
    /// Object types with their own grammar (FUNCTION, POLICY, OPERATOR, …)
    /// return early via dedicated sub-parsers; the remaining types share the
    /// generic `DROP <type> [IF EXISTS] name [, ...] [CASCADE|RESTRICT|PURGE]
    /// [ON table]` tail handled below.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // Dialect-gated prefixes: MySQL/DuckDB `DROP TEMPORARY ...`,
        // DuckDB `DROP PERSISTENT ...` (used by DROP SECRET).
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        // NOTE: `VIEW` is tested before `MATERIALIZED VIEW`; that is safe
        // because `parse_keyword(VIEW)` fails on the `MATERIALIZED` token.
        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::COLLATION) {
            ObjectType::Collation
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function().map(Into::into);
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy().map(Into::into);
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain().map(Into::into);
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger().map(Into::into);
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // DROP OPERATOR {FAMILY | CLASS | <signature>}
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected_ref(
                "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token_ref(),
            );
        };
        // Generic tail shared by the simple object types.
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Remember the location before the behavior keywords for error spans.
        let loc = self.peek_token_ref().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // MySQL `DROP INDEX name ON table` style trailing clause.
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
7736
7737 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7738 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7739 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7740 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7741 _ => None,
7742 }
7743 }
7744
7745 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7750 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7751 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7752 let drop_behavior = self.parse_optional_drop_behavior();
7753 Ok(DropFunction {
7754 if_exists,
7755 func_desc,
7756 drop_behavior,
7757 })
7758 }
7759
7760 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7766 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7767 let name = self.parse_identifier()?;
7768 self.expect_keyword_is(Keyword::ON)?;
7769 let table_name = self.parse_object_name(false)?;
7770 let drop_behavior = self.parse_optional_drop_behavior();
7771 Ok(DropPolicy {
7772 if_exists,
7773 name,
7774 table_name,
7775 drop_behavior,
7776 })
7777 }
7778 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7784 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7785 let name = self.parse_identifier()?;
7786 Ok(Statement::DropConnector { if_exists, name })
7787 }
7788
7789 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7793 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7794 let name = self.parse_object_name(false)?;
7795 let drop_behavior = self.parse_optional_drop_behavior();
7796 Ok(DropDomain {
7797 if_exists,
7798 name,
7799 drop_behavior,
7800 })
7801 }
7802
7803 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7808 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7809 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7810 let drop_behavior = self.parse_optional_drop_behavior();
7811 Ok(Statement::DropProcedure {
7812 if_exists,
7813 proc_desc,
7814 drop_behavior,
7815 })
7816 }
7817
7818 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7819 let name = self.parse_object_name(false)?;
7820
7821 let args = if self.consume_token(&Token::LParen) {
7822 if self.consume_token(&Token::RParen) {
7823 Some(vec![])
7824 } else {
7825 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7826 self.expect_token(&Token::RParen)?;
7827 Some(args)
7828 }
7829 } else {
7830 None
7831 };
7832
7833 Ok(FunctionDesc { name, args })
7834 }
7835
7836 fn parse_drop_secret(
7838 &mut self,
7839 temporary: bool,
7840 persistent: bool,
7841 ) -> Result<Statement, ParserError> {
7842 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7843 let name = self.parse_identifier()?;
7844 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7845 self.parse_identifier().ok()
7846 } else {
7847 None
7848 };
7849 let temp = match (temporary, persistent) {
7850 (true, false) => Some(true),
7851 (false, true) => Some(false),
7852 (false, false) => None,
7853 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
7854 };
7855
7856 Ok(Statement::DropSecret {
7857 if_exists,
7858 temporary: temp,
7859 name,
7860 storage_specifier,
7861 })
7862 }
7863
    /// Parse a `DECLARE` statement.
    ///
    /// BigQuery, Snowflake, and MsSql each have their own DECLARE grammar and
    /// are dispatched to dedicated sub-parsers; the fallthrough below parses
    /// the SQL-standard / PostgreSQL cursor form:
    /// `DECLARE name [BINARY] [ASENSITIVE | INSENSITIVE] [[NO] SCROLL]
    ///  CURSOR [WITH | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // Cursor modifiers must appear in this order before CURSOR.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // parse_one_of_keywords only returns a listed keyword
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7938
    /// Parse a BigQuery `DECLARE` statement:
    /// `DECLARE name [, ...] [type] [DEFAULT expr]`.
    ///
    /// Either the type or the `DEFAULT` initializer may be omitted, but not
    /// both: with no type, `DEFAULT expr` is mandatory.
    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
        let names = self.parse_comma_separated(Parser::parse_identifier)?;

        // If the next word is DEFAULT there is no declared type.
        let data_type = match &self.peek_token_ref().token {
            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
            _ => Some(self.parse_data_type()?),
        };

        let expr = if data_type.is_some() {
            // Type present: DEFAULT initializer is optional.
            if self.parse_keyword(Keyword::DEFAULT) {
                Some(self.parse_expr()?)
            } else {
                None
            }
        } else {
            // No type: DEFAULT initializer is required.
            self.expect_keyword_is(Keyword::DEFAULT)?;
            Some(self.parse_expr()?)
        };

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names,
                data_type,
                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
                declare_type: None,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query: None,
            }],
        })
    }
7981
    /// Parse a Snowflake `DECLARE` block containing one or more
    /// semicolon-separated declarations: cursors (`name CURSOR FOR ...`),
    /// result sets (`name RESULTSET ...`), exceptions
    /// (`name EXCEPTION ...`), and plain variables
    /// (`name [type] [DEFAULT | := expr]`).
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` binds a query; any other
                    // expression after FOR is a variable reference.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // RESULTSET may optionally carry an initializer; a bare
                    // `name RESULTSET;` has none.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // EXCEPTION may carry a parenthesized definition.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: initializer, or data type followed by
                    // an optional initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                // After a semicolon, a non-keyword word starts another
                // declaration; a keyword means the DECLARE block ended, so
                // push the semicolon back for the outer parser.
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8097
8098 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8110 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8111
8112 Ok(Statement::Declare { stmts })
8113 }
8114
    /// Parse a single MsSql declaration:
    /// `@name [AS] type [= expr]` or `name CURSOR [FOR query]`.
    ///
    /// Variable names must start with `@` unless the declaration is a
    /// cursor (whose name need not be prefixed).
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): a parse failure surfaced as TokenizerError
                // looks odd — confirm this variant (vs. ParserError) is
                // intended before changing it; callers may match on it.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // CURSOR has no data type; `AS` before the type is optional.
        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // `FOR query` binds a cursor query; otherwise an optional `= expr`.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
8178
8179 pub fn parse_snowflake_variable_declaration_expression(
8187 &mut self,
8188 ) -> Result<Option<DeclareAssignment>, ParserError> {
8189 Ok(match &self.peek_token_ref().token {
8190 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8191 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8193 }
8194 Token::Assignment => {
8195 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8197 self.parse_expr()?,
8198 )))
8199 }
8200 _ => None,
8201 })
8202 }
8203
8204 pub fn parse_mssql_variable_declaration_expression(
8211 &mut self,
8212 ) -> Result<Option<DeclareAssignment>, ParserError> {
8213 Ok(match &self.peek_token_ref().token {
8214 Token::Eq => {
8215 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8217 self.parse_expr()?,
8218 )))
8219 }
8220 _ => None,
8221 })
8222 }
8223
8224 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
8226 let direction = if self.parse_keyword(Keyword::NEXT) {
8227 FetchDirection::Next
8228 } else if self.parse_keyword(Keyword::PRIOR) {
8229 FetchDirection::Prior
8230 } else if self.parse_keyword(Keyword::FIRST) {
8231 FetchDirection::First
8232 } else if self.parse_keyword(Keyword::LAST) {
8233 FetchDirection::Last
8234 } else if self.parse_keyword(Keyword::ABSOLUTE) {
8235 FetchDirection::Absolute {
8236 limit: self.parse_number_value()?,
8237 }
8238 } else if self.parse_keyword(Keyword::RELATIVE) {
8239 FetchDirection::Relative {
8240 limit: self.parse_number_value()?,
8241 }
8242 } else if self.parse_keyword(Keyword::FORWARD) {
8243 if self.parse_keyword(Keyword::ALL) {
8244 FetchDirection::ForwardAll
8245 } else {
8246 FetchDirection::Forward {
8247 limit: Some(self.parse_number_value()?),
8249 }
8250 }
8251 } else if self.parse_keyword(Keyword::BACKWARD) {
8252 if self.parse_keyword(Keyword::ALL) {
8253 FetchDirection::BackwardAll
8254 } else {
8255 FetchDirection::Backward {
8256 limit: Some(self.parse_number_value()?),
8258 }
8259 }
8260 } else if self.parse_keyword(Keyword::ALL) {
8261 FetchDirection::All
8262 } else {
8263 FetchDirection::Count {
8264 limit: self.parse_number_value()?,
8265 }
8266 };
8267
8268 let position = if self.peek_keyword(Keyword::FROM) {
8269 self.expect_keyword(Keyword::FROM)?;
8270 FetchPosition::From
8271 } else if self.peek_keyword(Keyword::IN) {
8272 self.expect_keyword(Keyword::IN)?;
8273 FetchPosition::In
8274 } else {
8275 return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
8276 };
8277
8278 let name = self.parse_identifier()?;
8279
8280 let into = if self.parse_keyword(Keyword::INTO) {
8281 Some(self.parse_object_name(false)?)
8282 } else {
8283 None
8284 };
8285
8286 Ok(Statement::Fetch {
8287 name,
8288 direction,
8289 position,
8290 into,
8291 })
8292 }
8293
8294 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8296 let object_type = if self.parse_keyword(Keyword::ALL) {
8297 DiscardObject::ALL
8298 } else if self.parse_keyword(Keyword::PLANS) {
8299 DiscardObject::PLANS
8300 } else if self.parse_keyword(Keyword::SEQUENCES) {
8301 DiscardObject::SEQUENCES
8302 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8303 DiscardObject::TEMP
8304 } else {
8305 return self.expected_ref(
8306 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8307 self.peek_token_ref(),
8308 );
8309 };
8310 Ok(Statement::Discard { object_type })
8311 }
8312
    /// Parse a `CREATE [UNIQUE] INDEX` statement, with the parser positioned
    /// just after `INDEX`; `unique` reports whether `UNIQUE` was seen.
    ///
    /// Handles both the named form (`CREATE INDEX name ON table ...`) and
    /// the anonymous form (`CREATE INDEX ON table ...`), plus the optional
    /// USING, INCLUDE, NULLS [NOT] DISTINCT, WITH, WHERE, index-option, and
    /// trailing ALGORITHM/LOCK clauses.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // If ON follows immediately (and IF NOT EXISTS was absent), the
        // index is anonymous; otherwise parse the index name first.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // `USING method` may appear before ON (MySQL-style placement).
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // `USING method` may also appear after the table name
        // (PostgreSQL-style); the later occurrence wins if both are given.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Optional covering columns: INCLUDE (col [, ...]).
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // Optional NULLS [NOT] DISTINCT; Some(true) = NULLS DISTINCT.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Optional storage parameters: WITH (param [, ...]) — dialect-gated.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate: WHERE expr.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing MySQL ALGORITHM=/LOCK= options, parsed as alter
        // operations.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8404
8405 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8407 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8408 let name = self.parse_identifier()?;
8409
8410 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8411 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8412 Some(self.parse_identifier()?)
8413 } else {
8414 None
8415 };
8416
8417 let version = if self.parse_keyword(Keyword::VERSION) {
8418 Some(self.parse_identifier()?)
8419 } else {
8420 None
8421 };
8422
8423 let cascade = self.parse_keyword(Keyword::CASCADE);
8424
8425 (schema, version, cascade)
8426 } else {
8427 (None, None, false)
8428 };
8429
8430 Ok(CreateExtension {
8431 name,
8432 if_not_exists,
8433 schema,
8434 version,
8435 cascade,
8436 })
8437 }
8438
8439 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8441 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8442 let name = self.parse_object_name(false)?;
8443
8444 let definition = if self.parse_keyword(Keyword::FROM) {
8445 CreateCollationDefinition::From(self.parse_object_name(false)?)
8446 } else if self.consume_token(&Token::LParen) {
8447 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8448 self.expect_token(&Token::RParen)?;
8449 CreateCollationDefinition::Options(options)
8450 } else {
8451 return self.expected_ref(
8452 "FROM or parenthesized option list after CREATE COLLATION name",
8453 self.peek_token_ref(),
8454 );
8455 };
8456
8457 Ok(CreateCollation {
8458 if_not_exists,
8459 name,
8460 definition,
8461 })
8462 }
8463
8464 pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
8466 if self.parse_keyword(Keyword::CONFIGURATION) {
8467 let name = self.parse_object_name(false)?;
8468 self.expect_token(&Token::LParen)?;
8469 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8470 self.expect_token(&Token::RParen)?;
8471 Ok(Statement::CreateTextSearchConfiguration(
8472 CreateTextSearchConfiguration { name, options },
8473 ))
8474 } else if self.parse_keyword(Keyword::DICTIONARY) {
8475 let name = self.parse_object_name(false)?;
8476 self.expect_token(&Token::LParen)?;
8477 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8478 self.expect_token(&Token::RParen)?;
8479 Ok(Statement::CreateTextSearchDictionary(
8480 CreateTextSearchDictionary { name, options },
8481 ))
8482 } else if self.parse_keyword(Keyword::PARSER) {
8483 let name = self.parse_object_name(false)?;
8484 self.expect_token(&Token::LParen)?;
8485 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8486 self.expect_token(&Token::RParen)?;
8487 Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
8488 name,
8489 options,
8490 }))
8491 } else if self.parse_keyword(Keyword::TEMPLATE) {
8492 let name = self.parse_object_name(false)?;
8493 self.expect_token(&Token::LParen)?;
8494 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8495 self.expect_token(&Token::RParen)?;
8496 Ok(Statement::CreateTextSearchTemplate(
8497 CreateTextSearchTemplate { name, options },
8498 ))
8499 } else {
8500 self.expected_ref(
8501 "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
8502 self.peek_token_ref(),
8503 )
8504 }
8505 }
8506
8507 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8509 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8510 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8511 let cascade_or_restrict =
8512 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8513 Ok(Statement::DropExtension(DropExtension {
8514 names,
8515 if_exists,
8516 cascade_or_restrict: cascade_or_restrict
8517 .map(|k| match k {
8518 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8519 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8520 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8521 })
8522 .transpose()?,
8523 }))
8524 }
8525
8526 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8529 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8530 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8531 let drop_behavior = self.parse_optional_drop_behavior();
8532 Ok(Statement::DropOperator(DropOperator {
8533 if_exists,
8534 operators,
8535 drop_behavior,
8536 }))
8537 }
8538
8539 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8542 let name = self.parse_operator_name()?;
8543 self.expect_token(&Token::LParen)?;
8544
8545 let left_type = if self.parse_keyword(Keyword::NONE) {
8547 None
8548 } else {
8549 Some(self.parse_data_type()?)
8550 };
8551
8552 self.expect_token(&Token::Comma)?;
8553
8554 let right_type = self.parse_data_type()?;
8556
8557 self.expect_token(&Token::RParen)?;
8558
8559 Ok(DropOperatorSignature {
8560 name,
8561 left_type,
8562 right_type,
8563 })
8564 }
8565
8566 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8570 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8571 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8572 self.expect_keyword(Keyword::USING)?;
8573 let using = self.parse_identifier()?;
8574 let drop_behavior = self.parse_optional_drop_behavior();
8575 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8576 if_exists,
8577 names,
8578 using,
8579 drop_behavior,
8580 }))
8581 }
8582
8583 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8587 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8588 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8589 self.expect_keyword(Keyword::USING)?;
8590 let using = self.parse_identifier()?;
8591 let drop_behavior = self.parse_optional_drop_behavior();
8592 Ok(Statement::DropOperatorClass(DropOperatorClass {
8593 if_exists,
8594 names,
8595 using,
8596 drop_behavior,
8597 }))
8598 }
8599
8600 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8604 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8605 self.expect_token(&Token::LParen)?;
8606 let columns =
8607 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8608 self.expect_token(&Token::RParen)?;
8609 Ok(HiveDistributionStyle::PARTITIONED { columns })
8610 } else {
8611 Ok(HiveDistributionStyle::NONE)
8612 }
8613 }
8614
8615 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8619 let token = self.next_token();
8620 match &token.token {
8621 Token::Word(w) => match w.keyword {
8622 Keyword::AUTO => Ok(DistStyle::Auto),
8623 Keyword::EVEN => Ok(DistStyle::Even),
8624 Keyword::KEY => Ok(DistStyle::Key),
8625 Keyword::ALL => Ok(DistStyle::All),
8626 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8627 },
8628 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8629 }
8630 }
8631
    /// Parses the Hive storage clauses that may follow a `CREATE TABLE`
    /// column list: `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'`,
    /// and `WITH SERDEPROPERTIES (...)`, in any order and combination.
    ///
    /// Returns `None` when none of the clauses is present; otherwise a
    /// `HiveFormat` with the parsed pieces filled in lazily via
    /// `get_or_insert_with`.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    // ROW FORMAT { SERDE '...' | DELIMITED ... }
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // STORED AS INPUTFORMAT <expr> OUTPUTFORMAT <expr>
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // STORED AS <file format keyword>
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Rewind so the options helper can match the whole
                    // `WITH SERDEPROPERTIES` prefix itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // The WITH did not introduce SERDEPROPERTIES; stop.
                        break;
                    }
                }
                None => break,
                // Required for exhaustiveness; `parse_one_of_keywords` only
                // returns keywords from the list above.
                _ => break,
            }
        }

        Ok(hive_format)
    }
8687
    /// Parses a Hive `ROW FORMAT` clause (the `ROW` keyword has already
    /// been consumed by the caller): either `FORMAT SERDE '<class>'` or
    /// `FORMAT [DELIMITED]` followed by any number of delimiter
    /// sub-clauses (`FIELDS TERMINATED BY`, `COLLECTION ITEMS TERMINATED
    /// BY`, `MAP KEYS TERMINATED BY`, `LINES TERMINATED BY`,
    /// `NULL DEFINED AS`).
    ///
    /// NOTE(review): when a leading keyword (e.g. `FIELDS`) matches but the
    /// rest of its sub-clause does not, the loop exits without rewinding
    /// the already-consumed keyword.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED (or neither keyword): collect delimiter
                // specifications until nothing more matches.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            // FIELDS TERMINATED BY x [ESCAPED BY y]
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            // COLLECTION ITEMS TERMINATED BY x
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            // MAP KEYS TERMINATED BY x
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            // LINES TERMINATED BY x
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            // NULL DEFINED AS x
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8784
8785 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8786 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8787 Ok(Some(self.parse_identifier()?))
8788 } else {
8789 Ok(None)
8790 }
8791 }
8792
    /// Parses the remainder of a `CREATE TABLE` statement, after the
    /// `CREATE [OR REPLACE] [TEMPORARY | GLOBAL | TRANSIENT] TABLE` prefix
    /// has been consumed; the prefix flags are passed in by the caller.
    ///
    /// Clauses from several dialects are handled in a fixed order (Hive,
    /// BigQuery, ClickHouse, Redshift, SQLite, Postgres, ...); the result
    /// is accumulated in a `CreateTableBuilder`.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<CreateTable, ParserError> {
        // BigQuery permits unquoted hyphens inside table names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // `CREATE TABLE name PARTITION OF parent ...`
        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // `CLONE <name>`; a failed name parse is treated as no clone
        // (`.ok()` swallows the error).
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        let (columns, constraints) = self.parse_columns()?;
        // Hive: `COMMENT '...'` directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // A `PARTITION OF` table must specify its bounds.
        let for_values = if partition_of.is_some() {
            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
                Some(self.parse_partition_for_values()?)
            } else {
                return self.expected_ref(
                    "FOR VALUES or DEFAULT after PARTITION OF",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        // SQLite: `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive storage clauses.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse/Generic: table-level `PRIMARY KEY <expr>`.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // `ORDER BY <expr>` or `ORDER BY (<expr>, ...)`; the parenthesized
        // list may be empty.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token_ref().token != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite: `STRICT` tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // Redshift: `BACKUP { YES | NO }`.
        let backup = if self.parse_keyword(Keyword::BACKUP) {
            let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
            Some(keyword == Keyword::YES)
        } else {
            None
        };

        // Redshift distribution and sort clauses.
        let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
            Some(self.parse_dist_style()?)
        } else {
            None
        };
        let distkey = if self.parse_keyword(Keyword::DISTKEY) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };
        let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        // `AS <query>`, or — for dialects that support it — a bare SELECT.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            // Rewind so `parse_query` sees the SELECT keyword itself.
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .backup(backup)
            .diststyle(diststyle)
            .distkey(distkey)
            .sortkey(sortkey)
            .build())
    }
8977
    /// Parses an optional `LIKE` clause in `CREATE TABLE`.
    ///
    /// Two shapes are recognized:
    /// - parenthesized, only when the dialect supports it:
    ///   `(LIKE <name> [INCLUDING DEFAULTS | EXCLUDING DEFAULTS])`;
    /// - plain: `LIKE <name>` or `ILIKE <name>`.
    ///
    /// Returns `Ok(None)` when no LIKE clause is present; a consumed `(`
    /// that turned out not to start a LIKE clause is pushed back.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // Not a LIKE clause — put the `(` back; it most likely
                // starts the column list instead.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
9015
9016 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
9017 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
9018 Ok(OnCommit::DeleteRows)
9019 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
9020 Ok(OnCommit::PreserveRows)
9021 } else if self.parse_keywords(&[Keyword::DROP]) {
9022 Ok(OnCommit::Drop)
9023 } else {
9024 parser_err!(
9025 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
9026 self.peek_token_ref()
9027 )
9028 }
9029 }
9030
    /// Parses the partition bound specification that follows
    /// `CREATE TABLE ... PARTITION OF parent`:
    ///
    /// - `DEFAULT`
    /// - `FOR VALUES IN (expr, ...)`
    /// - `FOR VALUES FROM (bound, ...) TO (bound, ...)`
    /// - `FOR VALUES WITH (MODULUS n, REMAINDER r)`
    ///
    /// Empty value lists are rejected with an explicit error.
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            self.expect_token(&Token::LParen)?;
            // Reject `IN ()` up front for a clearer error message.
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            // Range bounds may be MINVALUE/MAXVALUE or expressions.
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            self.expect_token(&Token::LParen)?;
            // Hash partitioning: WITH (MODULUS n, REMAINDER r).
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
        }
    }
9082
9083 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9085 if self.parse_keyword(Keyword::MINVALUE) {
9086 Ok(PartitionBoundValue::MinValue)
9087 } else if self.parse_keyword(Keyword::MAXVALUE) {
9088 Ok(PartitionBoundValue::MaxValue)
9089 } else {
9090 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9091 }
9092 }
9093
    /// Parses trailing `CREATE TABLE` configuration clauses: `INHERITS`,
    /// `WITH (...)`, `TBLPROPERTIES (...)`, `PARTITION BY`, `CLUSTER BY`,
    /// `OPTIONS (...)`, and finally plain key-value options.
    ///
    /// Only one options representation is kept: each later non-empty
    /// options clause overwrites `table_options` set by an earlier one.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // Postgres: INHERITS (parent, ...).
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // WITH (...) options.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive: TBLPROPERTIES (...); overrides WITH options if present.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            // BigQuery: CLUSTER BY expr, ...
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // BigQuery: OPTIONS (...); overrides any earlier options.
            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain (MySQL-style) key-value options when nothing
        // else matched; Hive is excluded.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
9158
    /// Parses a single "plain" table option, returning `Ok(None)` when the
    /// next tokens do not start a recognized option.
    ///
    /// Options with a special shape (`START TRANSACTION`, `COMMENT`,
    /// `ENGINE`, `TABLESPACE`, `UNION`) are handled explicitly; everything
    /// else is parsed as a generic `<key> [=] <value>` pair.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // START TRANSACTION is recorded as a bare identifier option.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // COMMENT [=] '<string>' — whether `=` appeared is preserved so the
        // statement can round-trip.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // ENGINE [=] name [(param, ...)]
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // TABLESPACE [=] name [STORAGE [=] {DISK | MEMORY}]
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                // The tablespace name may be a bare word or a quoted string.
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // UNION [=] (table, ...)
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Generic `<key> [=] <value>` options; the first matching keyword
        // (sequence) determines the canonical key spelling.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option keyword.
            return Ok(None);
        };

        // The `=` between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // Prefer a literal value; fall back to a bare identifier.
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9351
9352 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9354 let mut options = Vec::new();
9355
9356 while let Some(option) = self.parse_plain_option()? {
9357 options.push(option);
9358 let _ = self.consume_token(&Token::Comma);
9361 }
9362
9363 Ok(options)
9364 }
9365
9366 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9368 let comment = if self.parse_keyword(Keyword::COMMENT) {
9369 let has_eq = self.consume_token(&Token::Eq);
9370 let comment = self.parse_comment_value()?;
9371 Some(if has_eq {
9372 CommentDef::WithEq(comment)
9373 } else {
9374 CommentDef::WithoutEq(comment)
9375 })
9376 } else {
9377 None
9378 };
9379 Ok(comment)
9380 }
9381
9382 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9384 let next_token = self.next_token();
9385 let value = match next_token.token {
9386 Token::SingleQuotedString(str) => str,
9387 Token::DollarQuotedString(str) => str.value,
9388 _ => self.expected("string literal", next_token)?,
9389 };
9390 Ok(value)
9391 }
9392
    /// Parses an optional parenthesized list of procedure parameters.
    ///
    /// Returns `Ok(Some(vec![]))` both when there is no `(` at all and when
    /// the list is empty (`()`); the two cases are not distinguished.
    /// A trailing comma before the closing `)` is accepted.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // Only a word can start a parameter definition.
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // End of the list (possibly after a trailing comma).
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9418
    /// Parses a parenthesized `CREATE TABLE` element list into column
    /// definitions and table-level constraints.
    ///
    /// Returns empty vectors when there is no `(` at all or the list is
    /// `()`. A trailing comma before `)` is accepted only when the dialect
    /// or parser options allow it.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Each element is either a table constraint or (when the next
            // token is a word) a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Stop at `)`. A `,)` combination is only valid when trailing
            // commas are supported; otherwise the loop re-enters and the
            // element parse produces the error.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9459
9460 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9462 let mode = if self.parse_keyword(Keyword::IN) {
9463 Some(ArgMode::In)
9464 } else if self.parse_keyword(Keyword::OUT) {
9465 Some(ArgMode::Out)
9466 } else if self.parse_keyword(Keyword::INOUT) {
9467 Some(ArgMode::InOut)
9468 } else {
9469 None
9470 };
9471 let name = self.parse_identifier()?;
9472 let data_type = self.parse_data_type()?;
9473 let default = if self.consume_token(&Token::Eq) {
9474 Some(self.parse_expr()?)
9475 } else {
9476 None
9477 };
9478
9479 Ok(ProcedureParam {
9480 name,
9481 data_type,
9482 mode,
9483 default,
9484 })
9485 }
9486
    /// Parses a column definition with a required data type; thin wrapper
    /// around `parse_column_def_inner(false)`.
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9491
    /// Parses a column definition: name, data type, and a list of column
    /// options.
    ///
    /// When `optional_data_type` is true (used e.g. for Hive
    /// `PARTITIONED BY` columns), a missing type is recorded as
    /// `DataType::Unspecified`; SQLite also permits typeless columns
    /// regardless of the flag (see `is_column_type_sqlite_unspecified`).
    fn parse_column_def_inner(
        &mut self,
        optional_data_type: bool,
    ) -> Result<ColumnDef, ParserError> {
        let col_name = self.parse_identifier()?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else if optional_data_type {
            self.maybe_parse(|parser| parser.parse_data_type())?
                .unwrap_or(DataType::Unspecified)
        } else {
            self.parse_data_type()?
        };
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // Named option: CONSTRAINT <name> <option>; the option part
                // is mandatory once CONSTRAINT is seen.
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected_ref(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token_ref(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                // No more options for this column.
                break;
            };
        }
        Ok(ColumnDef {
            name: col_name,
            data_type,
            options,
        })
    }
9529
9530 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9531 if dialect_of!(self is SQLiteDialect) {
9532 match &self.peek_token_ref().token {
9533 Token::Word(word) => matches!(
9534 word.keyword,
9535 Keyword::CONSTRAINT
9536 | Keyword::PRIMARY
9537 | Keyword::NOT
9538 | Keyword::UNIQUE
9539 | Keyword::CHECK
9540 | Keyword::DEFAULT
9541 | Keyword::COLLATE
9542 | Keyword::REFERENCES
9543 | Keyword::GENERATED
9544 | Keyword::AS
9545 ),
9546 _ => true, }
9548 } else {
9549 false
9550 }
9551 }
9552
    /// Parses a single column option, if one is present.
    ///
    /// The dialect's `parse_column_option` hook gets first refusal; when it
    /// declines, the generic parsing runs with the parser state set to
    /// `ColumnDefinition` so downstream parsing can take the
    /// column-definition context into account.
    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if let Some(option) = self.dialect.parse_column_option(self)? {
            return option;
        }

        self.with_state(
            ColumnDefinition,
            |parser| -> Result<Option<ColumnOption>, ParserError> {
                parser.parse_optional_column_option_inner()
            },
        )
    }
9566
9567 fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9568 if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
9569 Ok(Some(ColumnOption::CharacterSet(
9570 self.parse_object_name(false)?,
9571 )))
9572 } else if self.parse_keywords(&[Keyword::COLLATE]) {
9573 Ok(Some(ColumnOption::Collation(
9574 self.parse_object_name(false)?,
9575 )))
9576 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
9577 Ok(Some(ColumnOption::NotNull))
9578 } else if self.parse_keywords(&[Keyword::COMMENT]) {
9579 Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
9580 } else if self.parse_keyword(Keyword::NULL) {
9581 Ok(Some(ColumnOption::Null))
9582 } else if self.parse_keyword(Keyword::DEFAULT) {
9583 Ok(Some(ColumnOption::Default(self.parse_expr()?)))
9584 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9585 && self.parse_keyword(Keyword::MATERIALIZED)
9586 {
9587 Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
9588 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9589 && self.parse_keyword(Keyword::ALIAS)
9590 {
9591 Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
9592 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9593 && self.parse_keyword(Keyword::EPHEMERAL)
9594 {
9595 if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
9598 Ok(Some(ColumnOption::Ephemeral(None)))
9599 } else {
9600 Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
9601 }
9602 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9603 let characteristics = self.parse_constraint_characteristics()?;
9604 Ok(Some(
9605 PrimaryKeyConstraint {
9606 name: None,
9607 index_name: None,
9608 index_type: None,
9609 columns: vec![],
9610 index_options: vec![],
9611 characteristics,
9612 }
9613 .into(),
9614 ))
9615 } else if self.parse_keyword(Keyword::UNIQUE) {
9616 let index_type_display =
9617 if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
9618 KeyOrIndexDisplay::Key
9619 } else {
9620 KeyOrIndexDisplay::None
9621 };
9622 let characteristics = self.parse_constraint_characteristics()?;
9623 Ok(Some(
9624 UniqueConstraint {
9625 name: None,
9626 index_name: None,
9627 index_type_display,
9628 index_type: None,
9629 columns: vec![],
9630 index_options: vec![],
9631 characteristics,
9632 nulls_distinct: NullsDistinctOption::None,
9633 }
9634 .into(),
9635 ))
9636 } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
9637 let characteristics = self.parse_constraint_characteristics()?;
9640 Ok(Some(
9641 PrimaryKeyConstraint {
9642 name: None,
9643 index_name: None,
9644 index_type: None,
9645 columns: vec![],
9646 index_options: vec![],
9647 characteristics,
9648 }
9649 .into(),
9650 ))
9651 } else if self.parse_keyword(Keyword::REFERENCES) {
9652 let foreign_table = self.parse_object_name(false)?;
9653 let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9656 let mut match_kind = None;
9657 let mut on_delete = None;
9658 let mut on_update = None;
9659 loop {
9660 if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9661 match_kind = Some(self.parse_match_kind()?);
9662 } else if on_delete.is_none()
9663 && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9664 {
9665 on_delete = Some(self.parse_referential_action()?);
9666 } else if on_update.is_none()
9667 && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9668 {
9669 on_update = Some(self.parse_referential_action()?);
9670 } else {
9671 break;
9672 }
9673 }
9674 let characteristics = self.parse_constraint_characteristics()?;
9675
9676 Ok(Some(
9677 ForeignKeyConstraint {
9678 name: None, index_name: None, columns: vec![], foreign_table,
9682 referred_columns,
9683 on_delete,
9684 on_update,
9685 match_kind,
9686 characteristics,
9687 }
9688 .into(),
9689 ))
9690 } else if self.parse_keyword(Keyword::CHECK) {
9691 self.expect_token(&Token::LParen)?;
9692 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9694 self.expect_token(&Token::RParen)?;
9695
9696 let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9697 Some(true)
9698 } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9699 Some(false)
9700 } else {
9701 None
9702 };
9703
9704 Ok(Some(
9705 CheckConstraint {
9706 name: None, expr: Box::new(expr),
9708 enforced,
9709 }
9710 .into(),
9711 ))
9712 } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
9713 && dialect_of!(self is MySqlDialect | GenericDialect)
9714 {
9715 Ok(Some(ColumnOption::DialectSpecific(vec![
9717 Token::make_keyword("AUTO_INCREMENT"),
9718 ])))
9719 } else if self.parse_keyword(Keyword::AUTOINCREMENT)
9720 && dialect_of!(self is SQLiteDialect | GenericDialect)
9721 {
9722 Ok(Some(ColumnOption::DialectSpecific(vec![
9724 Token::make_keyword("AUTOINCREMENT"),
9725 ])))
9726 } else if self.parse_keyword(Keyword::ASC)
9727 && self.dialect.supports_asc_desc_in_column_definition()
9728 {
9729 Ok(Some(ColumnOption::DialectSpecific(vec![
9731 Token::make_keyword("ASC"),
9732 ])))
9733 } else if self.parse_keyword(Keyword::DESC)
9734 && self.dialect.supports_asc_desc_in_column_definition()
9735 {
9736 Ok(Some(ColumnOption::DialectSpecific(vec![
9738 Token::make_keyword("DESC"),
9739 ])))
9740 } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9741 && dialect_of!(self is MySqlDialect | GenericDialect)
9742 {
9743 let expr = self.parse_expr()?;
9744 Ok(Some(ColumnOption::OnUpdate(expr)))
9745 } else if self.parse_keyword(Keyword::GENERATED) {
9746 self.parse_optional_column_option_generated()
9747 } else if dialect_of!(self is BigQueryDialect | GenericDialect)
9748 && self.parse_keyword(Keyword::OPTIONS)
9749 {
9750 self.prev_token();
9751 Ok(Some(ColumnOption::Options(
9752 self.parse_options(Keyword::OPTIONS)?,
9753 )))
9754 } else if self.parse_keyword(Keyword::AS)
9755 && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
9756 {
9757 self.parse_optional_column_option_as()
9758 } else if self.parse_keyword(Keyword::SRID)
9759 && dialect_of!(self is MySqlDialect | GenericDialect)
9760 {
9761 Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
9762 } else if self.parse_keyword(Keyword::IDENTITY)
9763 && dialect_of!(self is MsSqlDialect | GenericDialect)
9764 {
9765 let parameters = if self.consume_token(&Token::LParen) {
9766 let seed = self.parse_number()?;
9767 self.expect_token(&Token::Comma)?;
9768 let increment = self.parse_number()?;
9769 self.expect_token(&Token::RParen)?;
9770
9771 Some(IdentityPropertyFormatKind::FunctionCall(
9772 IdentityParameters { seed, increment },
9773 ))
9774 } else {
9775 None
9776 };
9777 Ok(Some(ColumnOption::Identity(
9778 IdentityPropertyKind::Identity(IdentityProperty {
9779 parameters,
9780 order: None,
9781 }),
9782 )))
9783 } else if dialect_of!(self is SQLiteDialect | GenericDialect)
9784 && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
9785 {
9786 Ok(Some(ColumnOption::OnConflict(
9788 self.expect_one_of_keywords(&[
9789 Keyword::ROLLBACK,
9790 Keyword::ABORT,
9791 Keyword::FAIL,
9792 Keyword::IGNORE,
9793 Keyword::REPLACE,
9794 ])?,
9795 )))
9796 } else if self.parse_keyword(Keyword::INVISIBLE) {
9797 Ok(Some(ColumnOption::Invisible))
9798 } else {
9799 Ok(None)
9800 }
9801 }
9802
9803 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9804 let name = self.parse_object_name(false)?;
9805 self.expect_token(&Token::Eq)?;
9806 let value = self.parse_literal_string()?;
9807
9808 Ok(Tag::new(name, value))
9809 }
9810
9811 fn parse_optional_column_option_generated(
9812 &mut self,
9813 ) -> Result<Option<ColumnOption>, ParserError> {
9814 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
9815 let mut sequence_options = vec![];
9816 if self.expect_token(&Token::LParen).is_ok() {
9817 sequence_options = self.parse_create_sequence_options()?;
9818 self.expect_token(&Token::RParen)?;
9819 }
9820 Ok(Some(ColumnOption::Generated {
9821 generated_as: GeneratedAs::Always,
9822 sequence_options: Some(sequence_options),
9823 generation_expr: None,
9824 generation_expr_mode: None,
9825 generated_keyword: true,
9826 }))
9827 } else if self.parse_keywords(&[
9828 Keyword::BY,
9829 Keyword::DEFAULT,
9830 Keyword::AS,
9831 Keyword::IDENTITY,
9832 ]) {
9833 let mut sequence_options = vec![];
9834 if self.expect_token(&Token::LParen).is_ok() {
9835 sequence_options = self.parse_create_sequence_options()?;
9836 self.expect_token(&Token::RParen)?;
9837 }
9838 Ok(Some(ColumnOption::Generated {
9839 generated_as: GeneratedAs::ByDefault,
9840 sequence_options: Some(sequence_options),
9841 generation_expr: None,
9842 generation_expr_mode: None,
9843 generated_keyword: true,
9844 }))
9845 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
9846 if self.expect_token(&Token::LParen).is_ok() {
9847 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9848 self.expect_token(&Token::RParen)?;
9849 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9850 Ok((
9851 GeneratedAs::ExpStored,
9852 Some(GeneratedExpressionMode::Stored),
9853 ))
9854 } else if dialect_of!(self is PostgreSqlDialect) {
9855 self.expected_ref("STORED", self.peek_token_ref())
9857 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9858 Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
9859 } else {
9860 Ok((GeneratedAs::Always, None))
9861 }?;
9862
9863 Ok(Some(ColumnOption::Generated {
9864 generated_as: gen_as,
9865 sequence_options: None,
9866 generation_expr: Some(expr),
9867 generation_expr_mode: expr_mode,
9868 generated_keyword: true,
9869 }))
9870 } else {
9871 Ok(None)
9872 }
9873 } else {
9874 Ok(None)
9875 }
9876 }
9877
9878 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9879 self.expect_token(&Token::LParen)?;
9881 let expr = self.parse_expr()?;
9882 self.expect_token(&Token::RParen)?;
9883
9884 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9885 (
9886 GeneratedAs::ExpStored,
9887 Some(GeneratedExpressionMode::Stored),
9888 )
9889 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9890 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9891 } else {
9892 (GeneratedAs::Always, None)
9893 };
9894
9895 Ok(Some(ColumnOption::Generated {
9896 generated_as: gen_as,
9897 sequence_options: None,
9898 generation_expr: Some(expr),
9899 generation_expr_mode: expr_mode,
9900 generated_keyword: false,
9901 }))
9902 }
9903
9904 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9906 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9907 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9908 {
9909 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9910
9911 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9912 self.expect_token(&Token::LParen)?;
9913 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9914 self.expect_token(&Token::RParen)?;
9915 Some(sorted_by_columns)
9916 } else {
9917 None
9918 };
9919
9920 self.expect_keyword_is(Keyword::INTO)?;
9921 let num_buckets = self.parse_number_value()?.value;
9922 self.expect_keyword_is(Keyword::BUCKETS)?;
9923 Some(ClusteredBy {
9924 columns,
9925 sorted_by,
9926 num_buckets,
9927 })
9928 } else {
9929 None
9930 };
9931 Ok(clustered_by)
9932 }
9933
9934 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9938 if self.parse_keyword(Keyword::RESTRICT) {
9939 Ok(ReferentialAction::Restrict)
9940 } else if self.parse_keyword(Keyword::CASCADE) {
9941 Ok(ReferentialAction::Cascade)
9942 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9943 Ok(ReferentialAction::SetNull)
9944 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9945 Ok(ReferentialAction::NoAction)
9946 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9947 Ok(ReferentialAction::SetDefault)
9948 } else {
9949 self.expected_ref(
9950 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9951 self.peek_token_ref(),
9952 )
9953 }
9954 }
9955
9956 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9958 if self.parse_keyword(Keyword::FULL) {
9959 Ok(ConstraintReferenceMatchKind::Full)
9960 } else if self.parse_keyword(Keyword::PARTIAL) {
9961 Ok(ConstraintReferenceMatchKind::Partial)
9962 } else if self.parse_keyword(Keyword::SIMPLE) {
9963 Ok(ConstraintReferenceMatchKind::Simple)
9964 } else {
9965 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9966 }
9967 }
9968
9969 fn parse_constraint_using_index(
9972 &mut self,
9973 name: Option<Ident>,
9974 ) -> Result<ConstraintUsingIndex, ParserError> {
9975 let index_name = self.parse_identifier()?;
9976 let characteristics = self.parse_constraint_characteristics()?;
9977 Ok(ConstraintUsingIndex {
9978 name,
9979 index_name,
9980 characteristics,
9981 })
9982 }
9983
9984 pub fn parse_constraint_characteristics(
9986 &mut self,
9987 ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9988 let mut cc = ConstraintCharacteristics::default();
9989
9990 loop {
9991 if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9992 {
9993 cc.deferrable = Some(false);
9994 } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9995 cc.deferrable = Some(true);
9996 } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9997 if self.parse_keyword(Keyword::DEFERRED) {
9998 cc.initially = Some(DeferrableInitial::Deferred);
9999 } else if self.parse_keyword(Keyword::IMMEDIATE) {
10000 cc.initially = Some(DeferrableInitial::Immediate);
10001 } else {
10002 self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
10003 }
10004 } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
10005 cc.enforced = Some(true);
10006 } else if cc.enforced.is_none()
10007 && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
10008 {
10009 cc.enforced = Some(false);
10010 } else {
10011 break;
10012 }
10013 }
10014
10015 if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
10016 Ok(Some(cc))
10017 } else {
10018 Ok(None)
10019 }
10020 }
10021
    /// Parses a table-level constraint if one follows, e.g.
    /// `[CONSTRAINT [<name>]] {UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK |
    /// INDEX | FULLTEXT | SPATIAL | EXCLUDE} ...`.
    ///
    /// Returns `Ok(None)` — consuming nothing — when the next tokens do not
    /// start a table constraint; errors when `CONSTRAINT <name>` was consumed
    /// but no valid constraint body follows.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT [<name>]` prefix. Some dialects allow the
        // keyword with no name when a constraint-body keyword follows.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // Unnamed FULLTEXT/SPATIAL is only a table constraint on MySQL-like
        // dialects; elsewhere, bail out early without consuming anything.
        if name.is_none()
            && self
                .peek_one_of_keywords(&[Keyword::FULLTEXT, Keyword::SPATIAL])
                .is_some()
            && !dialect_of!(self is GenericDialect | MySqlDialect)
        {
            return Ok(None);
        }

        // Consume the keyword that selects the constraint kind; the default
        // match arm pushes it back if nothing matches.
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX <name>` references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // `UNIQUE KEY` / `UNIQUE INDEX` spelling is MySQL-specific.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                // Optional `NULLS [NOT] DISTINCT` modifier.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX <name>` references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                // FOREIGN KEY (cols) REFERENCES table [(cols)] [clauses...]
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE in any order, each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                // CHECK ( expr ) [[NOT] ENFORCED]
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                // MySQL `INDEX`/`KEY` table "constraint"; only valid unnamed.
                let display_as_key = w.keyword == Keyword::KEY;

                // A following `USING` belongs to the index type, not the name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // FULLTEXT/SPATIAL may not carry a `CONSTRAINT <name>` prefix.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // EXCLUDE [USING method] (elem WITH op, ...)
                //   [INCLUDE (cols)] [WHERE (pred)] [characteristics]
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements =
                    self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // `CONSTRAINT <name>` with no valid body is an error.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint at all: push the token back.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10311
10312 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10313 let expr = self.parse_expr()?;
10314 self.expect_keyword_is(Keyword::WITH)?;
10315 let operator_token = self.next_token();
10316 let operator = operator_token.token.to_string();
10317 Ok(ExclusionElement { expr, operator })
10318 }
10319
10320 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10321 Ok(if self.parse_keyword(Keyword::NULLS) {
10322 let not = self.parse_keyword(Keyword::NOT);
10323 self.expect_keyword_is(Keyword::DISTINCT)?;
10324 if not {
10325 NullsDistinctOption::NotDistinct
10326 } else {
10327 NullsDistinctOption::Distinct
10328 }
10329 } else {
10330 NullsDistinctOption::None
10331 })
10332 }
10333
10334 pub fn maybe_parse_options(
10336 &mut self,
10337 keyword: Keyword,
10338 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10339 if let Token::Word(word) = &self.peek_token_ref().token {
10340 if word.keyword == keyword {
10341 return Ok(Some(self.parse_options(keyword)?));
10342 }
10343 };
10344 Ok(None)
10345 }
10346
10347 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10349 if self.parse_keyword(keyword) {
10350 self.expect_token(&Token::LParen)?;
10351 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10352 self.expect_token(&Token::RParen)?;
10353 Ok(options)
10354 } else {
10355 Ok(vec![])
10356 }
10357 }
10358
10359 pub fn parse_options_with_keywords(
10361 &mut self,
10362 keywords: &[Keyword],
10363 ) -> Result<Vec<SqlOption>, ParserError> {
10364 if self.parse_keywords(keywords) {
10365 self.expect_token(&Token::LParen)?;
10366 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10367 self.expect_token(&Token::RParen)?;
10368 Ok(options)
10369 } else {
10370 Ok(vec![])
10371 }
10372 }
10373
10374 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10376 Ok(if self.parse_keyword(Keyword::BTREE) {
10377 IndexType::BTree
10378 } else if self.parse_keyword(Keyword::HASH) {
10379 IndexType::Hash
10380 } else if self.parse_keyword(Keyword::GIN) {
10381 IndexType::GIN
10382 } else if self.parse_keyword(Keyword::GIST) {
10383 IndexType::GiST
10384 } else if self.parse_keyword(Keyword::SPGIST) {
10385 IndexType::SPGiST
10386 } else if self.parse_keyword(Keyword::BRIN) {
10387 IndexType::BRIN
10388 } else if self.parse_keyword(Keyword::BLOOM) {
10389 IndexType::Bloom
10390 } else {
10391 IndexType::Custom(self.parse_identifier()?)
10392 })
10393 }
10394
10395 pub fn parse_optional_using_then_index_type(
10402 &mut self,
10403 ) -> Result<Option<IndexType>, ParserError> {
10404 if self.parse_keyword(Keyword::USING) {
10405 Ok(Some(self.parse_index_type()?))
10406 } else {
10407 Ok(None)
10408 }
10409 }
10410
    /// Parses an identifier if one is present; backtracks (consuming
    /// nothing) and returns `Ok(None)` when the next token cannot be
    /// parsed as an identifier.
    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
        self.maybe_parse(|parser| parser.parse_identifier())
    }
10417
10418 #[must_use]
10419 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10421 if self.parse_keyword(Keyword::KEY) {
10422 KeyOrIndexDisplay::Key
10423 } else if self.parse_keyword(Keyword::INDEX) {
10424 KeyOrIndexDisplay::Index
10425 } else {
10426 KeyOrIndexDisplay::None
10427 }
10428 }
10429
10430 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10432 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10433 Ok(Some(IndexOption::Using(index_type)))
10434 } else if self.parse_keyword(Keyword::COMMENT) {
10435 let s = self.parse_literal_string()?;
10436 Ok(Some(IndexOption::Comment(s)))
10437 } else {
10438 Ok(None)
10439 }
10440 }
10441
10442 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10444 let mut options = Vec::new();
10445
10446 loop {
10447 match self.parse_optional_index_option()? {
10448 Some(index_option) => options.push(index_option),
10449 None => return Ok(options),
10450 }
10451 }
10452 }
10453
10454 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10456 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10457
10458 match &self.peek_token_ref().token {
10459 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10460 Ok(SqlOption::Ident(self.parse_identifier()?))
10461 }
10462 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10463 self.parse_option_partition()
10464 }
10465 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10466 self.parse_option_clustered()
10467 }
10468 _ => {
10469 let name = self.parse_identifier()?;
10470 self.expect_token(&Token::Eq)?;
10471 let value = self.parse_expr()?;
10472
10473 Ok(SqlOption::KeyValue { key: name, value })
10474 }
10475 }
10476 }
10477
10478 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
10480 if self.parse_keywords(&[
10481 Keyword::CLUSTERED,
10482 Keyword::COLUMNSTORE,
10483 Keyword::INDEX,
10484 Keyword::ORDER,
10485 ]) {
10486 Ok(SqlOption::Clustered(
10487 TableOptionsClustered::ColumnstoreIndexOrder(
10488 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10489 ),
10490 ))
10491 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
10492 Ok(SqlOption::Clustered(
10493 TableOptionsClustered::ColumnstoreIndex,
10494 ))
10495 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
10496 self.expect_token(&Token::LParen)?;
10497
10498 let columns = self.parse_comma_separated(|p| {
10499 let name = p.parse_identifier()?;
10500 let asc = p.parse_asc_desc();
10501
10502 Ok(ClusteredIndex { name, asc })
10503 })?;
10504
10505 self.expect_token(&Token::RParen)?;
10506
10507 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
10508 } else {
10509 Err(ParserError::ParserError(
10510 "invalid CLUSTERED sequence".to_string(),
10511 ))
10512 }
10513 }
10514
10515 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10517 self.expect_keyword_is(Keyword::PARTITION)?;
10518 self.expect_token(&Token::LParen)?;
10519 let column_name = self.parse_identifier()?;
10520
10521 self.expect_keyword_is(Keyword::RANGE)?;
10522 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10523 Some(PartitionRangeDirection::Left)
10524 } else if self.parse_keyword(Keyword::RIGHT) {
10525 Some(PartitionRangeDirection::Right)
10526 } else {
10527 None
10528 };
10529
10530 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10531 self.expect_token(&Token::LParen)?;
10532
10533 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10534
10535 self.expect_token(&Token::RParen)?;
10536 self.expect_token(&Token::RParen)?;
10537
10538 Ok(SqlOption::Partition {
10539 column_name,
10540 range_direction,
10541 for_values,
10542 })
10543 }
10544
10545 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10547 self.expect_token(&Token::LParen)?;
10548 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10549 self.expect_token(&Token::RParen)?;
10550 Ok(Partition::Partitions(partitions))
10551 }
10552
10553 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10555 self.expect_token(&Token::LParen)?;
10556 self.expect_keyword_is(Keyword::SELECT)?;
10557 let projection = self.parse_projection()?;
10558 let group_by = self.parse_optional_group_by()?;
10559 let order_by = self.parse_optional_order_by()?;
10560 self.expect_token(&Token::RParen)?;
10561 Ok(ProjectionSelect {
10562 projection,
10563 group_by,
10564 order_by,
10565 })
10566 }
10567 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10569 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10570 let name = self.parse_identifier()?;
10571 let query = self.parse_projection_select()?;
10572 Ok(AlterTableOperation::AddProjection {
10573 if_not_exists,
10574 name,
10575 select: query,
10576 })
10577 }
10578
10579 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10583 self.expect_keyword_is(Keyword::ALTER)?;
10584 self.expect_keyword_is(Keyword::SORTKEY)?;
10585 self.expect_token(&Token::LParen)?;
10586 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10587 self.expect_token(&Token::RParen)?;
10588 Ok(AlterTableOperation::AlterSortKey { columns })
10589 }
10590
10591 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10593 let operation = if self.parse_keyword(Keyword::ADD) {
10594 if let Some(constraint) = self.parse_optional_table_constraint()? {
10595 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10596 AlterTableOperation::AddConstraint {
10597 constraint,
10598 not_valid,
10599 }
10600 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10601 && self.parse_keyword(Keyword::PROJECTION)
10602 {
10603 return self.parse_alter_table_add_projection();
10604 } else {
10605 let if_not_exists =
10606 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10607 let mut new_partitions = vec![];
10608 loop {
10609 if self.parse_keyword(Keyword::PARTITION) {
10610 new_partitions.push(self.parse_partition()?);
10611 } else {
10612 break;
10613 }
10614 }
10615 if !new_partitions.is_empty() {
10616 AlterTableOperation::AddPartitions {
10617 if_not_exists,
10618 new_partitions,
10619 }
10620 } else {
10621 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10622
10623 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10624 {
10625 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10626 || if_not_exists
10627 } else {
10628 false
10629 };
10630
10631 let column_def = self.parse_column_def()?;
10632
10633 let column_position = self.parse_column_position()?;
10634
10635 AlterTableOperation::AddColumn {
10636 column_keyword,
10637 if_not_exists,
10638 column_def,
10639 column_position,
10640 }
10641 }
10642 }
10643 } else if self.parse_keyword(Keyword::RENAME) {
10644 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10645 let old_name = self.parse_identifier()?;
10646 self.expect_keyword_is(Keyword::TO)?;
10647 let new_name = self.parse_identifier()?;
10648 AlterTableOperation::RenameConstraint { old_name, new_name }
10649 } else if self.parse_keyword(Keyword::TO) {
10650 let table_name = self.parse_object_name(false)?;
10651 AlterTableOperation::RenameTable {
10652 table_name: RenameTableNameKind::To(table_name),
10653 }
10654 } else if self.parse_keyword(Keyword::AS) {
10655 let table_name = self.parse_object_name(false)?;
10656 AlterTableOperation::RenameTable {
10657 table_name: RenameTableNameKind::As(table_name),
10658 }
10659 } else {
10660 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10662 self.expect_keyword_is(Keyword::TO)?;
10663 let new_column_name = self.parse_identifier()?;
10664 AlterTableOperation::RenameColumn {
10665 old_column_name,
10666 new_column_name,
10667 }
10668 }
10669 } else if self.parse_keyword(Keyword::DISABLE) {
10670 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10671 AlterTableOperation::DisableRowLevelSecurity {}
10672 } else if self.parse_keyword(Keyword::RULE) {
10673 let name = self.parse_identifier()?;
10674 AlterTableOperation::DisableRule { name }
10675 } else if self.parse_keyword(Keyword::TRIGGER) {
10676 let name = self.parse_identifier()?;
10677 AlterTableOperation::DisableTrigger { name }
10678 } else {
10679 return self.expected_ref(
10680 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10681 self.peek_token_ref(),
10682 );
10683 }
10684 } else if self.parse_keyword(Keyword::ENABLE) {
10685 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10686 let name = self.parse_identifier()?;
10687 AlterTableOperation::EnableAlwaysRule { name }
10688 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10689 let name = self.parse_identifier()?;
10690 AlterTableOperation::EnableAlwaysTrigger { name }
10691 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10692 AlterTableOperation::EnableRowLevelSecurity {}
10693 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10694 let name = self.parse_identifier()?;
10695 AlterTableOperation::EnableReplicaRule { name }
10696 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10697 let name = self.parse_identifier()?;
10698 AlterTableOperation::EnableReplicaTrigger { name }
10699 } else if self.parse_keyword(Keyword::RULE) {
10700 let name = self.parse_identifier()?;
10701 AlterTableOperation::EnableRule { name }
10702 } else if self.parse_keyword(Keyword::TRIGGER) {
10703 let name = self.parse_identifier()?;
10704 AlterTableOperation::EnableTrigger { name }
10705 } else {
10706 return self.expected_ref(
10707 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10708 self.peek_token_ref(),
10709 );
10710 }
10711 } else if self.parse_keywords(&[
10712 Keyword::FORCE,
10713 Keyword::ROW,
10714 Keyword::LEVEL,
10715 Keyword::SECURITY,
10716 ]) {
10717 AlterTableOperation::ForceRowLevelSecurity
10718 } else if self.parse_keywords(&[
10719 Keyword::NO,
10720 Keyword::FORCE,
10721 Keyword::ROW,
10722 Keyword::LEVEL,
10723 Keyword::SECURITY,
10724 ]) {
10725 AlterTableOperation::NoForceRowLevelSecurity
10726 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10727 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10728 {
10729 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10730 let name = self.parse_identifier()?;
10731 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10732 Some(self.parse_identifier()?)
10733 } else {
10734 None
10735 };
10736 AlterTableOperation::ClearProjection {
10737 if_exists,
10738 name,
10739 partition,
10740 }
10741 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10742 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10743 {
10744 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10745 let name = self.parse_identifier()?;
10746 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10747 Some(self.parse_identifier()?)
10748 } else {
10749 None
10750 };
10751 AlterTableOperation::MaterializeProjection {
10752 if_exists,
10753 name,
10754 partition,
10755 }
10756 } else if self.parse_keyword(Keyword::DROP) {
10757 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10758 self.expect_token(&Token::LParen)?;
10759 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10760 self.expect_token(&Token::RParen)?;
10761 AlterTableOperation::DropPartitions {
10762 partitions,
10763 if_exists: true,
10764 }
10765 } else if self.parse_keyword(Keyword::PARTITION) {
10766 self.expect_token(&Token::LParen)?;
10767 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10768 self.expect_token(&Token::RParen)?;
10769 AlterTableOperation::DropPartitions {
10770 partitions,
10771 if_exists: false,
10772 }
10773 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10774 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10775 let name = self.parse_identifier()?;
10776 let drop_behavior = self.parse_optional_drop_behavior();
10777 AlterTableOperation::DropConstraint {
10778 if_exists,
10779 name,
10780 drop_behavior,
10781 }
10782 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10783 let drop_behavior = self.parse_optional_drop_behavior();
10784 AlterTableOperation::DropPrimaryKey { drop_behavior }
10785 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10786 let name = self.parse_identifier()?;
10787 let drop_behavior = self.parse_optional_drop_behavior();
10788 AlterTableOperation::DropForeignKey {
10789 name,
10790 drop_behavior,
10791 }
10792 } else if self.parse_keyword(Keyword::INDEX) {
10793 let name = self.parse_identifier()?;
10794 AlterTableOperation::DropIndex { name }
10795 } else if self.parse_keyword(Keyword::PROJECTION)
10796 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10797 {
10798 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10799 let name = self.parse_identifier()?;
10800 AlterTableOperation::DropProjection { if_exists, name }
10801 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10802 AlterTableOperation::DropClusteringKey
10803 } else {
10804 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10806 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10807 self.parse_comma_separated(Parser::parse_identifier)?
10808 } else {
10809 vec![self.parse_identifier()?]
10810 };
10811 let drop_behavior = self.parse_optional_drop_behavior();
10812 AlterTableOperation::DropColumn {
10813 has_column_keyword,
10814 column_names,
10815 if_exists,
10816 drop_behavior,
10817 }
10818 }
10819 } else if self.parse_keyword(Keyword::PARTITION) {
10820 self.expect_token(&Token::LParen)?;
10821 let before = self.parse_comma_separated(Parser::parse_expr)?;
10822 self.expect_token(&Token::RParen)?;
10823 self.expect_keyword_is(Keyword::RENAME)?;
10824 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10825 self.expect_token(&Token::LParen)?;
10826 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10827 self.expect_token(&Token::RParen)?;
10828 AlterTableOperation::RenamePartitions {
10829 old_partitions: before,
10830 new_partitions: renames,
10831 }
10832 } else if self.parse_keyword(Keyword::CHANGE) {
10833 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10835 let new_name = self.parse_identifier()?;
10836 let data_type = self.parse_data_type()?;
10837 let mut options = vec![];
10838 while let Some(option) = self.parse_optional_column_option()? {
10839 options.push(option);
10840 }
10841
10842 let column_position = self.parse_column_position()?;
10843
10844 AlterTableOperation::ChangeColumn {
10845 old_name,
10846 new_name,
10847 data_type,
10848 options,
10849 column_position,
10850 }
10851 } else if self.parse_keyword(Keyword::MODIFY) {
10852 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10854 let data_type = self.parse_data_type()?;
10855 let mut options = vec![];
10856 while let Some(option) = self.parse_optional_column_option()? {
10857 options.push(option);
10858 }
10859
10860 let column_position = self.parse_column_position()?;
10861
10862 AlterTableOperation::ModifyColumn {
10863 col_name,
10864 data_type,
10865 options,
10866 column_position,
10867 }
10868 } else if self.parse_keyword(Keyword::ALTER) {
10869 if self.peek_keyword(Keyword::SORTKEY) {
10870 self.prev_token();
10871 return self.parse_alter_sort_key();
10872 }
10873
10874 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10876 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10877
10878 let op: AlterColumnOperation = if self.parse_keywords(&[
10879 Keyword::SET,
10880 Keyword::NOT,
10881 Keyword::NULL,
10882 ]) {
10883 AlterColumnOperation::SetNotNull {}
10884 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10885 AlterColumnOperation::DropNotNull {}
10886 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10887 AlterColumnOperation::SetDefault {
10888 value: self.parse_expr()?,
10889 }
10890 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10891 AlterColumnOperation::DropDefault {}
10892 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10893 self.parse_set_data_type(true)?
10894 } else if self.parse_keyword(Keyword::TYPE) {
10895 self.parse_set_data_type(false)?
10896 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10897 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10898 Some(GeneratedAs::Always)
10899 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10900 Some(GeneratedAs::ByDefault)
10901 } else {
10902 None
10903 };
10904
10905 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10906
10907 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10908
10909 if self.peek_token_ref().token == Token::LParen {
10910 self.expect_token(&Token::LParen)?;
10911 sequence_options = Some(self.parse_create_sequence_options()?);
10912 self.expect_token(&Token::RParen)?;
10913 }
10914
10915 AlterColumnOperation::AddGenerated {
10916 generated_as,
10917 sequence_options,
10918 }
10919 } else {
10920 let message = if is_postgresql {
10921 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10922 } else {
10923 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10924 };
10925
10926 return self.expected_ref(message, self.peek_token_ref());
10927 };
10928 AlterTableOperation::AlterColumn { column_name, op }
10929 } else if self.parse_keyword(Keyword::SWAP) {
10930 self.expect_keyword_is(Keyword::WITH)?;
10931 let table_name = self.parse_object_name(false)?;
10932 AlterTableOperation::SwapWith { table_name }
10933 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10934 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10935 {
10936 let new_owner = self.parse_owner()?;
10937 AlterTableOperation::OwnerTo { new_owner }
10938 } else if dialect_of!(self is PostgreSqlDialect)
10939 && self.parse_keywords(&[Keyword::ATTACH, Keyword::PARTITION])
10940 {
10941 let partition_name = self.parse_object_name(false)?;
10942 let partition_bound = self.parse_partition_for_values()?;
10943 AlterTableOperation::AttachPartitionOf {
10944 partition_name,
10945 partition_bound,
10946 }
10947 } else if dialect_of!(self is PostgreSqlDialect)
10948 && self.parse_keywords(&[Keyword::DETACH, Keyword::PARTITION])
10949 {
10950 let partition_name = self.parse_object_name(false)?;
10951 let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
10952 let finalize = self.parse_keyword(Keyword::FINALIZE);
10953 AlterTableOperation::DetachPartitionOf {
10954 partition_name,
10955 concurrently,
10956 finalize,
10957 }
10958 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10959 && self.parse_keyword(Keyword::ATTACH)
10960 {
10961 AlterTableOperation::AttachPartition {
10962 partition: self.parse_part_or_partition()?,
10963 }
10964 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10965 && self.parse_keyword(Keyword::DETACH)
10966 {
10967 AlterTableOperation::DetachPartition {
10968 partition: self.parse_part_or_partition()?,
10969 }
10970 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10971 && self.parse_keyword(Keyword::FREEZE)
10972 {
10973 let partition = self.parse_part_or_partition()?;
10974 let with_name = if self.parse_keyword(Keyword::WITH) {
10975 self.expect_keyword_is(Keyword::NAME)?;
10976 Some(self.parse_identifier()?)
10977 } else {
10978 None
10979 };
10980 AlterTableOperation::FreezePartition {
10981 partition,
10982 with_name,
10983 }
10984 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10985 && self.parse_keyword(Keyword::UNFREEZE)
10986 {
10987 let partition = self.parse_part_or_partition()?;
10988 let with_name = if self.parse_keyword(Keyword::WITH) {
10989 self.expect_keyword_is(Keyword::NAME)?;
10990 Some(self.parse_identifier()?)
10991 } else {
10992 None
10993 };
10994 AlterTableOperation::UnfreezePartition {
10995 partition,
10996 with_name,
10997 }
10998 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10999 self.expect_token(&Token::LParen)?;
11000 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
11001 self.expect_token(&Token::RParen)?;
11002 AlterTableOperation::ClusterBy { exprs }
11003 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
11004 AlterTableOperation::SuspendRecluster
11005 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
11006 AlterTableOperation::ResumeRecluster
11007 } else if self.parse_keyword(Keyword::LOCK) {
11008 let equals = self.consume_token(&Token::Eq);
11009 let lock = match self.parse_one_of_keywords(&[
11010 Keyword::DEFAULT,
11011 Keyword::EXCLUSIVE,
11012 Keyword::NONE,
11013 Keyword::SHARED,
11014 ]) {
11015 Some(Keyword::DEFAULT) => AlterTableLock::Default,
11016 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
11017 Some(Keyword::NONE) => AlterTableLock::None,
11018 Some(Keyword::SHARED) => AlterTableLock::Shared,
11019 _ => self.expected_ref(
11020 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
11021 self.peek_token_ref(),
11022 )?,
11023 };
11024 AlterTableOperation::Lock { equals, lock }
11025 } else if self.parse_keyword(Keyword::ALGORITHM) {
11026 let equals = self.consume_token(&Token::Eq);
11027 let algorithm = match self.parse_one_of_keywords(&[
11028 Keyword::DEFAULT,
11029 Keyword::INSTANT,
11030 Keyword::INPLACE,
11031 Keyword::COPY,
11032 ]) {
11033 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
11034 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
11035 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
11036 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
11037 _ => self.expected_ref(
11038 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
11039 self.peek_token_ref(),
11040 )?,
11041 };
11042 AlterTableOperation::Algorithm { equals, algorithm }
11043 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
11044 let equals = self.consume_token(&Token::Eq);
11045 let value = self.parse_number_value()?;
11046 AlterTableOperation::AutoIncrement { equals, value }
11047 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
11048 let identity = if self.parse_keyword(Keyword::NOTHING) {
11049 ReplicaIdentity::Nothing
11050 } else if self.parse_keyword(Keyword::FULL) {
11051 ReplicaIdentity::Full
11052 } else if self.parse_keyword(Keyword::DEFAULT) {
11053 ReplicaIdentity::Default
11054 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
11055 ReplicaIdentity::Index(self.parse_identifier()?)
11056 } else {
11057 return self.expected_ref(
11058 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
11059 self.peek_token_ref(),
11060 );
11061 };
11062
11063 AlterTableOperation::ReplicaIdentity { identity }
11064 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
11065 let name = self.parse_identifier()?;
11066 AlterTableOperation::ValidateConstraint { name }
11067 } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
11068 let tablespace_name = self.parse_identifier()?;
11069 AlterTableOperation::SetTablespace { tablespace_name }
11070 } else {
11071 let mut options =
11072 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
11073 if !options.is_empty() {
11074 AlterTableOperation::SetTblProperties {
11075 table_properties: options,
11076 }
11077 } else {
11078 options = self.parse_options(Keyword::SET)?;
11079 if !options.is_empty() {
11080 AlterTableOperation::SetOptionsParens { options }
11081 } else {
11082 return self.expected_ref(
11083 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
11084 self.peek_token_ref(),
11085 );
11086 }
11087 }
11088 };
11089 Ok(operation)
11090 }
11091
11092 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
11093 let data_type = self.parse_data_type()?;
11094 let using = if self.dialect.supports_alter_column_type_using()
11095 && self.parse_keyword(Keyword::USING)
11096 {
11097 Some(self.parse_expr()?)
11098 } else {
11099 None
11100 };
11101 Ok(AlterColumnOperation::SetDataType {
11102 data_type,
11103 using,
11104 had_set,
11105 })
11106 }
11107
11108 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
11109 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
11110 match keyword {
11111 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
11112 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
11113 unexpected_keyword => Err(ParserError::ParserError(
11115 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
11116 )),
11117 }
11118 }
11119
    /// Parses the statement following an already-consumed `ALTER` keyword and
    /// dispatches to the dedicated parser for the named object type.
    ///
    /// Returns an error if the token after `ALTER` is not one of the supported
    /// object-type keywords.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
            Keyword::DOMAIN,
            Keyword::TRIGGER,
            Keyword::EXTENSION,
            Keyword::PROCEDURE,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // Rewind two tokens so `parse_alter_schema` can re-parse the
                // statement from its own prefix — presumably from the `ALTER`
                // keyword consumed by our caller (TODO confirm).
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // `ALTER ICEBERG TABLE ...` — parsed as a regular ALTER TABLE
                // with the `iceberg` flag set.
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::INDEX => {
                let index_name = self.parse_object_name(false)?;
                // Only RENAME TO and SET TABLESPACE are supported for indexes.
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
                    let tablespace_name = self.parse_identifier()?;
                    AlterIndexOperation::SetTablespace { tablespace_name }
                } else {
                    return self.expected_ref(
                        "RENAME or SET TABLESPACE after ALTER INDEX",
                        self.peek_token_ref(),
                    );
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            // FUNCTION / AGGREGATE / PROCEDURE share one parser, distinguished
            // by the `AlterFunctionKind` argument.
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::PROCEDURE => self.parse_alter_function(AlterFunctionKind::Procedure),
            Keyword::OPERATOR => {
                // `OPERATOR FAMILY` and `OPERATOR CLASS` are distinct
                // statements from a bare `ALTER OPERATOR`.
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            Keyword::DOMAIN => self.parse_alter_domain(),
            Keyword::TRIGGER => self.parse_alter_trigger(),
            Keyword::EXTENSION => self.parse_alter_extension(),
            // Defensive: `expect_one_of_keywords` only returns keywords from
            // the list above, so this arm should be unreachable.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR, DOMAIN, TRIGGER, EXTENSION, PROCEDURE}}, got {unexpected_keyword:?}"),
            )),
        }
    }
11205
11206 fn parse_alter_aggregate_signature(
11207 &mut self,
11208 ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
11209 let name = self.parse_object_name(false)?;
11210 self.expect_token(&Token::LParen)?;
11211
11212 if self.consume_token(&Token::Mul) {
11213 self.expect_token(&Token::RParen)?;
11214 return Ok((
11215 FunctionDesc {
11216 name,
11217 args: Some(vec![]),
11218 },
11219 true,
11220 None,
11221 ));
11222 }
11223
11224 let args =
11225 if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
11226 vec![]
11227 } else {
11228 self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
11229 };
11230
11231 let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11232 Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
11233 } else {
11234 None
11235 };
11236
11237 self.expect_token(&Token::RParen)?;
11238 Ok((
11239 FunctionDesc {
11240 name,
11241 args: Some(args),
11242 },
11243 false,
11244 aggregate_order_by,
11245 ))
11246 }
11247
    /// Parses a single optional ALTER FUNCTION / PROCEDURE action: null-input
    /// behavior, volatility, leakproof-ness, security, parallel safety,
    /// COST/ROWS/SUPPORT, or SET/RESET of a configuration parameter.
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not begin a recognized
    /// action, consuming nothing, so the caller can stop collecting actions.
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        // NOTE: branch order matters — longer keyword sequences must be tried
        // before their prefixes (e.g. `RETURNS NULL ON NULL INPUT` before a
        // hypothetical bare `RETURNS`, `EXTERNAL SECURITY` before `SECURITY`).
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // Only `NOT LEAKPROOF` is valid after NOT here.
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // `[EXTERNAL] SECURITY { DEFINER | INVOKER }` — the EXTERNAL flag
            // is preserved so the statement round-trips verbatim.
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // `SET param { FROM CURRENT | { = | TO } { DEFAULT | value, ... } }`
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // Either `=` or `TO` must separate the parameter from its value.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // `RESET { ALL | config_name }`
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // No recognized action keyword ahead; nothing was consumed.
            None
        };

        Ok(action)
    }
11357
11358 fn parse_alter_function_actions(
11359 &mut self,
11360 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11361 let mut actions = vec![];
11362 while let Some(action) = self.parse_alter_function_action()? {
11363 actions.push(action);
11364 }
11365 if actions.is_empty() {
11366 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11367 }
11368 let restrict = self.parse_keyword(Keyword::RESTRICT);
11369 Ok((actions, restrict))
11370 }
11371
    /// Parses an `ALTER { FUNCTION | AGGREGATE | PROCEDURE }` statement after
    /// the object-type keyword has been consumed.
    ///
    /// The signature is parsed according to `kind` (aggregates allow the
    /// wildcard `(*)` and `ORDER BY` argument forms), then one of the
    /// supported operations follows. `NO DEPENDS`, `DEPENDS ON EXTENSION`, and
    /// free-form action lists are only accepted for functions and procedures;
    /// aggregates are limited to RENAME TO / OWNER TO / SET SCHEMA.
    pub fn parse_alter_function(
        &mut self,
        kind: AlterFunctionKind,
    ) -> Result<Statement, ParserError> {
        let (function, aggregate_star, aggregate_order_by) = match kind {
            AlterFunctionKind::Function | AlterFunctionKind::Procedure => {
                (self.parse_function_desc()?, false, None)
            }
            AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
        };

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterFunctionOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterFunctionOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterFunctionOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure)
            && self.parse_keyword(Keyword::NO)
        {
            // `NO DEPENDS ON EXTENSION name` — negated dependency form.
            if !self.parse_keyword(Keyword::DEPENDS) {
                return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
            }
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: true,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure)
            && self.parse_keyword(Keyword::DEPENDS)
        {
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: false,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure) {
            // Fallback for functions/procedures: a list of actions such as
            // IMMUTABLE, SECURITY DEFINER, SET param, etc.
            let (actions, restrict) = self.parse_alter_function_actions()?;
            AlterFunctionOperation::Actions { actions, restrict }
        } else {
            // Only reachable for aggregates, which support no further forms.
            return self.expected_ref(
                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterFunction(AlterFunction {
            kind,
            function,
            aggregate_order_by,
            aggregate_star,
            operation,
        }))
    }
11430
    /// Parses an `ALTER DOMAIN` statement after the `DOMAIN` keyword has been
    /// consumed: the domain name followed by exactly one operation.
    ///
    /// NOTE: branch order matters — multi-keyword forms (e.g. `DROP
    /// CONSTRAINT`) must be tried before shorter forms sharing a prefix.
    pub fn parse_alter_domain(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let operation = if self.parse_keyword(Keyword::ADD) {
            // `ADD <table-style constraint> [NOT VALID]`
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterDomainOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else {
                return self.expected_ref("constraint after ADD", self.peek_token_ref());
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::CONSTRAINT]) {
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterDomainOperation::DropConstraint {
                if_exists,
                name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
            AlterDomainOperation::DropDefault
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::CONSTRAINT]) {
            let old_name = self.parse_identifier()?;
            self.expect_keyword_is(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameConstraint { old_name, new_name }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterDomainOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterDomainOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
            AlterDomainOperation::SetDefault {
                default: self.parse_expr()?,
            }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterDomainOperation::ValidateConstraint { name }
        } else {
            return self.expected_ref(
                "ADD, DROP, RENAME, OWNER TO, SET, VALIDATE after ALTER DOMAIN",
                self.peek_token_ref(),
            );
        };

        Ok(AlterDomain { name, operation }.into())
    }
11486
11487 pub fn parse_alter_trigger(&mut self) -> Result<Statement, ParserError> {
11489 let name = self.parse_identifier()?;
11490 self.expect_keyword_is(Keyword::ON)?;
11491 let table_name = self.parse_object_name(false)?;
11492
11493 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11494 let new_name = self.parse_identifier()?;
11495 AlterTriggerOperation::RenameTo { new_name }
11496 } else {
11497 return self.expected_ref("RENAME TO after ALTER TRIGGER ... ON ...", self.peek_token_ref());
11498 };
11499
11500 Ok(AlterTrigger {
11501 name,
11502 table_name,
11503 operation,
11504 }
11505 .into())
11506 }
11507
11508 pub fn parse_alter_extension(&mut self) -> Result<Statement, ParserError> {
11510 let name = self.parse_identifier()?;
11511
11512 let operation = if self.parse_keyword(Keyword::UPDATE) {
11513 let version = if self.parse_keyword(Keyword::TO) {
11514 Some(self.parse_identifier()?)
11515 } else {
11516 None
11517 };
11518 AlterExtensionOperation::UpdateTo { version }
11519 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11520 AlterExtensionOperation::SetSchema {
11521 schema_name: self.parse_object_name(false)?,
11522 }
11523 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11524 AlterExtensionOperation::OwnerTo(self.parse_owner()?)
11525 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11526 let new_name = self.parse_identifier()?;
11527 AlterExtensionOperation::RenameTo { new_name }
11528 } else {
11529 return self.expected_ref(
11530 "UPDATE, SET SCHEMA, OWNER TO, or RENAME TO after ALTER EXTENSION",
11531 self.peek_token_ref(),
11532 );
11533 };
11534
11535 Ok(AlterExtension { name, operation }.into())
11536 }
11537
11538 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11540 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11541 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11543 let on_cluster = self.parse_optional_on_cluster()?;
11544 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11545
11546 let mut location = None;
11547 if self.parse_keyword(Keyword::LOCATION) {
11548 location = Some(HiveSetLocation {
11549 has_set: false,
11550 location: self.parse_identifier()?,
11551 });
11552 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11553 location = Some(HiveSetLocation {
11554 has_set: true,
11555 location: self.parse_identifier()?,
11556 });
11557 }
11558
11559 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11560 self.peek_token_ref().clone()
11561 } else {
11562 self.get_current_token().clone()
11563 };
11564
11565 Ok(AlterTable {
11566 name: table_name,
11567 if_exists,
11568 only,
11569 operations,
11570 location,
11571 on_cluster,
11572 table_type: if iceberg {
11573 Some(AlterTableType::Iceberg)
11574 } else {
11575 None
11576 },
11577 end_token: AttachedToken(end_token),
11578 }
11579 .into())
11580 }
11581
11582 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11584 let name = self.parse_object_name(false)?;
11585 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11586
11587 let with_options = self.parse_options(Keyword::WITH)?;
11588
11589 self.expect_keyword_is(Keyword::AS)?;
11590 let query = self.parse_query()?;
11591
11592 Ok(Statement::AlterView {
11593 name,
11594 columns,
11595 query,
11596 with_options,
11597 })
11598 }
11599
11600 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
11602 let name = self.parse_object_name(false)?;
11603
11604 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11605 let new_name = self.parse_identifier()?;
11606 Ok(Statement::AlterType(AlterType {
11607 name,
11608 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
11609 }))
11610 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
11611 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
11612 let new_enum_value = self.parse_identifier()?;
11613 let position = if self.parse_keyword(Keyword::BEFORE) {
11614 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
11615 } else if self.parse_keyword(Keyword::AFTER) {
11616 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
11617 } else {
11618 None
11619 };
11620
11621 Ok(Statement::AlterType(AlterType {
11622 name,
11623 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
11624 if_not_exists,
11625 value: new_enum_value,
11626 position,
11627 }),
11628 }))
11629 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
11630 let existing_enum_value = self.parse_identifier()?;
11631 self.expect_keyword(Keyword::TO)?;
11632 let new_enum_value = self.parse_identifier()?;
11633
11634 Ok(Statement::AlterType(AlterType {
11635 name,
11636 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
11637 from: existing_enum_value,
11638 to: new_enum_value,
11639 }),
11640 }))
11641 } else {
11642 self.expected_ref(
11643 "{RENAME TO | { RENAME | ADD } VALUE}",
11644 self.peek_token_ref(),
11645 )
11646 }
11647 }
11648
11649 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11653 let name = self.parse_object_name(false)?;
11654 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11655 AlterCollationOperation::RenameTo {
11656 new_name: self.parse_identifier()?,
11657 }
11658 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11659 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11660 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11661 AlterCollationOperation::SetSchema {
11662 schema_name: self.parse_object_name(false)?,
11663 }
11664 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11665 AlterCollationOperation::RefreshVersion
11666 } else {
11667 return self.expected_ref(
11668 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11669 self.peek_token_ref(),
11670 );
11671 };
11672
11673 Ok(AlterCollation { name, operation })
11674 }
11675
    /// Parses the remainder of an `ALTER OPERATOR <op> (<left>, <right>) ...`
    /// statement (PostgreSQL).
    ///
    /// Supported operations are `OWNER TO`, `SET SCHEMA`, and
    /// `SET (<option> [, ...])` with the options `RESTRICT`, `JOIN`,
    /// `COMMUTATOR`, `NEGATOR`, `HASHES`, and `MERGES`.
    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
        let name = self.parse_operator_name()?;

        self.expect_token(&Token::LParen)?;

        // The left operand type is `NONE` for prefix operators.
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // NOTE: `SET SCHEMA` must be tried before the bare `SET` branch,
        // otherwise `SCHEMA` would be misread as the start of an option list.
        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            self.expect_token(&Token::LParen)?;

            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    Keyword::RESTRICT => {
                        self.expect_token(&Token::Eq)?;
                        // `RESTRICT = NONE` clears the restriction estimator.
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        self.expect_token(&Token::Eq)?;
                        // `JOIN = NONE` clears the join estimator.
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    Keyword::COMMUTATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    // `HASHES` and `MERGES` are bare flags with no value.
                    Keyword::HASHES => {
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // `expect_one_of_keywords` only returns keywords from the
                    // list above, so this arm should be unreachable.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        })
    }
11785
11786 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11788 let strategy_number = self.parse_literal_uint()?;
11789 let operator_name = self.parse_operator_name()?;
11790
11791 self.expect_token(&Token::LParen)?;
11793 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11794 self.expect_token(&Token::RParen)?;
11795
11796 let purpose = if self.parse_keyword(Keyword::FOR) {
11798 if self.parse_keyword(Keyword::SEARCH) {
11799 Some(OperatorPurpose::ForSearch)
11800 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11801 let sort_family = self.parse_object_name(false)?;
11802 Some(OperatorPurpose::ForOrderBy { sort_family })
11803 } else {
11804 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11805 }
11806 } else {
11807 None
11808 };
11809
11810 Ok(OperatorFamilyItem::Operator {
11811 strategy_number,
11812 operator_name,
11813 op_types,
11814 purpose,
11815 })
11816 }
11817
11818 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11820 let support_number = self.parse_literal_uint()?;
11821
11822 let op_types =
11824 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
11825 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11826 self.expect_token(&Token::RParen)?;
11827 Some(types)
11828 } else if self.consume_token(&Token::LParen) {
11829 self.expect_token(&Token::RParen)?;
11830 Some(vec![])
11831 } else {
11832 None
11833 };
11834
11835 let function_name = self.parse_object_name(false)?;
11836
11837 let argument_types = if self.consume_token(&Token::LParen) {
11839 if self.peek_token_ref().token == Token::RParen {
11840 self.expect_token(&Token::RParen)?;
11841 vec![]
11842 } else {
11843 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11844 self.expect_token(&Token::RParen)?;
11845 types
11846 }
11847 } else {
11848 vec![]
11849 };
11850
11851 Ok(OperatorFamilyItem::Function {
11852 support_number,
11853 op_types,
11854 function_name,
11855 argument_types,
11856 })
11857 }
11858
11859 fn parse_operator_family_drop_operator(
11861 &mut self,
11862 ) -> Result<OperatorFamilyDropItem, ParserError> {
11863 let strategy_number = self.parse_literal_uint()?;
11864
11865 self.expect_token(&Token::LParen)?;
11867 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11868 self.expect_token(&Token::RParen)?;
11869
11870 Ok(OperatorFamilyDropItem::Operator {
11871 strategy_number,
11872 op_types,
11873 })
11874 }
11875
11876 fn parse_operator_family_drop_function(
11878 &mut self,
11879 ) -> Result<OperatorFamilyDropItem, ParserError> {
11880 let support_number = self.parse_literal_uint()?;
11881
11882 self.expect_token(&Token::LParen)?;
11884 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11885 self.expect_token(&Token::RParen)?;
11886
11887 Ok(OperatorFamilyDropItem::Function {
11888 support_number,
11889 op_types,
11890 })
11891 }
11892
11893 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11895 if self.parse_keyword(Keyword::OPERATOR) {
11896 self.parse_operator_family_add_operator()
11897 } else if self.parse_keyword(Keyword::FUNCTION) {
11898 self.parse_operator_family_add_function()
11899 } else {
11900 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11901 }
11902 }
11903
11904 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
11906 if self.parse_keyword(Keyword::OPERATOR) {
11907 self.parse_operator_family_drop_operator()
11908 } else if self.parse_keyword(Keyword::FUNCTION) {
11909 self.parse_operator_family_drop_function()
11910 } else {
11911 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11912 }
11913 }
11914
11915 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
11918 let name = self.parse_object_name(false)?;
11919 self.expect_keyword(Keyword::USING)?;
11920 let using = self.parse_identifier()?;
11921
11922 let operation = if self.parse_keyword(Keyword::ADD) {
11923 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
11924 AlterOperatorFamilyOperation::Add { items }
11925 } else if self.parse_keyword(Keyword::DROP) {
11926 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
11927 AlterOperatorFamilyOperation::Drop { items }
11928 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11929 let new_name = self.parse_object_name(false)?;
11930 AlterOperatorFamilyOperation::RenameTo { new_name }
11931 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11932 let owner = self.parse_owner()?;
11933 AlterOperatorFamilyOperation::OwnerTo(owner)
11934 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11935 let schema_name = self.parse_object_name(false)?;
11936 AlterOperatorFamilyOperation::SetSchema { schema_name }
11937 } else {
11938 return self.expected_ref(
11939 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
11940 self.peek_token_ref(),
11941 );
11942 };
11943
11944 Ok(AlterOperatorFamily {
11945 name,
11946 using,
11947 operation,
11948 })
11949 }
11950
11951 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
11955 let name = self.parse_object_name(false)?;
11956 self.expect_keyword(Keyword::USING)?;
11957 let using = self.parse_identifier()?;
11958
11959 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11960 let new_name = self.parse_object_name(false)?;
11961 AlterOperatorClassOperation::RenameTo { new_name }
11962 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11963 let owner = self.parse_owner()?;
11964 AlterOperatorClassOperation::OwnerTo(owner)
11965 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11966 let schema_name = self.parse_object_name(false)?;
11967 AlterOperatorClassOperation::SetSchema { schema_name }
11968 } else {
11969 return self.expected_ref(
11970 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
11971 self.peek_token_ref(),
11972 );
11973 };
11974
11975 Ok(AlterOperatorClass {
11976 name,
11977 using,
11978 operation,
11979 })
11980 }
11981
11982 pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
11986 self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
11987 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11988 let name = self.parse_object_name(false)?;
11989 let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
11990 self.prev_token();
11991 let options = self.parse_options(Keyword::OPTIONS)?;
11992 AlterSchemaOperation::SetOptionsParens { options }
11993 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
11994 let collate = self.parse_expr()?;
11995 AlterSchemaOperation::SetDefaultCollate { collate }
11996 } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
11997 let replica = self.parse_identifier()?;
11998 let options = if self.peek_keyword(Keyword::OPTIONS) {
11999 Some(self.parse_options(Keyword::OPTIONS)?)
12000 } else {
12001 None
12002 };
12003 AlterSchemaOperation::AddReplica { replica, options }
12004 } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
12005 let replica = self.parse_identifier()?;
12006 AlterSchemaOperation::DropReplica { replica }
12007 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
12008 let new_name = self.parse_object_name(false)?;
12009 AlterSchemaOperation::Rename { name: new_name }
12010 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
12011 let owner = self.parse_owner()?;
12012 AlterSchemaOperation::OwnerTo { owner }
12013 } else {
12014 return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
12015 };
12016 Ok(Statement::AlterSchema(AlterSchema {
12017 name,
12018 if_exists,
12019 operations: vec![operation],
12020 }))
12021 }
12022
12023 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
12026 let object_name = self.parse_object_name(false)?;
12027 if self.peek_token_ref().token == Token::LParen {
12028 match self.parse_function(object_name)? {
12029 Expr::Function(f) => Ok(Statement::Call(f)),
12030 other => parser_err!(
12031 format!("Expected a simple procedure call but found: {other}"),
12032 self.peek_token_ref().span.start
12033 ),
12034 }
12035 } else {
12036 Ok(Statement::Call(Function {
12037 name: object_name,
12038 uses_odbc_syntax: false,
12039 parameters: FunctionArguments::None,
12040 args: FunctionArguments::None,
12041 over: None,
12042 filter: None,
12043 null_treatment: None,
12044 within_group: vec![],
12045 }))
12046 }
12047 }
12048
12049 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
12051 let source;
12052 if self.consume_token(&Token::LParen) {
12053 source = CopySource::Query(self.parse_query()?);
12054 self.expect_token(&Token::RParen)?;
12055 } else {
12056 let table_name = self.parse_object_name(false)?;
12057 let columns = self.parse_parenthesized_column_list(Optional, false)?;
12058 source = CopySource::Table {
12059 table_name,
12060 columns,
12061 };
12062 }
12063 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
12064 Some(Keyword::FROM) => false,
12065 Some(Keyword::TO) => true,
12066 _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
12067 };
12068 if !to {
12069 if let CopySource::Query(_) = source {
12072 return Err(ParserError::ParserError(
12073 "COPY ... FROM does not support query as a source".to_string(),
12074 ));
12075 }
12076 }
12077 let target = if self.parse_keyword(Keyword::STDIN) {
12078 CopyTarget::Stdin
12079 } else if self.parse_keyword(Keyword::STDOUT) {
12080 CopyTarget::Stdout
12081 } else if self.parse_keyword(Keyword::PROGRAM) {
12082 CopyTarget::Program {
12083 command: self.parse_literal_string()?,
12084 }
12085 } else {
12086 CopyTarget::File {
12087 filename: self.parse_literal_string()?,
12088 }
12089 };
12090 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
12092 if self.consume_token(&Token::LParen) {
12093 options = self.parse_comma_separated(Parser::parse_copy_option)?;
12094 self.expect_token(&Token::RParen)?;
12095 }
12096 let mut legacy_options = vec![];
12097 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
12098 legacy_options.push(opt);
12099 }
12100 let values =
12101 if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
12102 self.expect_token(&Token::SemiColon)?;
12103 self.parse_tsv()
12104 } else {
12105 vec![]
12106 };
12107 Ok(Statement::Copy {
12108 source,
12109 to,
12110 target,
12111 options,
12112 legacy_options,
12113 values,
12114 })
12115 }
12116
12117 fn parse_open(&mut self) -> Result<Statement, ParserError> {
12119 self.expect_keyword(Keyword::OPEN)?;
12120 Ok(Statement::Open(OpenStatement {
12121 cursor_name: self.parse_identifier()?,
12122 }))
12123 }
12124
12125 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
12127 let cursor = if self.parse_keyword(Keyword::ALL) {
12128 CloseCursor::All
12129 } else {
12130 let name = self.parse_identifier()?;
12131
12132 CloseCursor::Specific { name }
12133 };
12134
12135 Ok(Statement::Close { cursor })
12136 }
12137
    /// Parses a single option of the modern, parenthesized
    /// `COPY ... WITH (<option> [, ...])` syntax (PostgreSQL).
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
            // Bare `FREEZE` or `FREEZE TRUE` means true; only an explicit
            // `FREEZE FALSE` disables it.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            // Same tri-state handling as FREEZE: absent or TRUE => true.
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            // The FORCE_* options each take a mandatory column list.
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected_ref("option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12179
12180 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
12181 if self.parse_keyword(Keyword::FORMAT) {
12183 let _ = self.parse_keyword(Keyword::AS);
12184 }
12185
12186 let ret = match self.parse_one_of_keywords(&[
12187 Keyword::ACCEPTANYDATE,
12188 Keyword::ACCEPTINVCHARS,
12189 Keyword::ADDQUOTES,
12190 Keyword::ALLOWOVERWRITE,
12191 Keyword::BINARY,
12192 Keyword::BLANKSASNULL,
12193 Keyword::BZIP2,
12194 Keyword::CLEANPATH,
12195 Keyword::COMPUPDATE,
12196 Keyword::CREDENTIALS,
12197 Keyword::CSV,
12198 Keyword::DATEFORMAT,
12199 Keyword::DELIMITER,
12200 Keyword::EMPTYASNULL,
12201 Keyword::ENCRYPTED,
12202 Keyword::ESCAPE,
12203 Keyword::EXTENSION,
12204 Keyword::FIXEDWIDTH,
12205 Keyword::GZIP,
12206 Keyword::HEADER,
12207 Keyword::IAM_ROLE,
12208 Keyword::IGNOREHEADER,
12209 Keyword::JSON,
12210 Keyword::MANIFEST,
12211 Keyword::MAXFILESIZE,
12212 Keyword::NULL,
12213 Keyword::PARALLEL,
12214 Keyword::PARQUET,
12215 Keyword::PARTITION,
12216 Keyword::REGION,
12217 Keyword::REMOVEQUOTES,
12218 Keyword::ROWGROUPSIZE,
12219 Keyword::STATUPDATE,
12220 Keyword::TIMEFORMAT,
12221 Keyword::TRUNCATECOLUMNS,
12222 Keyword::ZSTD,
12223 ]) {
12224 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
12225 Some(Keyword::ACCEPTINVCHARS) => {
12226 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12228 Some(self.parse_literal_string()?)
12229 } else {
12230 None
12231 };
12232 CopyLegacyOption::AcceptInvChars(ch)
12233 }
12234 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
12235 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
12236 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
12237 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
12238 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
12239 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
12240 Some(Keyword::COMPUPDATE) => {
12241 let preset = self.parse_keyword(Keyword::PRESET);
12242 let enabled = match self.parse_one_of_keywords(&[
12243 Keyword::TRUE,
12244 Keyword::FALSE,
12245 Keyword::ON,
12246 Keyword::OFF,
12247 ]) {
12248 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12249 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12250 _ => None,
12251 };
12252 CopyLegacyOption::CompUpdate { preset, enabled }
12253 }
12254 Some(Keyword::CREDENTIALS) => {
12255 CopyLegacyOption::Credentials(self.parse_literal_string()?)
12256 }
12257 Some(Keyword::CSV) => CopyLegacyOption::Csv({
12258 let mut opts = vec![];
12259 while let Some(opt) =
12260 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
12261 {
12262 opts.push(opt);
12263 }
12264 opts
12265 }),
12266 Some(Keyword::DATEFORMAT) => {
12267 let _ = self.parse_keyword(Keyword::AS);
12268 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12269 Some(self.parse_literal_string()?)
12270 } else {
12271 None
12272 };
12273 CopyLegacyOption::DateFormat(fmt)
12274 }
12275 Some(Keyword::DELIMITER) => {
12276 let _ = self.parse_keyword(Keyword::AS);
12277 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
12278 }
12279 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
12280 Some(Keyword::ENCRYPTED) => {
12281 let auto = self.parse_keyword(Keyword::AUTO);
12282 CopyLegacyOption::Encrypted { auto }
12283 }
12284 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
12285 Some(Keyword::EXTENSION) => {
12286 let ext = self.parse_literal_string()?;
12287 CopyLegacyOption::Extension(ext)
12288 }
12289 Some(Keyword::FIXEDWIDTH) => {
12290 let spec = self.parse_literal_string()?;
12291 CopyLegacyOption::FixedWidth(spec)
12292 }
12293 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
12294 Some(Keyword::HEADER) => CopyLegacyOption::Header,
12295 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
12296 Some(Keyword::IGNOREHEADER) => {
12297 let _ = self.parse_keyword(Keyword::AS);
12298 let num_rows = self.parse_literal_uint()?;
12299 CopyLegacyOption::IgnoreHeader(num_rows)
12300 }
12301 Some(Keyword::JSON) => {
12302 let _ = self.parse_keyword(Keyword::AS);
12303 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12304 Some(self.parse_literal_string()?)
12305 } else {
12306 None
12307 };
12308 CopyLegacyOption::Json(fmt)
12309 }
12310 Some(Keyword::MANIFEST) => {
12311 let verbose = self.parse_keyword(Keyword::VERBOSE);
12312 CopyLegacyOption::Manifest { verbose }
12313 }
12314 Some(Keyword::MAXFILESIZE) => {
12315 let _ = self.parse_keyword(Keyword::AS);
12316 let size = self.parse_number_value()?;
12317 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12318 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12319 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12320 _ => None,
12321 };
12322 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
12323 }
12324 Some(Keyword::NULL) => {
12325 let _ = self.parse_keyword(Keyword::AS);
12326 CopyLegacyOption::Null(self.parse_literal_string()?)
12327 }
12328 Some(Keyword::PARALLEL) => {
12329 let enabled = match self.parse_one_of_keywords(&[
12330 Keyword::TRUE,
12331 Keyword::FALSE,
12332 Keyword::ON,
12333 Keyword::OFF,
12334 ]) {
12335 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12336 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12337 _ => None,
12338 };
12339 CopyLegacyOption::Parallel(enabled)
12340 }
12341 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
12342 Some(Keyword::PARTITION) => {
12343 self.expect_keyword(Keyword::BY)?;
12344 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
12345 let include = self.parse_keyword(Keyword::INCLUDE);
12346 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
12347 }
12348 Some(Keyword::REGION) => {
12349 let _ = self.parse_keyword(Keyword::AS);
12350 let region = self.parse_literal_string()?;
12351 CopyLegacyOption::Region(region)
12352 }
12353 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
12354 Some(Keyword::ROWGROUPSIZE) => {
12355 let _ = self.parse_keyword(Keyword::AS);
12356 let file_size = self.parse_file_size()?;
12357 CopyLegacyOption::RowGroupSize(file_size)
12358 }
12359 Some(Keyword::STATUPDATE) => {
12360 let enabled = match self.parse_one_of_keywords(&[
12361 Keyword::TRUE,
12362 Keyword::FALSE,
12363 Keyword::ON,
12364 Keyword::OFF,
12365 ]) {
12366 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12367 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12368 _ => None,
12369 };
12370 CopyLegacyOption::StatUpdate(enabled)
12371 }
12372 Some(Keyword::TIMEFORMAT) => {
12373 let _ = self.parse_keyword(Keyword::AS);
12374 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12375 Some(self.parse_literal_string()?)
12376 } else {
12377 None
12378 };
12379 CopyLegacyOption::TimeFormat(fmt)
12380 }
12381 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
12382 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
12383 _ => self.expected_ref("option", self.peek_token_ref())?,
12384 };
12385 Ok(ret)
12386 }
12387
12388 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12389 let size = self.parse_number_value()?;
12390 let unit = self.maybe_parse_file_size_unit();
12391 Ok(FileSize { size, unit })
12392 }
12393
12394 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12395 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12396 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12397 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12398 _ => None,
12399 }
12400 }
12401
12402 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12403 if self.parse_keyword(Keyword::DEFAULT) {
12404 Ok(IamRoleKind::Default)
12405 } else {
12406 let arn = self.parse_literal_string()?;
12407 Ok(IamRoleKind::Arn(arn))
12408 }
12409 }
12410
    /// Parses a single sub-option of the legacy `COPY ... CSV <option> ...`
    /// syntax (pre-9.0 PostgreSQL).
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            // `QUOTE [AS] '<char>'`
            Some(Keyword::QUOTE) => {
                let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            // `ESCAPE [AS] '<char>'`
            Some(Keyword::ESCAPE) => {
                let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            // NOTE: these match guards consume tokens, so their order
            // matters — `FORCE NOT NULL` must be tried before `FORCE QUOTE`.
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            _ => self.expected_ref("csv option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12441
12442 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12443 let s = self.parse_literal_string()?;
12444 if s.len() != 1 {
12445 let loc = self
12446 .tokens
12447 .get(self.index - 1)
12448 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12449 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12450 }
12451 Ok(s.chars().next().unwrap())
12452 }
12453
    /// Parses inline tab-separated values following `COPY ... FROM STDIN;`.
    ///
    /// Thin alias for [`Parser::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
12459
    /// Consumes raw tokens (including whitespace) and splits them into
    /// tab/newline-separated values, as supplied inline after
    /// `COPY ... FROM STDIN;`.
    ///
    /// `\N` denotes a NULL value and `\.` terminates the data section.
    /// Any trailing content not followed by a tab or newline is discarded
    /// when the token stream ends.
    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
        let mut values = vec![];
        let mut content = String::new();
        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
            match t {
                // A tab ends the current value.
                Token::Whitespace(Whitespace::Tab) => {
                    values.push(Some(core::mem::take(&mut content)));
                }
                // A newline ends the current value (and the current row).
                Token::Whitespace(Whitespace::Newline) => {
                    values.push(Some(core::mem::take(&mut content)));
                }
                Token::Backslash => {
                    // `\.` marks the end of the inline data.
                    if self.consume_token(&Token::Period) {
                        return values;
                    }
                    // `\N` is the NULL marker; any other backslash-escaped
                    // word is silently dropped here.
                    if let Token::Word(w) = self.next_token().token {
                        if w.value == "N" {
                            values.push(None);
                        }
                    }
                }
                // Anything else accumulates into the current value.
                _ => {
                    content.push_str(&t.to_string());
                }
            }
        }
        values
    }
12489
    /// Parse a literal value (number, string, boolean, NULL, placeholder, …)
    /// together with its source span.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper that attaches the consumed token's span to a parsed Value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are only literals in dialects that support them.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted word that is not a keyword is a quoted string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Plain strings may be concatenated with adjacent literals in some
            // dialects; maybe_concat_string_literal handles that.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the name must immediately follow
            // the sigil, so no whitespace skipping is allowed here.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The placeholder span covers the sigil through the name.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12607
    /// If the active dialect supports string-literal concatenation, consume
    /// any adjacent string literals and append their contents to `str`.
    ///
    /// Two flavors exist: plain adjacency (e.g. `'a' 'b'` => `'ab'`) and
    /// newline-separated adjacency, where at least one newline must occur
    /// between the literals.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            // Any directly following string literal is appended.
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // Track whether a newline has been seen since the last literal;
            // only then may the next literal be concatenated.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    Token::Whitespace(_) => {
                        // Other whitespace neither enables nor resets the flag.
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        after_newline = false;
                    }
                    _ => break,
                }
            }
        }

        str
    }
12646
12647 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12649 let value_wrapper = self.parse_value()?;
12650 match &value_wrapper.value {
12651 Value::Number(_, _) => Ok(value_wrapper),
12652 Value::Placeholder(_) => Ok(value_wrapper),
12653 _ => {
12654 self.prev_token();
12655 self.expected_ref("literal number", self.peek_token_ref())
12656 }
12657 }
12658 }
12659
12660 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12663 let next_token = self.next_token();
12664 match next_token.token {
12665 Token::Plus => Ok(Expr::UnaryOp {
12666 op: UnaryOperator::Plus,
12667 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12668 }),
12669 Token::Minus => Ok(Expr::UnaryOp {
12670 op: UnaryOperator::Minus,
12671 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12672 }),
12673 _ => {
12674 self.prev_token();
12675 Ok(Expr::Value(self.parse_number_value()?))
12676 }
12677 }
12678 }
12679
12680 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12681 let next_token = self.next_token();
12682 let span = next_token.span;
12683 match next_token.token {
12684 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12685 Value::SingleQuotedString(s.to_string()).with_span(span),
12686 )),
12687 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12688 Value::DoubleQuotedString(s.to_string()).with_span(span),
12689 )),
12690 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12691 Value::HexStringLiteral(s.to_string()).with_span(span),
12692 )),
12693 unexpected => self.expected(
12694 "a string value",
12695 TokenWithSpan {
12696 token: unexpected,
12697 span,
12698 },
12699 ),
12700 }
12701 }
12702
12703 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12705 let next_token = self.next_token();
12706 match next_token.token {
12707 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12708 _ => self.expected("literal int", next_token),
12709 }
12710 }
12711
12712 fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
12715 let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
12716 let peek_token = parser.peek_token();
12717 let span = peek_token.span;
12718 match peek_token.token {
12719 Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
12720 {
12721 parser.next_token();
12722 Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
12723 }
12724 _ => Ok(Expr::Value(
12725 Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
12726 )),
12727 }
12728 };
12729
12730 Ok(CreateFunctionBody::AsBeforeOptions {
12731 body: parse_string_expr(self)?,
12732 link_symbol: if self.consume_token(&Token::Comma) {
12733 Some(parse_string_expr(self)?)
12734 } else {
12735 None
12736 },
12737 })
12738 }
12739
12740 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
12742 let next_token = self.next_token();
12743 match next_token.token {
12744 Token::Word(Word {
12745 value,
12746 keyword: Keyword::NoKeyword,
12747 ..
12748 }) => Ok(value),
12749 Token::SingleQuotedString(s) => Ok(s),
12750 Token::DoubleQuotedString(s) => Ok(s),
12751 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
12752 Ok(s)
12753 }
12754 Token::UnicodeStringLiteral(s) => Ok(s),
12755 _ => self.expected("literal string", next_token),
12756 }
12757 }
12758
12759 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12761 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12762 Some(Keyword::TRUE) => Ok(true),
12763 Some(Keyword::FALSE) => Ok(false),
12764 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12765 }
12766 }
12767
12768 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
12770 let neg = self.parse_keyword(Keyword::NOT);
12771 let normalized_form = self.maybe_parse(|parser| {
12772 match parser.parse_one_of_keywords(&[
12773 Keyword::NFC,
12774 Keyword::NFD,
12775 Keyword::NFKC,
12776 Keyword::NFKD,
12777 ]) {
12778 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
12779 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
12780 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
12781 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
12782 _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
12783 }
12784 })?;
12785 if self.parse_keyword(Keyword::NORMALIZED) {
12786 return Ok(Expr::IsNormalized {
12787 expr: Box::new(expr),
12788 form: normalized_form,
12789 negated: neg,
12790 });
12791 }
12792 self.expected_ref("unicode normalization form", self.peek_token_ref())
12793 }
12794
12795 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12797 self.expect_token(&Token::LParen)?;
12798 let values = self.parse_comma_separated(|parser| {
12799 let name = parser.parse_literal_string()?;
12800 let e = if parser.consume_token(&Token::Eq) {
12801 let value = parser.parse_number()?;
12802 EnumMember::NamedValue(name, value)
12803 } else {
12804 EnumMember::Name(name)
12805 };
12806 Ok(e)
12807 })?;
12808 self.expect_token(&Token::RParen)?;
12809
12810 Ok(values)
12811 }
12812
12813 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
12815 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
12816 if trailing_bracket.0 {
12817 return parser_err!(
12818 format!("unmatched > after parsing data type {ty}"),
12819 self.peek_token_ref()
12820 );
12821 }
12822
12823 Ok(ty)
12824 }
12825
    /// Core data-type parser. Returns the parsed type plus a flag that is
    /// true when a trailing `>` from nested angle-bracket syntax (ARRAY/STRUCT)
    /// was matched by an outer `>>` token and must be accounted for by the
    /// caller.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        let next_token_index = self.get_current_index();

        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                // Floating-point types; several accept an optional UNSIGNED
                // suffix (MySQL-style).
                Keyword::FLOAT => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::FloatUnsigned(precision))
                    } else {
                        Ok(DataType::Float(precision))
                    }
                }
                Keyword::REAL => {
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::RealUnsigned)
                    } else {
                        Ok(DataType::Real)
                    }
                }
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    if self.parse_keyword(Keyword::PRECISION) {
                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoublePrecisionUnsigned)
                        } else {
                            Ok(DataType::DoublePrecision)
                        }
                    } else {
                        let precision = self.parse_exact_number_optional_precision_scale()?;

                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoubleUnsigned(precision))
                        } else {
                            Ok(DataType::Double(precision))
                        }
                    }
                }
                // Integer family: optional display precision, optional
                // UNSIGNED, and (in some dialects) an ignorable SIGNED suffix.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // Character/string types.
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                // Binary types.
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                // Date/time types, with optional WITH/WITHOUT TIME ZONE.
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // parse_datetime_64 re-reads the DATETIME64 keyword itself.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => {
                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
                }
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => {
                    if self.dialect.supports_interval_options() {
                        let fields = self.maybe_parse_optional_interval_fields()?;
                        let precision = self.parse_optional_precision()?;
                        Ok(DataType::Interval { fields, precision })
                    } else {
                        Ok(DataType::Interval {
                            fields: None,
                            precision: None,
                        })
                    }
                }
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    // ClickHouse FixedString(N): the length is mandatory.
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // Exact numeric types with optional (precision, scale).
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecimalUnsigned(precision))
                    } else {
                        Ok(DataType::Decimal(precision))
                    }
                }
                Keyword::DEC => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecUnsigned(precision))
                    } else {
                        Ok(DataType::Dec(precision))
                    }
                }
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                // Composite/container types; several are dialect-gated.
                Keyword::ARRAY => {
                    if self.dialect.supports_array_typedef_without_element_type() {
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        // ARRAY<T>: recurse for the element type; the closing
                        // `>` may have been consumed as part of a `>>` token.
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
                {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                Keyword::SETOF => {
                    let inner = self.parse_data_type()?;
                    Ok(DataType::SetOf(Box::new(inner)))
                }
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                Keyword::TABLE => {
                    if self.peek_token_ref().token == Token::LParen {
                        let columns = self.parse_returns_table_columns()?;
                        Ok(DataType::Table(Some(columns)))
                    } else {
                        Ok(DataType::Table(None))
                    }
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsVector)
                }
                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsQuery)
                }
                _ => {
                    // Fall back to a user-defined/custom type name with
                    // optional modifiers, e.g. `my_type(1, 'x')`.
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Postgres-style array suffixes: `T[]`, `T[3]`, possibly repeated.
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
13254
    /// Parse one column of a `RETURNS TABLE (...)` clause; identical in form
    /// to an ordinary column definition.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
13258
13259 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13260 self.expect_token(&Token::LParen)?;
13261 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13262 self.expect_token(&Token::RParen)?;
13263 Ok(columns)
13264 }
13265
13266 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13268 self.expect_token(&Token::LParen)?;
13269 let mut values = Vec::new();
13270 loop {
13271 let next_token = self.next_token();
13272 match next_token.token {
13273 Token::SingleQuotedString(value) => values.push(value),
13274 _ => self.expected("a string", next_token)?,
13275 }
13276 let next_token = self.next_token();
13277 match next_token.token {
13278 Token::Comma => (),
13279 Token::RParen => break,
13280 _ => self.expected(", or }", next_token)?,
13281 }
13282 }
13283 Ok(values)
13284 }
13285
13286 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13288 let ident = self.parse_identifier()?;
13289 self.expect_keyword_is(Keyword::AS)?;
13290 let alias = self.parse_identifier()?;
13291 Ok(IdentWithAlias { ident, alias })
13292 }
13293
13294 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13296 let ident = self.parse_identifier()?;
13297 let _after_as = self.parse_keyword(Keyword::AS);
13298 let alias = self.parse_identifier()?;
13299 Ok(IdentWithAlias { ident, alias })
13300 }
13301
13302 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13304 self.parse_comma_separated(|parser| {
13305 parser.expect_token(&Token::LParen)?;
13306 let query = parser.parse_query()?;
13307 parser.expect_token(&Token::RParen)?;
13308 Ok(*query)
13309 })
13310 }
13311
13312 fn parse_distinct_required_set_quantifier(
13314 &mut self,
13315 operator_name: &str,
13316 ) -> Result<SetQuantifier, ParserError> {
13317 let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
13318 match quantifier {
13319 SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
13320 _ => Err(ParserError::ParserError(format!(
13321 "{operator_name} pipe operator requires DISTINCT modifier",
13322 ))),
13323 }
13324 }
13325
13326 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13328 if self.parse_keyword(Keyword::AS) {
13329 Ok(Some(self.parse_identifier()?))
13330 } else {
13331 self.maybe_parse(|parser| parser.parse_identifier())
13333 }
13334 }
13335
13336 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13338 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13339 parser.dialect.is_select_item_alias(explicit, kw, parser)
13340 }
13341 self.parse_optional_alias_inner(None, validator)
13342 }
13343
13344 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
13348 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13349 parser.dialect.is_table_factor_alias(explicit, kw, parser)
13350 }
13351 let explicit = self.peek_keyword(Keyword::AS);
13352 match self.parse_optional_alias_inner(None, validator)? {
13353 Some(name) => {
13354 let columns = self.parse_table_alias_column_defs()?;
13355 Ok(Some(TableAlias {
13356 explicit,
13357 name,
13358 columns,
13359 }))
13360 }
13361 None => Ok(None),
13362 }
13363 }
13364
    /// Parse zero or more MySQL-style table index hints, e.g.
    /// `USE INDEX FOR JOIN (idx1, idx2)` or `FORCE KEY (idx)`.
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                // Unreachable: parse_one_of_keywords only returns the
                // keywords listed above; kept for an exhaustive match.
                _ => {
                    return self.expected_ref(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token_ref(),
                    )
                }
            };
            // INDEX and KEY are interchangeable in MySQL; we record which
            // one was written.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self
                        .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
                }
            };
            // Optional `FOR JOIN | ORDER BY | GROUP BY` scope clause.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected_ref(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token_ref(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // The index-name list may be empty, e.g. `USE INDEX ()`.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token_ref().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
13423
13424 pub fn parse_optional_alias(
13428 &mut self,
13429 reserved_kwds: &[Keyword],
13430 ) -> Result<Option<Ident>, ParserError> {
13431 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13432 false
13433 }
13434 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13435 }
13436
    /// Core of optional-alias parsing shared by expression and table-factor
    /// aliases.
    ///
    /// `reserved_kwds`, when provided, lists keywords that may NOT serve as
    /// an implicit (no `AS`) alias. `validator` gives the caller/dialect a
    /// second chance to accept a keyword as an alias when the reserved-list
    /// check does not apply or rejects it.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // When a reserved list is provided: accept any word after an
            // explicit AS, or a word that is not in the reserved list.
            Token::Word(w)
                if reserved_kwds.is_some()
                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Otherwise let the validator decide whether this word is an alias.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Quoted strings are always accepted as (quoted) aliases.
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                // An explicit AS demands an alias; otherwise push the token
                // back and report that no alias was present.
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                self.prev_token();
                Ok(None) }
        }
    }
13481
    /// Parses an optional `GROUP BY` clause, including `GROUP BY ALL`,
    /// dialect-gated `WITH ROLLUP/CUBE/TOTALS` modifiers, and
    /// `GROUPING SETS (...)`. Returns `Ok(None)` when `GROUP BY` is absent.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` carries no expression list (expressions = None).
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Several `WITH <modifier>` clauses may appear in sequence.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords limits matches
                        // to the list above; kept as a defensive error path.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                // Each grouping set is either a parenthesized tuple or a
                // single expression wrapped in a one-element vec.
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13538
13539 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
13541 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13542 let order_by =
13543 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
13544 let order_by_options = self.parse_order_by_options()?;
13545 OrderBy {
13546 kind: OrderByKind::All(order_by_options),
13547 interpolate: None,
13548 }
13549 } else {
13550 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13551 let interpolate = if self.dialect.supports_interpolate() {
13552 self.parse_interpolations()?
13553 } else {
13554 None
13555 };
13556 OrderBy {
13557 kind: OrderByKind::Expressions(exprs),
13558 interpolate,
13559 }
13560 };
13561 Ok(Some(order_by))
13562 } else {
13563 Ok(None)
13564 }
13565 }
13566
    /// Parses an optional LIMIT/OFFSET clause in the orders seen across
    /// dialects: `OFFSET ... LIMIT ...`, `LIMIT ... OFFSET ...`, the
    /// `LIMIT <offset>, <limit>` comma form, and `LIMIT ... BY ...`.
    /// Returns `Ok(None)` when none of these clauses are present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may precede LIMIT.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // Comma form `LIMIT <offset>, <limit>`: only when the dialect
            // supports it, no OFFSET was already parsed, and a first
            // expression is present before the comma.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                // expr.is_some() was checked above, so this cannot fail;
                // ok_or_else keeps the invariant explicit.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // Dialect-gated `LIMIT n BY expr, ...` form.
            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT (when not already parsed above).
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit == Some(None)` means the LIMIT keyword was present but
        // parse_limit produced no expression; that alone does not warrant
        // emitting a clause (NOTE(review): presumably the `LIMIT ALL` case —
        // confirm against parse_limit).
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
13618
13619 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13622 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13623 let fn_name = self.parse_object_name(false)?;
13624 self.parse_function_call(fn_name)
13625 .map(TableObject::TableFunction)
13626 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13627 self.parse_parenthesized(|p| p.parse_query())
13628 .map(TableObject::TableQuery)
13629 } else {
13630 self.parse_object_name(false).map(TableObject::TableName)
13631 }
13632 }
13633
    /// Parses a possibly-qualified object name such as `foo` or `db.schema.t`.
    ///
    /// `in_table_clause` indicates the name appears where a table is
    /// expected, which enables BigQuery's unquoted hyphenated identifiers
    /// in the inner helper. Wildcard (`*`) parts are not allowed here.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13643
    /// Workhorse behind [`Self::parse_object_name`]: parses a
    /// period-separated object name. `allow_wildcards` permits `*` parts;
    /// `in_table_clause` enables BigQuery's unquoted hyphenated table names.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names may contain hyphens, e.g. `my-project.ds.t`.
            // `end_with_period` signals the period was already consumed as
            // part of a number token inside the hyphenated identifier.
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // A `*` part, stored as an identifier whose value is "*".
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // `db..table` shorthand: insert an empty middle part.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a function call as a name part;
                    // parse its parenthesized argument list when detected.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery: a single quoted part may itself contain periods
        // (e.g. `"a.b.c"`); split such parts into separate identifiers,
        // each inheriting the original quote style and span.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13740
    /// Scans forward collecting every word token as an identifier, silently
    /// skipping non-word tokens, until one of the stop tokens (EOF, `=`,
    /// `;`, or `|>`) is reached. Stop tokens are left unconsumed.
    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];
        loop {
            let token = self.peek_token_ref();
            match &token.token {
                Token::Word(w) => {
                    idents.push(w.to_ident(token.span));
                }
                // Stop tokens: end the scan without consuming them.
                Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
                    break
                }
                // Anything else is skipped.
                _ => {}
            }
            self.advance_token();
        }
        Ok(idents)
    }
13759
13760 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
13800 let mut idents = vec![];
13801
13802 let next_token = self.next_token();
13804 match next_token.token {
13805 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13806 Token::EOF => {
13807 return Err(ParserError::ParserError(
13808 "Empty input when parsing identifier".to_string(),
13809 ))?
13810 }
13811 token => {
13812 return Err(ParserError::ParserError(format!(
13813 "Unexpected token in identifier: {token}"
13814 )))?
13815 }
13816 };
13817
13818 loop {
13820 match self.next_token().token {
13821 Token::Period => {
13823 let next_token = self.next_token();
13824 match next_token.token {
13825 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13826 Token::EOF => {
13827 return Err(ParserError::ParserError(
13828 "Trailing period in identifier".to_string(),
13829 ))?
13830 }
13831 token => {
13832 return Err(ParserError::ParserError(format!(
13833 "Unexpected token following period in identifier: {token}"
13834 )))?
13835 }
13836 }
13837 }
13838 Token::EOF => break,
13839 token => {
13840 return Err(ParserError::ParserError(format!(
13841 "Unexpected token in identifier: {token}"
13842 )))?;
13843 }
13844 }
13845 }
13846
13847 Ok(idents)
13848 }
13849
13850 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
13852 let next_token = self.next_token();
13853 match next_token.token {
13854 Token::Word(w) => Ok(w.into_ident(next_token.span)),
13855 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
13856 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
13857 _ => self.expected("identifier", next_token),
13858 }
13859 }
13860
    /// Parses a BigQuery-style identifier that may contain hyphens, e.g.
    /// `my-project`. Returns the identifier plus a flag that is `true` when
    /// the identifier was terminated by a number literal that the tokenizer
    /// fused with a trailing `.` (meaning the period is already consumed).
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Quoted identifiers cannot absorb hyphens.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        // The token immediately after `-` (no whitespace
                        // skipping) must continue the identifier.
                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // A literal like `123.` means the tokenizer
                                // fused the separating period into the number;
                                // keep only the digits and report the period
                                // as already consumed via the `true` flag.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A trailing number segment must be followed
                                // by whitespace — unless a period comes next.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement recorded above.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to ordinary identifier parsing.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
13938
13939 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
13941 if self.consume_token(&Token::LParen) {
13942 if self.peek_token_ref().token == Token::RParen {
13943 self.next_token();
13944 Ok(vec![])
13945 } else {
13946 let cols = self.parse_comma_separated_with_trailing_commas(
13947 Parser::parse_view_column,
13948 self.dialect.supports_column_definition_trailing_commas(),
13949 Self::is_reserved_for_column_alias,
13950 )?;
13951 self.expect_token(&Token::RParen)?;
13952 Ok(cols)
13953 }
13954 } else {
13955 Ok(vec![])
13956 }
13957 }
13958
13959 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
13961 let name = self.parse_identifier()?;
13962 let options = self.parse_view_column_options()?;
13963 let data_type = if dialect_of!(self is ClickHouseDialect) {
13964 Some(self.parse_data_type()?)
13965 } else {
13966 None
13967 };
13968 Ok(ViewColumnDef {
13969 name,
13970 data_type,
13971 options,
13972 })
13973 }
13974
13975 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
13976 let mut options = Vec::new();
13977 loop {
13978 let option = self.parse_optional_column_option()?;
13979 if let Some(option) = option {
13980 options.push(option);
13981 } else {
13982 break;
13983 }
13984 }
13985 if options.is_empty() {
13986 Ok(None)
13987 } else if self.dialect.supports_space_separated_column_options() {
13988 Ok(Some(ColumnOptions::SpaceSeparated(options)))
13989 } else {
13990 Ok(Some(ColumnOptions::CommaSeparated(options)))
13991 }
13992 }
13993
    /// Parses a parenthesized comma-separated list of column identifiers.
    ///
    /// `optional` controls whether a missing opening paren is an error;
    /// `allow_empty` permits an explicit empty list `()`.
    pub fn parse_parenthesized_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Ident>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
    }
14003
    /// Parses a parenthesized comma-separated list of compound (dotted)
    /// identifiers, each returned as an [`Expr::CompoundIdentifier`].
    pub fn parse_parenthesized_compound_identifier_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Expr>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            Ok(Expr::CompoundIdentifier(
                p.parse_period_separated(|p| p.parse_identifier())?,
            ))
        })
    }
14016
    /// Parses the mandatory, non-empty parenthesized column list of an
    /// index definition; each element may be a full index expression.
    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
            p.parse_create_index_expr()
        })
    }
14024
    /// Parses a parenthesized comma-separated list of qualified (possibly
    /// schema-prefixed) object names.
    pub fn parse_parenthesized_qualified_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<ObjectName>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            p.parse_object_name(true)
        })
    }
14036
14037 fn parse_parenthesized_column_list_inner<F, T>(
14040 &mut self,
14041 optional: IsOptional,
14042 allow_empty: bool,
14043 mut f: F,
14044 ) -> Result<Vec<T>, ParserError>
14045 where
14046 F: FnMut(&mut Parser) -> Result<T, ParserError>,
14047 {
14048 if self.consume_token(&Token::LParen) {
14049 if allow_empty && self.peek_token_ref().token == Token::RParen {
14050 self.next_token();
14051 Ok(vec![])
14052 } else {
14053 let cols = self.parse_comma_separated(|p| f(p))?;
14054 self.expect_token(&Token::RParen)?;
14055 Ok(cols)
14056 }
14057 } else if optional == Optional {
14058 Ok(vec![])
14059 } else {
14060 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
14061 }
14062 }
14063
14064 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
14066 if self.consume_token(&Token::LParen) {
14067 let cols = self.parse_comma_separated(|p| {
14068 let name = p.parse_identifier()?;
14069 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
14070 Ok(TableAliasColumnDef { name, data_type })
14071 })?;
14072 self.expect_token(&Token::RParen)?;
14073 Ok(cols)
14074 } else {
14075 Ok(vec![])
14076 }
14077 }
14078
14079 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
14081 self.expect_token(&Token::LParen)?;
14082 let n = self.parse_literal_uint()?;
14083 self.expect_token(&Token::RParen)?;
14084 Ok(n)
14085 }
14086
14087 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
14089 if self.consume_token(&Token::LParen) {
14090 let n = self.parse_literal_uint()?;
14091 self.expect_token(&Token::RParen)?;
14092 Ok(Some(n))
14093 } else {
14094 Ok(None)
14095 }
14096 }
14097
14098 fn maybe_parse_optional_interval_fields(
14099 &mut self,
14100 ) -> Result<Option<IntervalFields>, ParserError> {
14101 match self.parse_one_of_keywords(&[
14102 Keyword::YEAR,
14104 Keyword::DAY,
14105 Keyword::HOUR,
14106 Keyword::MINUTE,
14107 Keyword::MONTH,
14109 Keyword::SECOND,
14110 ]) {
14111 Some(Keyword::YEAR) => {
14112 if self.peek_keyword(Keyword::TO) {
14113 self.expect_keyword(Keyword::TO)?;
14114 self.expect_keyword(Keyword::MONTH)?;
14115 Ok(Some(IntervalFields::YearToMonth))
14116 } else {
14117 Ok(Some(IntervalFields::Year))
14118 }
14119 }
14120 Some(Keyword::DAY) => {
14121 if self.peek_keyword(Keyword::TO) {
14122 self.expect_keyword(Keyword::TO)?;
14123 match self.expect_one_of_keywords(&[
14124 Keyword::HOUR,
14125 Keyword::MINUTE,
14126 Keyword::SECOND,
14127 ])? {
14128 Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
14129 Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
14130 Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
14131 _ => {
14132 self.prev_token();
14133 self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
14134 }
14135 }
14136 } else {
14137 Ok(Some(IntervalFields::Day))
14138 }
14139 }
14140 Some(Keyword::HOUR) => {
14141 if self.peek_keyword(Keyword::TO) {
14142 self.expect_keyword(Keyword::TO)?;
14143 match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
14144 Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
14145 Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
14146 _ => {
14147 self.prev_token();
14148 self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
14149 }
14150 }
14151 } else {
14152 Ok(Some(IntervalFields::Hour))
14153 }
14154 }
14155 Some(Keyword::MINUTE) => {
14156 if self.peek_keyword(Keyword::TO) {
14157 self.expect_keyword(Keyword::TO)?;
14158 self.expect_keyword(Keyword::SECOND)?;
14159 Ok(Some(IntervalFields::MinuteToSecond))
14160 } else {
14161 Ok(Some(IntervalFields::Minute))
14162 }
14163 }
14164 Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
14165 Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
14166 Some(_) => {
14167 self.prev_token();
14168 self.expected_ref(
14169 "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
14170 self.peek_token_ref(),
14171 )
14172 }
14173 None => Ok(None),
14174 }
14175 }
14176
14177 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
14185 self.expect_keyword_is(Keyword::DATETIME64)?;
14186 self.expect_token(&Token::LParen)?;
14187 let precision = self.parse_literal_uint()?;
14188 let time_zone = if self.consume_token(&Token::Comma) {
14189 Some(self.parse_literal_string()?)
14190 } else {
14191 None
14192 };
14193 self.expect_token(&Token::RParen)?;
14194 Ok((precision, time_zone))
14195 }
14196
14197 pub fn parse_optional_character_length(
14199 &mut self,
14200 ) -> Result<Option<CharacterLength>, ParserError> {
14201 if self.consume_token(&Token::LParen) {
14202 let character_length = self.parse_character_length()?;
14203 self.expect_token(&Token::RParen)?;
14204 Ok(Some(character_length))
14205 } else {
14206 Ok(None)
14207 }
14208 }
14209
14210 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
14212 if self.consume_token(&Token::LParen) {
14213 let binary_length = self.parse_binary_length()?;
14214 self.expect_token(&Token::RParen)?;
14215 Ok(Some(binary_length))
14216 } else {
14217 Ok(None)
14218 }
14219 }
14220
14221 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
14223 if self.parse_keyword(Keyword::MAX) {
14224 return Ok(CharacterLength::Max);
14225 }
14226 let length = self.parse_literal_uint()?;
14227 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
14228 Some(CharLengthUnits::Characters)
14229 } else if self.parse_keyword(Keyword::OCTETS) {
14230 Some(CharLengthUnits::Octets)
14231 } else {
14232 None
14233 };
14234 Ok(CharacterLength::IntegerLength { length, unit })
14235 }
14236
14237 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
14239 if self.parse_keyword(Keyword::MAX) {
14240 return Ok(BinaryLength::Max);
14241 }
14242 let length = self.parse_literal_uint()?;
14243 Ok(BinaryLength::IntegerLength { length })
14244 }
14245
14246 pub fn parse_optional_precision_scale(
14248 &mut self,
14249 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14250 if self.consume_token(&Token::LParen) {
14251 let n = self.parse_literal_uint()?;
14252 let scale = if self.consume_token(&Token::Comma) {
14253 Some(self.parse_literal_uint()?)
14254 } else {
14255 None
14256 };
14257 self.expect_token(&Token::RParen)?;
14258 Ok((Some(n), scale))
14259 } else {
14260 Ok((None, None))
14261 }
14262 }
14263
14264 pub fn parse_exact_number_optional_precision_scale(
14266 &mut self,
14267 ) -> Result<ExactNumberInfo, ParserError> {
14268 if self.consume_token(&Token::LParen) {
14269 let precision = self.parse_literal_uint()?;
14270 let scale = if self.consume_token(&Token::Comma) {
14271 Some(self.parse_signed_integer()?)
14272 } else {
14273 None
14274 };
14275
14276 self.expect_token(&Token::RParen)?;
14277
14278 match scale {
14279 None => Ok(ExactNumberInfo::Precision(precision)),
14280 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14281 }
14282 } else {
14283 Ok(ExactNumberInfo::None)
14284 }
14285 }
14286
14287 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14289 let is_negative = self.consume_token(&Token::Minus);
14290
14291 if !is_negative {
14292 let _ = self.consume_token(&Token::Plus);
14293 }
14294
14295 let current_token = self.peek_token_ref();
14296 match ¤t_token.token {
14297 Token::Number(s, _) => {
14298 let s = s.clone();
14299 let span_start = current_token.span.start;
14300 self.advance_token();
14301 let value = Self::parse::<i64>(s, span_start)?;
14302 Ok(if is_negative { -value } else { value })
14303 }
14304 _ => self.expected_ref("number", current_token),
14305 }
14306 }
14307
14308 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
14310 if self.consume_token(&Token::LParen) {
14311 let mut modifiers = Vec::new();
14312 loop {
14313 let next_token = self.next_token();
14314 match next_token.token {
14315 Token::Word(w) => modifiers.push(w.to_string()),
14316 Token::Number(n, _) => modifiers.push(n),
14317 Token::SingleQuotedString(s) => modifiers.push(s),
14318
14319 Token::Comma => {
14320 continue;
14321 }
14322 Token::RParen => {
14323 break;
14324 }
14325 _ => self.expected("type modifiers", next_token)?,
14326 }
14327 }
14328
14329 Ok(Some(modifiers))
14330 } else {
14331 Ok(None)
14332 }
14333 }
14334
14335 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14337 where
14338 F: FnOnce(Box<DataType>) -> DataType,
14339 {
14340 self.expect_token(&Token::LParen)?;
14341 let inside_type = self.parse_data_type()?;
14342 self.expect_token(&Token::RParen)?;
14343 Ok(parent_type(inside_type.into()))
14344 }
14345
    /// Parses a DELETE statement and boxes it as a [`SetExpr::Delete`]
    /// query body (used by `parse_query` when DELETE appears as a query).
    fn parse_delete_setexpr_boxed(
        &mut self,
        delete_token: TokenWithSpan,
    ) -> Result<Box<SetExpr>, ParserError> {
        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
    }
14355
    /// Parses a DELETE statement; the DELETE keyword itself has already been
    /// consumed and its token is passed in (stored on the resulting AST).
    ///
    /// Handles the multi-table form (`DELETE t1, t2 FROM ...`), dialects
    /// where FROM may be omitted, plus USING, WHERE, RETURNING, OUTPUT,
    /// ORDER BY and LIMIT clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Without an immediate FROM: either the dialect allows omitting it
        // entirely, or this is the `DELETE <tables> FROM ...` form.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            // Record whether FROM was literally present so the original
            // spelling can be distinguished in the AST.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
14420
    /// Parses a `KILL [CONNECTION | QUERY | MUTATION] <id>` statement.
    /// `MUTATION` is accepted only for ClickHouse and the generic dialect;
    /// elsewhere it is rejected with an error.
    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
        let modifier_keyword =
            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);

        let id = self.parse_literal_uint()?;

        let modifier = match modifier_keyword {
            Some(Keyword::CONNECTION) => Some(KillType::Connection),
            Some(Keyword::QUERY) => Some(KillType::Query),
            Some(Keyword::MUTATION) => {
                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
                    Some(KillType::Mutation)
                } else {
                    // expected_ref always errors; the `?` propagates it, so
                    // this arm never yields a value.
                    self.expected_ref(
                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
                        self.peek_token_ref(),
                    )?
                }
            }
            // No modifier keyword present.
            _ => None,
        };

        Ok(Statement::Kill { modifier, id })
    }
14447
    /// Parses an EXPLAIN/DESCRIBE statement; which alias introduced it is
    /// recorded via `describe_alias`.
    ///
    /// If a full statement follows, a [`Statement::Explain`] is produced
    /// (nested EXPLAIN is rejected); otherwise the input is treated as a
    /// table description and a [`Statement::ExplainTable`] is returned.
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options (`EXPLAIN (opt, ...)`) take
        // precedence over the keyword-style modifiers below.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            // ANALYZE / VERBOSE / FORMAT modifiers, each optional.
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // Nested EXPLAIN is rejected outright.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // No statement parsed: fall back to describing a table.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                // Some dialects require `DESCRIBE TABLE <name>`.
                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14518
    /// Parses a complete query: optional WITH clause, a body (SELECT / set
    /// operation, or an INSERT/UPDATE/DELETE/MERGE used as a query body),
    /// and the query-level suffixes — ORDER BY, LIMIT/OFFSET, settings,
    /// FETCH, FOR/locking clauses, FORMAT, and pipe operators.
    ///
    /// Recursion depth is bounded via `recursion_counter`; the guard
    /// restores the budget when this frame returns.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // DML bodies carry none of the query-level suffix clauses, so each
        // of these branches returns the bare Query immediately.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            // Ordinary query body followed by the suffix clauses, parsed in
            // the fixed order below.
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // Each FOR is either a for-clause (terminates the loop) or a
            // locking clause (may repeat).
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // Dialect-gated trailing `FORMAT <ident> | NULL` clause.
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            // Dialect-gated `|>` pipe-operator suffix chain.
            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14650
    /// Parses a sequence of pipe operators (`|> SELECT ...`, `|> WHERE ...`,
    /// etc.) that may follow a query body in dialects that support pipe
    /// syntax. Returns the operators in source order.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        // Each operator starts with the `|>` token followed by one of the
        // keywords below, which selects the operator kind.
        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    let expr = self.parse_expr()?;
                    // LIMIT may carry an optional OFFSET.
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // `AGGREGATE [exprs] [GROUP BY exprs]` — the full-table
                    // aggregate list is absent when GROUP BY follows directly.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::INTERSECT => {
                    // INTERSECT/EXCEPT pipe operators require an explicit
                    // DISTINCT (or ALL) set quantifier.
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    // `CALL fn(args) [alias]` — the parsed expression must be
                    // an actual function call.
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // `PIVOT (aggs FOR col IN (values)) [alias]` — note the
                    // nested parentheses around the IN list.
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        // `IN (ANY [ORDER BY ...])`
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // `UNPIVOT (value FOR name IN (cols)) [alias]`.
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Rewind so parse_joins sees the join keyword itself.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                unhandled => {
                    // Unreachable as long as the keyword list above matches
                    // the match arms.
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
14869
14870 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
14871 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
14872 {
14873 let key_values = self.parse_comma_separated(|p| {
14874 let key = p.parse_identifier()?;
14875 p.expect_token(&Token::Eq)?;
14876 let value = p.parse_expr()?;
14877 Ok(Setting { key, value })
14878 })?;
14879 Some(key_values)
14880 } else {
14881 None
14882 };
14883 Ok(settings)
14884 }
14885
14886 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
14888 if self.parse_keyword(Keyword::XML) {
14889 Ok(Some(self.parse_for_xml()?))
14890 } else if self.parse_keyword(Keyword::JSON) {
14891 Ok(Some(self.parse_for_json()?))
14892 } else if self.parse_keyword(Keyword::BROWSE) {
14893 Ok(Some(ForClause::Browse))
14894 } else {
14895 Ok(None)
14896 }
14897 }
14898
    /// Parses a `FOR XML` clause body (after `FOR XML` has been consumed):
    /// the mode (`RAW`/`AUTO`/`EXPLICIT`/`PATH`) followed by optional
    /// comma-separated directives (`ELEMENTS`, `BINARY BASE64`, `ROOT(..)`,
    /// `TYPE`).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            // RAW takes an optional parenthesized element name.
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            // PATH likewise takes an optional parenthesized element name.
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        // Trailing directives are comma-separated and may appear in any
        // order; unrecognized directives after a comma are silently ignored.
        while self.peek_token_ref().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                self.expect_keyword_is(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
14953
14954 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
14956 let for_json = if self.parse_keyword(Keyword::AUTO) {
14957 ForJson::Auto
14958 } else if self.parse_keyword(Keyword::PATH) {
14959 ForJson::Path
14960 } else {
14961 return Err(ParserError::ParserError(
14962 "Expected FOR JSON [AUTO | PATH ]".to_string(),
14963 ));
14964 };
14965 let mut root = None;
14966 let mut include_null_values = false;
14967 let mut without_array_wrapper = false;
14968 while self.peek_token_ref().token == Token::Comma {
14969 self.next_token();
14970 if self.parse_keyword(Keyword::ROOT) {
14971 self.expect_token(&Token::LParen)?;
14972 root = Some(self.parse_literal_string()?);
14973 self.expect_token(&Token::RParen)?;
14974 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
14975 include_null_values = true;
14976 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
14977 without_array_wrapper = true;
14978 }
14979 }
14980 Ok(ForClause::Json {
14981 for_json,
14982 root,
14983 include_null_values,
14984 without_array_wrapper,
14985 })
14986 }
14987
    /// Parses a single common table expression after `WITH`:
    /// `name [(columns)] AS [MATERIALIZED | NOT MATERIALIZED] (query)`,
    /// plus dialect extensions (CTE without `AS`, trailing `FROM <ident>`).
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // Dialect extension: `WITH name (query)` with no AS keyword. Only
        // attempted when the next token is not AS; `maybe_parse` backtracks
        // if the parenthesized text does not parse as a query.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Optional column list before AS; when the dialect makes AS optional
        // the keyword may also be omitted after the column list.
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // PostgreSQL-only: [NOT] MATERIALIZED between AS and the query.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        // Trailing `FROM <ident>` is only accepted for FROM-first dialects.
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
15062
    /// Parses the body of a query expression — a `SELECT`, a parenthesized
    /// subquery, a `VALUES`/`VALUE` list, or a `TABLE` clause — then folds in
    /// any trailing set operators binding tighter than `precedence` via
    /// [`Self::parse_remaining_set_exprs`].
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // Parenthesized subquery, e.g. `(SELECT ...) UNION ...`.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, false)?)
        } else if self.parse_keyword(Keyword::VALUE) {
            // Singular `VALUE` variant of the VALUES list.
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, true)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected_ref(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token_ref(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
15100
    /// Given an already-parsed leading set expression, keeps folding in
    /// `UNION` / `EXCEPT` / `INTERSECT` / `MINUS` operands while the next
    /// operator binds tighter than `precedence` (precedence climbing).
    fn parse_remaining_set_exprs(
        &mut self,
        mut expr: SetExpr,
        precedence: u8,
    ) -> Result<Box<SetExpr>, ParserError> {
        loop {
            let op = self.parse_set_operator(&self.peek_token().token);
            let next_precedence = match op {
                // UNION, EXCEPT and MINUS bind more loosely than INTERSECT.
                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
                    10
                }
                Some(SetOperator::Intersect) => 20,
                // No more set operators: stop folding.
                None => break,
            };
            if precedence >= next_precedence {
                break;
            }
            self.next_token(); // consume the set operator keyword
            let set_quantifier = self.parse_set_quantifier(&op);
            expr = SetExpr::SetOperation {
                left: Box::new(expr),
                op: op.unwrap(),
                set_quantifier,
                right: self.parse_query_body(next_precedence)?,
            };
        }

        Ok(expr.into())
    }
15137
15138 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
15140 match token {
15141 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
15142 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
15143 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
15144 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
15145 _ => None,
15146 }
15147 }
15148
15149 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
15151 match op {
15152 Some(
15153 SetOperator::Except
15154 | SetOperator::Intersect
15155 | SetOperator::Union
15156 | SetOperator::Minus,
15157 ) => {
15158 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
15159 SetQuantifier::DistinctByName
15160 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15161 SetQuantifier::ByName
15162 } else if self.parse_keyword(Keyword::ALL) {
15163 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15164 SetQuantifier::AllByName
15165 } else {
15166 SetQuantifier::All
15167 }
15168 } else if self.parse_keyword(Keyword::DISTINCT) {
15169 SetQuantifier::Distinct
15170 } else {
15171 SetQuantifier::None
15172 }
15173 }
15174 _ => SetQuantifier::None,
15175 }
15176 }
15177
    /// Parses a "bare" `SELECT` (no CTEs, set operators, or ORDER BY),
    /// including dialect extensions: FROM-first selects, comment optimizer
    /// hints, MySQL select modifiers, `TOP`, lateral views, `PREWHERE`,
    /// `CONNECT BY`, `CLUSTER/DISTRIBUTE/SORT BY`, `QUALIFY` and named
    /// windows.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // FROM-first form: `FROM t [SELECT ...]`. When no SELECT follows,
        // the statement is just `FROM ...` and we return early with an
        // otherwise-empty Select.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        let value_table_mode = self.parse_value_table_mode()?;

        // MySQL-style modifiers may also yield the DISTINCT quantifier.
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // TOP may come before or after DISTINCT depending on the dialect;
        // `top_before_distinct` records which form was used.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM ...` with no projection at all.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Use the FROM clause parsed up front (FROM-first form) if present,
        // otherwise parse an optional trailing FROM.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive-style `LATERAL VIEW [OUTER] expr name [aliases]`, zero or
        // more times.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                // Aliases stop at keywords that can start the next clause.
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order;
        // `window_before_qualify` preserves the original order for display.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15405
    /// Collects optimizer hints written as comments right after `SELECT`,
    /// e.g. `/*+ MAX_EXECUTION_TIME(1000) */` or `--+ hint`. Returns an
    /// empty list when the dialect does not support comment-style hints.
    fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
        let supports_hints = self.dialect.supports_comment_optimizer_hint();
        if !supports_hints {
            return Ok(vec![]);
        }
        let mut hints = vec![];
        // Walk raw (non-skipping) tokens so comments are visible; stop at
        // the first token that is neither whitespace nor a comment.
        loop {
            let t = self.peek_nth_token_no_skip_ref(0);
            let Token::Whitespace(ws) = &t.token else {
                break;
            };
            match ws {
                Whitespace::SingleLineComment { comment, prefix } => {
                    // Only comments of the form `<alnum-prefix>+<text>` are
                    // hints; other comments are consumed and ignored.
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::SingleLine {
                                prefix: prefix.clone(),
                            },
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::MultiLineComment(comment) => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::MultiLine,
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
                    self.next_token_no_skip();
                }
            }
        }
        Ok(hints)
    }
15455
15456 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15459 let (before_plus, text) = comment.split_once('+')?;
15460 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15461 Some((before_plus.to_string(), text.to_string()))
15462 } else {
15463 None
15464 }
15465 }
15466
    /// Parses MySQL select modifiers that may follow `SELECT`
    /// (`HIGH_PRIORITY`, `STRAIGHT_JOIN`, `SQL_*`, ...), possibly
    /// interleaved with `ALL` / `DISTINCT` / `DISTINCTROW`.
    ///
    /// Returns the collected modifiers (only if at least one was set) and
    /// the distinct quantifier, if one was consumed here. A second
    /// distinct-style keyword fails the `distinct.is_none()` guards, falls
    /// to the catch-all arm, and is reported as an error.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    // Rewind so the shared helper re-consumes the keyword.
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                _ => {
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Only report modifiers when at least one flag was actually set.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15528
    /// Parses BigQuery value-table modifiers after `SELECT`:
    /// `[ALL | DISTINCT] AS VALUE` / `AS STRUCT`. Returns `Ok(None)` for
    /// other dialects or when no modifier is present.
    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
        if !dialect_of!(self is BigQueryDialect) {
            return Ok(None);
        }

        // Longer keyword sequences are tried first so `DISTINCT AS ...` is
        // not mistaken for a plain DISTINCT quantifier.
        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
            Some(ValueTableMode::DistinctAsValue)
        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
            Some(ValueTableMode::DistinctAsStruct)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
        {
            Some(ValueTableMode::AsValue)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
        {
            Some(ValueTableMode::AsStruct)
        } else if self.parse_keyword(Keyword::AS) {
            // A bare AS here must be followed by VALUE or STRUCT.
            self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
        } else {
            None
        };

        Ok(mode)
    }
15554
15555 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15559 where
15560 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15561 {
15562 let current_state = self.state;
15563 self.state = state;
15564 let res = f(self);
15565 self.state = current_state;
15566 res
15567 }
15568
    /// Parses any number of hierarchical-query clauses — `START WITH <expr>`
    /// and `CONNECT BY [NOCYCLE] <exprs>` — in the order they appear;
    /// returns an empty vec when neither is present.
    pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
        let mut clauses = Vec::with_capacity(2);
        loop {
            if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
                clauses.push(ConnectByKind::StartWith {
                    // `idx` points at the START token so the AST keeps its
                    // source location.
                    start_token: self.token_at(idx).clone().into(),
                    condition: self.parse_expr()?.into(),
                });
            } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
            {
                clauses.push(ConnectByKind::ConnectBy {
                    connect_token: self.token_at(idx).clone().into(),
                    nocycle: self.parse_keyword(Keyword::NOCYCLE),
                    // The ConnectBy parser state presumably enables
                    // CONNECT BY-specific expression handling (e.g. PRIOR)
                    // — confirm against ParserState's users.
                    relationships: self.with_state(ParserState::ConnectBy, |parser| {
                        parser.parse_comma_separated(Parser::parse_expr)
                    })?,
                });
            } else {
                break;
            }
        }
        Ok(clauses)
    }
15593
15594 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15596 let token1 = self.next_token();
15597 let token2 = self.next_token();
15598 let token3 = self.next_token();
15599
15600 let table_name;
15601 let schema_name;
15602 if token2 == Token::Period {
15603 match token1.token {
15604 Token::Word(w) => {
15605 schema_name = w.value;
15606 }
15607 _ => {
15608 return self.expected("Schema name", token1);
15609 }
15610 }
15611 match token3.token {
15612 Token::Word(w) => {
15613 table_name = w.value;
15614 }
15615 _ => {
15616 return self.expected("Table name", token3);
15617 }
15618 }
15619 Ok(Table {
15620 table_name: Some(table_name),
15621 schema_name: Some(schema_name),
15622 })
15623 } else {
15624 match token1.token {
15625 Token::Word(w) => {
15626 table_name = w.value;
15627 }
15628 _ => {
15629 return self.expected("Table name", token1);
15630 }
15631 }
15632 Ok(Table {
15633 table_name: Some(table_name),
15634 schema_name: None,
15635 })
15636 }
15637 }
15638
15639 fn parse_set_role(
15641 &mut self,
15642 modifier: Option<ContextModifier>,
15643 ) -> Result<Statement, ParserError> {
15644 self.expect_keyword_is(Keyword::ROLE)?;
15645
15646 let role_name = if self.parse_keyword(Keyword::NONE) {
15647 None
15648 } else {
15649 Some(self.parse_identifier()?)
15650 };
15651 Ok(Statement::Set(Set::SetRole {
15652 context_modifier: modifier,
15653 role_name,
15654 }))
15655 }
15656
15657 fn parse_set_values(
15658 &mut self,
15659 parenthesized_assignment: bool,
15660 ) -> Result<Vec<Expr>, ParserError> {
15661 let mut values = vec![];
15662
15663 if parenthesized_assignment {
15664 self.expect_token(&Token::LParen)?;
15665 }
15666
15667 loop {
15668 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15669 expr
15670 } else if let Ok(expr) = self.parse_expr() {
15671 expr
15672 } else {
15673 self.expected_ref("variable value", self.peek_token_ref())?
15674 };
15675
15676 values.push(value);
15677 if self.consume_token(&Token::Comma) {
15678 continue;
15679 }
15680
15681 if parenthesized_assignment {
15682 self.expect_token(&Token::RParen)?;
15683 }
15684 return Ok(values);
15685 }
15686 }
15687
15688 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15689 let modifier =
15690 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15691
15692 Self::keyword_to_modifier(modifier)
15693 }
15694
15695 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15697 let scope = self.parse_context_modifier();
15698
15699 let name = if self.dialect.supports_parenthesized_set_variables()
15700 && self.consume_token(&Token::LParen)
15701 {
15702 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15706 } else {
15707 self.parse_object_name(false)?
15708 };
15709
15710 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15711 return self.expected_ref("assignment operator", self.peek_token_ref());
15712 }
15713
15714 let value = self.parse_expr()?;
15715
15716 Ok(SetAssignment { scope, name, value })
15717 }
15718
    /// Parses a `SET` statement after the `SET` keyword has been consumed.
    ///
    /// Dispatches between the many forms: `SET HIVEVAR:name = ...`,
    /// `SET ROLE`, `SET TIME ZONE`/`TIMEZONE`, `SET NAMES`, transaction
    /// characteristics, `SET ... AUTHORIZATION`, comma-separated assignment
    /// lists, parenthesized variable lists, and plain `variable = value`.
    /// The order of the branches matters: each branch consumes tokens only
    /// when it applies (or uses `maybe_parse` to backtrack).
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive variables are written `SET HIVEVAR:name = value`.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // A scope modifier (SESSION/LOCAL/GLOBAL) never combines with HIVEVAR.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET ROLE ...` is attempted speculatively via `maybe_parse`.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE` / `SET TIMEZONE`: with an explicit `=`/`TO` this is
        // represented as an ordinary single assignment to "TIMEZONE";
        // otherwise as the dedicated SetTimeZone statement.
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // `SET NAMES [DEFAULT | <charset> [COLLATE <collation>]]`
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET [SESSION] CHARACTERISTICS AS TRANSACTION <modes>`
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT '<id>'` or `SET TRANSACTION <modes>`
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET <scope> AUTHORIZATION ...` requires an explicit scope.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            // Rewind the already-consumed scope keyword so each assignment
            // can parse its own scope modifier.
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten to SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a parenthesized variable list (`SET (a, b) = ...`) or a
        // single, possibly qualified, variable name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // Some dialects accept `SET <param> <value>` with no operator at
        // all; rewind the variable name and re-parse in that form.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
15888
15889 pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
15891 if self.parse_keyword(Keyword::STATISTICS) {
15892 let topic = match self.parse_one_of_keywords(&[
15893 Keyword::IO,
15894 Keyword::PROFILE,
15895 Keyword::TIME,
15896 Keyword::XML,
15897 ]) {
15898 Some(Keyword::IO) => SessionParamStatsTopic::IO,
15899 Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
15900 Some(Keyword::TIME) => SessionParamStatsTopic::Time,
15901 Some(Keyword::XML) => SessionParamStatsTopic::Xml,
15902 _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
15903 };
15904 let value = self.parse_session_param_value()?;
15905 Ok(
15906 Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
15907 topic,
15908 value,
15909 }))
15910 .into(),
15911 )
15912 } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
15913 let obj = self.parse_object_name(false)?;
15914 let value = self.parse_session_param_value()?;
15915 Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
15916 SetSessionParamIdentityInsert { obj, value },
15917 ))
15918 .into())
15919 } else if self.parse_keyword(Keyword::OFFSETS) {
15920 let keywords = self.parse_comma_separated(|parser| {
15921 let next_token = parser.next_token();
15922 match &next_token.token {
15923 Token::Word(w) => Ok(w.to_string()),
15924 _ => parser.expected("SQL keyword", next_token),
15925 }
15926 })?;
15927 let value = self.parse_session_param_value()?;
15928 Ok(
15929 Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
15930 keywords,
15931 value,
15932 }))
15933 .into(),
15934 )
15935 } else {
15936 let names = self.parse_comma_separated(|parser| {
15937 let next_token = parser.next_token();
15938 match next_token.token {
15939 Token::Word(w) => Ok(w.to_string()),
15940 _ => parser.expected("Session param name", next_token),
15941 }
15942 })?;
15943 let value = self.parse_expr()?.to_string();
15944 Ok(
15945 Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
15946 names,
15947 value,
15948 }))
15949 .into(),
15950 )
15951 }
15952 }
15953
15954 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
15955 if self.parse_keyword(Keyword::ON) {
15956 Ok(SessionParamValue::On)
15957 } else if self.parse_keyword(Keyword::OFF) {
15958 Ok(SessionParamValue::Off)
15959 } else {
15960 self.expected_ref("ON or OFF", self.peek_token_ref())
15961 }
15962 }
15963
15964 pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
15966 let terse = self.parse_keyword(Keyword::TERSE);
15967 let extended = self.parse_keyword(Keyword::EXTENDED);
15968 let full = self.parse_keyword(Keyword::FULL);
15969 let session = self.parse_keyword(Keyword::SESSION);
15970 let global = self.parse_keyword(Keyword::GLOBAL);
15971 let external = self.parse_keyword(Keyword::EXTERNAL);
15972 if self
15973 .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
15974 .is_some()
15975 {
15976 Ok(self.parse_show_columns(extended, full)?)
15977 } else if self.parse_keyword(Keyword::TABLES) {
15978 Ok(self.parse_show_tables(terse, extended, full, external)?)
15979 } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
15980 Ok(self.parse_show_views(terse, true)?)
15981 } else if self.parse_keyword(Keyword::VIEWS) {
15982 Ok(self.parse_show_views(terse, false)?)
15983 } else if self.parse_keyword(Keyword::FUNCTIONS) {
15984 Ok(self.parse_show_functions()?)
15985 } else if self.parse_keyword(Keyword::PROCESSLIST) {
15986 Ok(Statement::ShowProcessList { full })
15987 } else if extended || full {
15988 Err(ParserError::ParserError(
15989 "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
15990 ))
15991 } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
15992 Ok(self.parse_show_create()?)
15993 } else if self.parse_keyword(Keyword::COLLATION) {
15994 Ok(self.parse_show_collation()?)
15995 } else if self.parse_keyword(Keyword::VARIABLES)
15996 && dialect_of!(self is MySqlDialect | GenericDialect)
15997 {
15998 Ok(Statement::ShowVariables {
15999 filter: self.parse_show_statement_filter()?,
16000 session,
16001 global,
16002 })
16003 } else if self.parse_keyword(Keyword::STATUS)
16004 && dialect_of!(self is MySqlDialect | GenericDialect)
16005 {
16006 Ok(Statement::ShowStatus {
16007 filter: self.parse_show_statement_filter()?,
16008 session,
16009 global,
16010 })
16011 } else if self.parse_keyword(Keyword::CATALOGS) {
16012 self.parse_show_catalogs(terse)
16013 } else if self.parse_keyword(Keyword::DATABASES) {
16014 self.parse_show_databases(terse)
16015 } else if self.parse_keyword(Keyword::SCHEMAS) {
16016 self.parse_show_schemas(terse)
16017 } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
16018 self.parse_show_charset(false)
16019 } else if self.parse_keyword(Keyword::CHARSET) {
16020 self.parse_show_charset(true)
16021 } else {
16022 Ok(Statement::ShowVariable {
16023 variable: self.parse_identifiers()?,
16024 })
16025 }
16026 }
16027
16028 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
16029 Ok(Statement::ShowCharset(ShowCharset {
16031 is_shorthand,
16032 filter: self.parse_show_statement_filter()?,
16033 }))
16034 }
16035
16036 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
16037 let history = self.parse_keyword(Keyword::HISTORY);
16038 let show_options = self.parse_show_stmt_options()?;
16039 Ok(Statement::ShowCatalogs {
16040 terse,
16041 history,
16042 show_options,
16043 })
16044 }
16045
16046 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
16047 let history = self.parse_keyword(Keyword::HISTORY);
16048 let show_options = self.parse_show_stmt_options()?;
16049 Ok(Statement::ShowDatabases {
16050 terse,
16051 history,
16052 show_options,
16053 })
16054 }
16055
16056 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
16057 let history = self.parse_keyword(Keyword::HISTORY);
16058 let show_options = self.parse_show_stmt_options()?;
16059 Ok(Statement::ShowSchemas {
16060 terse,
16061 history,
16062 show_options,
16063 })
16064 }
16065
16066 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
16068 let obj_type = match self.expect_one_of_keywords(&[
16069 Keyword::TABLE,
16070 Keyword::TRIGGER,
16071 Keyword::FUNCTION,
16072 Keyword::PROCEDURE,
16073 Keyword::EVENT,
16074 Keyword::VIEW,
16075 ])? {
16076 Keyword::TABLE => Ok(ShowCreateObject::Table),
16077 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
16078 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
16079 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
16080 Keyword::EVENT => Ok(ShowCreateObject::Event),
16081 Keyword::VIEW => Ok(ShowCreateObject::View),
16082 keyword => Err(ParserError::ParserError(format!(
16083 "Unable to map keyword to ShowCreateObject: {keyword:?}"
16084 ))),
16085 }?;
16086
16087 let obj_name = self.parse_object_name(false)?;
16088
16089 Ok(Statement::ShowCreate { obj_type, obj_name })
16090 }
16091
16092 pub fn parse_show_columns(
16094 &mut self,
16095 extended: bool,
16096 full: bool,
16097 ) -> Result<Statement, ParserError> {
16098 let show_options = self.parse_show_stmt_options()?;
16099 Ok(Statement::ShowColumns {
16100 extended,
16101 full,
16102 show_options,
16103 })
16104 }
16105
16106 fn parse_show_tables(
16107 &mut self,
16108 terse: bool,
16109 extended: bool,
16110 full: bool,
16111 external: bool,
16112 ) -> Result<Statement, ParserError> {
16113 let history = !external && self.parse_keyword(Keyword::HISTORY);
16114 let show_options = self.parse_show_stmt_options()?;
16115 Ok(Statement::ShowTables {
16116 terse,
16117 history,
16118 extended,
16119 full,
16120 external,
16121 show_options,
16122 })
16123 }
16124
16125 fn parse_show_views(
16126 &mut self,
16127 terse: bool,
16128 materialized: bool,
16129 ) -> Result<Statement, ParserError> {
16130 let show_options = self.parse_show_stmt_options()?;
16131 Ok(Statement::ShowViews {
16132 materialized,
16133 terse,
16134 show_options,
16135 })
16136 }
16137
16138 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
16140 let filter = self.parse_show_statement_filter()?;
16141 Ok(Statement::ShowFunctions { filter })
16142 }
16143
16144 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
16146 let filter = self.parse_show_statement_filter()?;
16147 Ok(Statement::ShowCollation { filter })
16148 }
16149
16150 pub fn parse_show_statement_filter(
16152 &mut self,
16153 ) -> Result<Option<ShowStatementFilter>, ParserError> {
16154 if self.parse_keyword(Keyword::LIKE) {
16155 Ok(Some(ShowStatementFilter::Like(
16156 self.parse_literal_string()?,
16157 )))
16158 } else if self.parse_keyword(Keyword::ILIKE) {
16159 Ok(Some(ShowStatementFilter::ILike(
16160 self.parse_literal_string()?,
16161 )))
16162 } else if self.parse_keyword(Keyword::WHERE) {
16163 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
16164 } else {
16165 self.maybe_parse(|parser| -> Result<String, ParserError> {
16166 parser.parse_literal_string()
16167 })?
16168 .map_or(Ok(None), |filter| {
16169 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
16170 })
16171 }
16172 }
16173
16174 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
16176 let parsed_keyword = if dialect_of!(self is HiveDialect) {
16178 if self.parse_keyword(Keyword::DEFAULT) {
16180 return Ok(Statement::Use(Use::Default));
16181 }
16182 None } else if dialect_of!(self is DatabricksDialect) {
16184 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
16185 } else if dialect_of!(self is SnowflakeDialect) {
16186 self.parse_one_of_keywords(&[
16187 Keyword::DATABASE,
16188 Keyword::SCHEMA,
16189 Keyword::WAREHOUSE,
16190 Keyword::ROLE,
16191 Keyword::SECONDARY,
16192 ])
16193 } else {
16194 None };
16196
16197 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
16198 self.parse_secondary_roles()?
16199 } else {
16200 let obj_name = self.parse_object_name(false)?;
16201 match parsed_keyword {
16202 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
16203 Some(Keyword::DATABASE) => Use::Database(obj_name),
16204 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
16205 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
16206 Some(Keyword::ROLE) => Use::Role(obj_name),
16207 _ => Use::Object(obj_name),
16208 }
16209 };
16210
16211 Ok(Statement::Use(result))
16212 }
16213
16214 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
16215 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
16216 if self.parse_keyword(Keyword::NONE) {
16217 Ok(Use::SecondaryRoles(SecondaryRoles::None))
16218 } else if self.parse_keyword(Keyword::ALL) {
16219 Ok(Use::SecondaryRoles(SecondaryRoles::All))
16220 } else {
16221 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16222 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
16223 }
16224 }
16225
16226 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
16228 let relation = self.parse_table_factor()?;
16229 let joins = self.parse_joins()?;
16233 Ok(TableWithJoins { relation, joins })
16234 }
16235
    /// Parses zero or more join clauses following a table factor.
    ///
    /// Recognizes `CROSS JOIN`/`CROSS APPLY`, `OUTER APPLY`, `ASOF JOIN`,
    /// and the keyword-prefixed family (`[NATURAL] [INNER|LEFT|RIGHT|FULL]
    /// [OUTER|SEMI|ANTI] JOIN`, `STRAIGHT_JOIN`). Stops (breaking the loop)
    /// at the first token that does not start a join.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // ClickHouse-style `GLOBAL` prefix on any join.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow an ON/USING constraint even on CROSS
                // JOIN; re-wrap the operator with it when supported.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // A bare leading OUTER is only valid as `OUTER APPLY`.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // Snowflake-style `ASOF JOIN ... MATCH_CONDITION (...)`.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm evaluates to a `JoinOperator` tuple-variant
                // constructor; the constraint is applied to it at the end.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER); self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        let _ = self.next_token(); let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        let _ = self.next_token(); let _ = self.parse_keyword(Keyword::OUTER); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // Handled above when it starts the clause; here it
                        // can only follow LEFT/RIGHT/FULL, so it is an error.
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token(); JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: no more joins to parse.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // For dialects without left-associative unparenthesized
                // joins, a following join keyword means the rest binds to
                // this relation as a nested join.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16404
16405 fn peek_parens_less_nested_join(&self) -> bool {
16406 matches!(
16407 self.peek_token_ref().token,
16408 Token::Word(Word {
16409 keyword: Keyword::JOIN
16410 | Keyword::INNER
16411 | Keyword::LEFT
16412 | Keyword::RIGHT
16413 | Keyword::FULL,
16414 ..
16415 })
16416 )
16417 }
16418
    /// Parses a single table factor: a named table, derived table
    /// (subquery), table function, `UNNEST`, `JSON_TABLE`/`OPENJSON`/
    /// `XMLTABLE`, `VALUES` list, nested join in parentheses, a Snowflake
    /// stage, or a `LATERAL` variant — including trailing PIVOT/UNPIVOT,
    /// MATCH_RECOGNIZE, sampling, hints and aliases where applicable.
    ///
    /// Recursion depth is bounded by `recursion_counter` (and optionally by
    /// the `recursive` crate's stack-growing attribute).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL must be followed by a subquery or a function call.
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // `TABLE(<expr>)` table function.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // First try `(subquery)` speculatively; on failure fall through
            // to parsing the parenthesized contents as table-and-joins.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                // A derived table may be wrapped by chained PIVOT/UNPIVOT.
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // `(a JOIN b ...)`: a genuine nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                }) } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // `((a JOIN b ...))`: extra parentheses around a nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if self.dialect.supports_parens_around_table_factor() {
                // `(<table factor>)` with no joins: unwrap, and attach any
                // outer alias to the inner relation (rejecting duplicates).
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                self.expected_ref("joined table", self.peek_token_ref())
            }
        } else if self.dialect.supports_values_as_table_factor()
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // Bare `VALUES (...)` used directly as a table factor; modeled
            // as a derived table whose body is the VALUES list.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
                sample: None,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // `WITH OFFSET` is optional; a failed expect leaves it false.
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // `JSON_TABLE(<expr>, <path> COLUMNS (...))`
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Rewind the LParen so the dedicated parser sees the full call.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else if self.peek_token_ref().token == Token::AtSign {
            // `@stage` reference (Snowflake).
            self.parse_snowflake_stage_table_factor()
        } else {
            // Plain (possibly qualified) table name with its many optional
            // suffixes, parsed in grammar order.
            let name = self.parse_object_name(true)?;

            // PartiQL-style JSON path directly after the name.
            let json_path = match &self.peek_token_ref().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            // MySQL `PARTITION (p0, p1, ...)` selection.
            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Time-travel / versioning clause, if the dialect has one.
            let version = self.maybe_parse_table_version()?;

            // `name(args)` — a table-valued function call.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Sampling can come before or after the alias, per dialect.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            // MySQL-style index hints (USE/FORCE/IGNORE INDEX ...).
            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // MSSQL-style `WITH (<hints>)`; a bare WITH belongs to a later
            // clause, so rewind if no parenthesis follows.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Chained PIVOT/UNPIVOT wrap the table factor built so far.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
16755
16756 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16761 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16763
16764 let args = if self.consume_token(&Token::LParen) {
16766 Some(self.parse_table_function_args()?)
16767 } else {
16768 None
16769 };
16770
16771 let alias = self.maybe_parse_table_alias()?;
16772
16773 Ok(TableFactor::Table {
16774 name,
16775 alias,
16776 args,
16777 with_hints: vec![],
16778 version: None,
16779 partitions: vec![],
16780 with_ordinality: false,
16781 json_path: None,
16782 sample: None,
16783 index_hints: vec![],
16784 })
16785 }
16786
16787 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16788 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16789 TableSampleModifier::TableSample
16790 } else if self.parse_keyword(Keyword::SAMPLE) {
16791 TableSampleModifier::Sample
16792 } else {
16793 return Ok(None);
16794 };
16795 self.parse_table_sample(modifier).map(Some)
16796 }
16797
    /// Parses the body of a sampling clause after `TABLESAMPLE`/`SAMPLE`:
    /// an optional method name, then either a Hive-style `BUCKET x OUT OF y
    /// [ON expr]` or a quantity (with optional ROWS/PERCENT unit), followed
    /// by optional `REPEATABLE`/`SEED` and `OFFSET` suffixes.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // Whether the quantity/bucket spec is wrapped in parentheses; the
        // matching RParen is consumed after the spec below.
        let parenthesized = self.consume_token(&Token::LParen);

        // Exactly one of `quantity` or `bucket` is produced.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // If no expression parses, accept a bare word (e.g. a byte
            // length like `100M`) as a placeholder value.
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
16892
16893 fn parse_table_sample_seed(
16894 &mut self,
16895 modifier: TableSampleSeedModifier,
16896 ) -> Result<TableSampleSeed, ParserError> {
16897 self.expect_token(&Token::LParen)?;
16898 let value = self.parse_number_value()?;
16899 self.expect_token(&Token::RParen)?;
16900 Ok(TableSampleSeed { modifier, value })
16901 }
16902
16903 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16906 self.expect_token(&Token::LParen)?;
16907 let json_expr = self.parse_expr()?;
16908 let json_path = if self.consume_token(&Token::Comma) {
16909 Some(self.parse_value()?)
16910 } else {
16911 None
16912 };
16913 self.expect_token(&Token::RParen)?;
16914 let columns = if self.parse_keyword(Keyword::WITH) {
16915 self.expect_token(&Token::LParen)?;
16916 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
16917 self.expect_token(&Token::RParen)?;
16918 columns
16919 } else {
16920 Vec::new()
16921 };
16922 let alias = self.maybe_parse_table_alias()?;
16923 Ok(TableFactor::OpenJsonTable {
16924 json_expr,
16925 json_path,
16926 columns,
16927 alias,
16928 })
16929 }
16930
16931 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16932 self.expect_token(&Token::LParen)?;
16933 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
16934 self.expect_token(&Token::LParen)?;
16935 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
16936 self.expect_token(&Token::RParen)?;
16937 self.expect_token(&Token::Comma)?;
16938 namespaces
16939 } else {
16940 vec![]
16941 };
16942 let row_expression = self.parse_expr()?;
16943 let passing = self.parse_xml_passing_clause()?;
16944 self.expect_keyword_is(Keyword::COLUMNS)?;
16945 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
16946 self.expect_token(&Token::RParen)?;
16947 let alias = self.maybe_parse_table_alias()?;
16948 Ok(TableFactor::XmlTable {
16949 namespaces,
16950 row_expression,
16951 passing,
16952 columns,
16953 alias,
16954 })
16955 }
16956
16957 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
16958 let uri = self.parse_expr()?;
16959 self.expect_keyword_is(Keyword::AS)?;
16960 let name = self.parse_identifier()?;
16961 Ok(XmlNamespaceDefinition { uri, name })
16962 }
16963
16964 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
16965 let name = self.parse_identifier()?;
16966
16967 let option = if self.parse_keyword(Keyword::FOR) {
16968 self.expect_keyword(Keyword::ORDINALITY)?;
16969 XmlTableColumnOption::ForOrdinality
16970 } else {
16971 let r#type = self.parse_data_type()?;
16972 let mut path = None;
16973 let mut default = None;
16974
16975 if self.parse_keyword(Keyword::PATH) {
16976 path = Some(self.parse_expr()?);
16977 }
16978
16979 if self.parse_keyword(Keyword::DEFAULT) {
16980 default = Some(self.parse_expr()?);
16981 }
16982
16983 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
16984 if !not_null {
16985 let _ = self.parse_keyword(Keyword::NULL);
16987 }
16988
16989 XmlTableColumnOption::NamedInfo {
16990 r#type,
16991 path,
16992 default,
16993 nullable: !not_null,
16994 }
16995 };
16996 Ok(XmlTableColumn { name, option })
16997 }
16998
16999 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
17000 let mut arguments = vec![];
17001 if self.parse_keyword(Keyword::PASSING) {
17002 loop {
17003 let by_value =
17004 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
17005 let expr = self.parse_expr()?;
17006 let alias = if self.parse_keyword(Keyword::AS) {
17007 Some(self.parse_identifier()?)
17008 } else {
17009 None
17010 };
17011 arguments.push(XmlPassingArgument {
17012 expr,
17013 alias,
17014 by_value,
17015 });
17016 if !self.consume_token(&Token::Comma) {
17017 break;
17018 }
17019 }
17020 }
17021 Ok(XmlPassingClause { arguments })
17022 }
17023
17024 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17026 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
17027 self.expect_token(&Token::LParen)?;
17028
17029 let name = self.parse_object_name(true)?;
17030
17031 let mut dimensions = Vec::new();
17033 let mut metrics = Vec::new();
17034 let mut facts = Vec::new();
17035 let mut where_clause = None;
17036
17037 while self.peek_token_ref().token != Token::RParen {
17038 if self.parse_keyword(Keyword::DIMENSIONS) {
17039 if !dimensions.is_empty() {
17040 return Err(ParserError::ParserError(
17041 "DIMENSIONS clause can only be specified once".to_string(),
17042 ));
17043 }
17044 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17045 } else if self.parse_keyword(Keyword::METRICS) {
17046 if !metrics.is_empty() {
17047 return Err(ParserError::ParserError(
17048 "METRICS clause can only be specified once".to_string(),
17049 ));
17050 }
17051 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17052 } else if self.parse_keyword(Keyword::FACTS) {
17053 if !facts.is_empty() {
17054 return Err(ParserError::ParserError(
17055 "FACTS clause can only be specified once".to_string(),
17056 ));
17057 }
17058 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17059 } else if self.parse_keyword(Keyword::WHERE) {
17060 if where_clause.is_some() {
17061 return Err(ParserError::ParserError(
17062 "WHERE clause can only be specified once".to_string(),
17063 ));
17064 }
17065 where_clause = Some(self.parse_expr()?);
17066 } else {
17067 let tok = self.peek_token_ref();
17068 return parser_err!(
17069 format!(
17070 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
17071 tok.token
17072 ),
17073 tok.span.start
17074 )?;
17075 }
17076 }
17077
17078 self.expect_token(&Token::RParen)?;
17079
17080 let alias = self.maybe_parse_table_alias()?;
17081
17082 Ok(TableFactor::SemanticView {
17083 name,
17084 dimensions,
17085 metrics,
17086 facts,
17087 where_clause,
17088 alias,
17089 })
17090 }
17091
    /// Parses the body of a `MATCH_RECOGNIZE (...)` suffix applied to
    /// `table`: optional PARTITION BY / ORDER BY / MEASURES /
    /// rows-per-match / AFTER MATCH SKIP clauses, the mandatory PATTERN and
    /// DEFINE clauses, and a trailing optional table alias.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // MEASURES <expr> [AS] <alias>, ... — the AS keyword is optional.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // ONE ROW PER MATCH | ALL ROWS PER MATCH [empty-match handling].
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // AFTER MATCH SKIP {PAST LAST ROW | TO NEXT ROW | TO FIRST <sym> |
        // TO LAST <sym>} — any other continuation is an error.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        self.expect_keyword_is(Keyword::DEFINE)?;

        // DEFINE <symbol> AS <expr>, ... — here AS is mandatory.
        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
17196
    /// Parses an atomic element of a MATCH_RECOGNIZE row pattern: the `^`/`$`
    /// anchors, an exclusion `{- sym -}`, `PERMUTE(...)`, a parenthesized
    /// group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` reaches the parser as a placeholder token, not a dedicated
            // token kind.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // Exclusion syntax: `{- symbol -}`.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // PERMUTE is matched as a bare unquoted word rather than a
            // reserved keyword.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: back up one token and parse as a named symbol.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
17235
    /// Parses a base pattern followed by any number of postfix repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, `{n,m}`.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` reaches the parser as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    // `token` is rebound to the first token inside the braces.
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            // NOTE(review): the span passed here is the
                            // comma's, not the number's — confirm intentional.
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,}` or `{n,m}`.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: push the token back and stop looping.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17290
17291 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17292 let mut patterns = vec![self.parse_repetition_pattern()?];
17293 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17294 patterns.push(self.parse_repetition_pattern()?);
17295 }
17296 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17297 Ok([pattern]) => Ok(pattern),
17298 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17299 }
17300 }
17301
17302 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17303 let pattern = self.parse_concat_pattern()?;
17304 if self.consume_token(&Token::Pipe) {
17305 match self.parse_pattern()? {
17306 MatchRecognizePattern::Alternation(mut patterns) => {
17308 patterns.insert(0, pattern);
17309 Ok(MatchRecognizePattern::Alternation(patterns))
17310 }
17311 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17312 }
17313 } else {
17314 Ok(pattern)
17315 }
17316 }
17317
17318 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
17320 if self.dialect.supports_table_versioning() {
17321 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
17322 {
17323 let expr = self.parse_expr()?;
17324 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
17325 } else if self.peek_keyword(Keyword::CHANGES) {
17326 return self.parse_table_version_changes().map(Some);
17327 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
17328 let func_name = self.parse_object_name(true)?;
17329 let func = self.parse_function(func_name)?;
17330 return Ok(Some(TableVersion::Function(func)));
17331 } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
17332 let expr = self.parse_expr()?;
17333 return Ok(Some(TableVersion::TimestampAsOf(expr)));
17334 } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
17335 let expr = Expr::Value(self.parse_number_value()?);
17336 return Ok(Some(TableVersion::VersionAsOf(expr)));
17337 }
17338 }
17339 Ok(None)
17340 }
17341
17342 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
17353 let changes_name = self.parse_object_name(true)?;
17354 let changes = self.parse_function(changes_name)?;
17355 let at_name = self.parse_object_name(true)?;
17356 let at = self.parse_function(at_name)?;
17357 let end = if self.peek_keyword(Keyword::END) {
17358 let end_name = self.parse_object_name(true)?;
17359 Some(self.parse_function(end_name)?)
17360 } else {
17361 None
17362 };
17363 Ok(TableVersion::Changes { changes, at, end })
17364 }
17365
    /// Parses a single column definition inside a `JSON_TABLE(... COLUMNS
    /// (...))` clause: a `NESTED [PATH] '<path>' COLUMNS (...)` group, a
    /// `<name> FOR ORDINALITY` counter column, or a typed column with a PATH
    /// and optional ON EMPTY / ON ERROR handling.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional before the path literal.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            // Nested column lists may recurse arbitrarily.
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        // `<type> EXISTS PATH ...` tests path existence instead of
        // extracting the value.
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?;
        let mut on_empty = None;
        let mut on_error = None;
        // `{NULL | ERROR | DEFAULT <v>} ON {EMPTY | ERROR}` clauses may
        // appear in any order; a repeated clause overwrites the earlier one.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
17409
17410 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17418 let name = self.parse_identifier()?;
17419 let r#type = self.parse_data_type()?;
17420 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17421 self.next_token();
17422 Some(path)
17423 } else {
17424 None
17425 };
17426 let as_json = self.parse_keyword(Keyword::AS);
17427 if as_json {
17428 self.expect_keyword_is(Keyword::JSON)?;
17429 }
17430 Ok(OpenJsonTableColumn {
17431 name,
17432 r#type,
17433 path,
17434 as_json,
17435 })
17436 }
17437
17438 fn parse_json_table_column_error_handling(
17439 &mut self,
17440 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17441 let res = if self.parse_keyword(Keyword::NULL) {
17442 JsonTableColumnErrorHandling::Null
17443 } else if self.parse_keyword(Keyword::ERROR) {
17444 JsonTableColumnErrorHandling::Error
17445 } else if self.parse_keyword(Keyword::DEFAULT) {
17446 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17447 } else {
17448 return Ok(None);
17449 };
17450 self.expect_keyword_is(Keyword::ON)?;
17451 Ok(Some(res))
17452 }
17453
17454 pub fn parse_derived_table_factor(
17456 &mut self,
17457 lateral: IsLateral,
17458 ) -> Result<TableFactor, ParserError> {
17459 let subquery = self.parse_query()?;
17460 self.expect_token(&Token::RParen)?;
17461 let alias = self.maybe_parse_table_alias()?;
17462
17463 let sample = self
17465 .maybe_parse_table_sample()?
17466 .map(TableSampleKind::AfterTableAlias);
17467
17468 Ok(TableFactor::Derived {
17469 lateral: match lateral {
17470 Lateral => true,
17471 NotLateral => false,
17472 },
17473 subquery,
17474 alias,
17475 sample,
17476 })
17477 }
17478
17479 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17502 let expr = self.parse_expr()?;
17503 let alias = if self.parse_keyword(Keyword::AS) {
17504 Some(self.parse_identifier()?)
17505 } else {
17506 None
17507 };
17508
17509 Ok(ExprWithAlias { expr, alias })
17510 }
17511
17512 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17516 let expr = self.parse_expr()?;
17517 let alias = self.parse_identifier_optional_alias()?;
17518 Ok(ExprWithAlias { expr, alias })
17519 }
17520
17521 fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
17523 let function_name = match self.next_token().token {
17524 Token::Word(w) => Ok(w.value),
17525 _ => self.expected_ref("a function identifier", self.peek_token_ref()),
17526 }?;
17527 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
17528 let alias = {
17529 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
17530 kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
17532 }
17533 self.parse_optional_alias_inner(None, validator)?
17534 };
17535 Ok(ExprWithAlias { expr, alias })
17536 }
17537
    /// Parses the body of a `PIVOT (...)` suffix on `table`: the aggregate
    /// function list, `FOR <column(s)> IN (<source>)`, and the optional
    /// `DEFAULT ON NULL (<expr>)` clause, followed by an optional alias.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions =
            self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // Either a parenthesized list of pivot columns or a single column.
        // Parsing is capped below BETWEEN precedence so the following IN
        // keyword is not consumed as part of the expression.
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        self.expect_token(&Token::LParen)?;
        // Pivot value source: `ANY [ORDER BY ...]`, a subquery, or an
        // explicit list of values with optional aliases.
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(
                self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
            )
        };
        self.expect_token(&Token::RParen)?;

        // Optional replacement value for NULL pivot results.
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
17594
17595 pub fn parse_unpivot_table_factor(
17597 &mut self,
17598 table: TableFactor,
17599 ) -> Result<TableFactor, ParserError> {
17600 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17601 self.expect_keyword_is(Keyword::NULLS)?;
17602 Some(NullInclusion::IncludeNulls)
17603 } else if self.parse_keyword(Keyword::EXCLUDE) {
17604 self.expect_keyword_is(Keyword::NULLS)?;
17605 Some(NullInclusion::ExcludeNulls)
17606 } else {
17607 None
17608 };
17609 self.expect_token(&Token::LParen)?;
17610 let value = self.parse_expr()?;
17611 self.expect_keyword_is(Keyword::FOR)?;
17612 let name = self.parse_identifier()?;
17613 self.expect_keyword_is(Keyword::IN)?;
17614 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17615 p.parse_expr_with_alias()
17616 })?;
17617 self.expect_token(&Token::RParen)?;
17618 let alias = self.maybe_parse_table_alias()?;
17619 Ok(TableFactor::Unpivot {
17620 table: Box::new(table),
17621 value,
17622 null_inclusion,
17623 name,
17624 columns,
17625 alias,
17626 })
17627 }
17628
17629 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17631 if natural {
17632 Ok(JoinConstraint::Natural)
17633 } else if self.parse_keyword(Keyword::ON) {
17634 let constraint = self.parse_expr()?;
17635 Ok(JoinConstraint::On(constraint))
17636 } else if self.parse_keyword(Keyword::USING) {
17637 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17638 Ok(JoinConstraint::Using(columns))
17639 } else {
17640 Ok(JoinConstraint::None)
17641 }
17643 }
17644
17645 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17647 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17648
17649 self.expect_keyword_is(Keyword::TO)?;
17650 let grantees = self.parse_grantees()?;
17651
17652 let with_grant_option =
17653 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17654
17655 let current_grants =
17656 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17657 Some(CurrentGrantsKind::CopyCurrentGrants)
17658 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17659 Some(CurrentGrantsKind::RevokeCurrentGrants)
17660 } else {
17661 None
17662 };
17663
17664 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17665 Some(self.parse_identifier()?)
17666 } else {
17667 None
17668 };
17669
17670 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17671 Some(self.parse_identifier()?)
17672 } else {
17673 None
17674 };
17675
17676 Ok(Grant {
17677 privileges,
17678 objects,
17679 grantees,
17680 with_grant_option,
17681 as_grantor,
17682 granted_by,
17683 current_grants,
17684 })
17685 }
17686
    /// Parses the comma-separated grantee list of a GRANT statement. A
    /// grantee-type keyword (ROLE, USER, SHARE, GROUP, PUBLIC, ...) is
    /// sticky: once seen, it carries over to subsequent grantees until
    /// another type keyword overrides it.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No keyword consumed: carry over the previous grantee type.
                grantee_type.clone()
            };

            // Dialects may reserve certain grantee-type words as ordinary
            // identifiers; in that case un-consume the keyword so it is
            // parsed as part of the grantee name instead.
            // NOTE(review): this backs up a single token, while some branches
            // above consume two keywords (e.g. DATABASE ROLE) — confirm
            // reserved types are limited to single-keyword variants.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC carries no name.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // A colon-joined name (`a:b`) is folded back into a
                    // single identifier.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17754
17755 pub fn parse_grant_deny_revoke_privileges_objects(
17757 &mut self,
17758 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17759 let privileges = if self.parse_keyword(Keyword::ALL) {
17760 Privileges::All {
17761 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17762 }
17763 } else {
17764 let actions = self.parse_actions_list()?;
17765 Privileges::Actions(actions)
17766 };
17767
17768 let objects = if self.parse_keyword(Keyword::ON) {
17769 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17770 Some(GrantObjects::AllTablesInSchema {
17771 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17772 })
17773 } else if self.parse_keywords(&[
17774 Keyword::ALL,
17775 Keyword::EXTERNAL,
17776 Keyword::TABLES,
17777 Keyword::IN,
17778 Keyword::SCHEMA,
17779 ]) {
17780 Some(GrantObjects::AllExternalTablesInSchema {
17781 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17782 })
17783 } else if self.parse_keywords(&[
17784 Keyword::ALL,
17785 Keyword::VIEWS,
17786 Keyword::IN,
17787 Keyword::SCHEMA,
17788 ]) {
17789 Some(GrantObjects::AllViewsInSchema {
17790 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17791 })
17792 } else if self.parse_keywords(&[
17793 Keyword::ALL,
17794 Keyword::MATERIALIZED,
17795 Keyword::VIEWS,
17796 Keyword::IN,
17797 Keyword::SCHEMA,
17798 ]) {
17799 Some(GrantObjects::AllMaterializedViewsInSchema {
17800 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17801 })
17802 } else if self.parse_keywords(&[
17803 Keyword::ALL,
17804 Keyword::FUNCTIONS,
17805 Keyword::IN,
17806 Keyword::SCHEMA,
17807 ]) {
17808 Some(GrantObjects::AllFunctionsInSchema {
17809 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17810 })
17811 } else if self.parse_keywords(&[
17812 Keyword::FUTURE,
17813 Keyword::SCHEMAS,
17814 Keyword::IN,
17815 Keyword::DATABASE,
17816 ]) {
17817 Some(GrantObjects::FutureSchemasInDatabase {
17818 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17819 })
17820 } else if self.parse_keywords(&[
17821 Keyword::FUTURE,
17822 Keyword::TABLES,
17823 Keyword::IN,
17824 Keyword::SCHEMA,
17825 ]) {
17826 Some(GrantObjects::FutureTablesInSchema {
17827 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17828 })
17829 } else if self.parse_keywords(&[
17830 Keyword::FUTURE,
17831 Keyword::EXTERNAL,
17832 Keyword::TABLES,
17833 Keyword::IN,
17834 Keyword::SCHEMA,
17835 ]) {
17836 Some(GrantObjects::FutureExternalTablesInSchema {
17837 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17838 })
17839 } else if self.parse_keywords(&[
17840 Keyword::FUTURE,
17841 Keyword::VIEWS,
17842 Keyword::IN,
17843 Keyword::SCHEMA,
17844 ]) {
17845 Some(GrantObjects::FutureViewsInSchema {
17846 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17847 })
17848 } else if self.parse_keywords(&[
17849 Keyword::FUTURE,
17850 Keyword::MATERIALIZED,
17851 Keyword::VIEWS,
17852 Keyword::IN,
17853 Keyword::SCHEMA,
17854 ]) {
17855 Some(GrantObjects::FutureMaterializedViewsInSchema {
17856 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17857 })
17858 } else if self.parse_keywords(&[
17859 Keyword::ALL,
17860 Keyword::SEQUENCES,
17861 Keyword::IN,
17862 Keyword::SCHEMA,
17863 ]) {
17864 Some(GrantObjects::AllSequencesInSchema {
17865 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17866 })
17867 } else if self.parse_keywords(&[
17868 Keyword::FUTURE,
17869 Keyword::SEQUENCES,
17870 Keyword::IN,
17871 Keyword::SCHEMA,
17872 ]) {
17873 Some(GrantObjects::FutureSequencesInSchema {
17874 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17875 })
17876 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
17877 Some(GrantObjects::ResourceMonitors(
17878 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17879 ))
17880 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
17881 Some(GrantObjects::ComputePools(
17882 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17883 ))
17884 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
17885 Some(GrantObjects::FailoverGroup(
17886 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17887 ))
17888 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
17889 Some(GrantObjects::ReplicationGroup(
17890 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17891 ))
17892 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
17893 Some(GrantObjects::ExternalVolumes(
17894 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17895 ))
17896 } else {
17897 let object_type = self.parse_one_of_keywords(&[
17898 Keyword::SEQUENCE,
17899 Keyword::DATABASE,
17900 Keyword::SCHEMA,
17901 Keyword::TABLE,
17902 Keyword::VIEW,
17903 Keyword::WAREHOUSE,
17904 Keyword::INTEGRATION,
17905 Keyword::VIEW,
17906 Keyword::WAREHOUSE,
17907 Keyword::INTEGRATION,
17908 Keyword::USER,
17909 Keyword::CONNECTION,
17910 Keyword::PROCEDURE,
17911 Keyword::FUNCTION,
17912 ]);
17913 let objects =
17914 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
17915 match object_type {
17916 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
17917 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
17918 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
17919 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
17920 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
17921 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
17922 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
17923 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
17924 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
17925 if let Some(name) = objects?.first() {
17926 self.parse_grant_procedure_or_function(name, &kw)?
17927 } else {
17928 self.expected_ref("procedure or function name", self.peek_token_ref())?
17929 }
17930 }
17931 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
17932 Some(unexpected_keyword) => return Err(ParserError::ParserError(
17933 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
17934 )),
17935 }
17936 }
17937 } else {
17938 None
17939 };
17940
17941 Ok((privileges, objects))
17942 }
17943
17944 fn parse_grant_procedure_or_function(
17945 &mut self,
17946 name: &ObjectName,
17947 kw: &Option<Keyword>,
17948 ) -> Result<Option<GrantObjects>, ParserError> {
17949 let arg_types = if self.consume_token(&Token::LParen) {
17950 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
17951 self.expect_token(&Token::RParen)?;
17952 list
17953 } else {
17954 vec![]
17955 };
17956 match kw {
17957 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
17958 name: name.clone(),
17959 arg_types,
17960 })),
17961 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
17962 name: name.clone(),
17963 arg_types,
17964 })),
17965 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
17966 }
17967 }
17968
    /// Parses a single privilege (an [`Action`]) inside a GRANT/REVOKE/DENY
    /// privilege list, e.g. `SELECT (a, b)`, `CREATE SCHEMA`, or
    /// `IMPORTED PRIVILEGES`.
    ///
    /// Multi-keyword privileges are tried before single-keyword ones so that
    /// e.g. `MANAGE VERSIONS` is not consumed as the bare `MANAGE` action.
    ///
    /// # Errors
    /// Returns a `ParserError` when the next token is not a recognized
    /// privilege keyword.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Helper: parse an optional parenthesized column list; an empty or
        // absent list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-keyword privileges — must be checked before single keywords.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-keyword privileges (some carry an optional sub-clause).
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
18089
18090 fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
18091 if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
18093 Some(ActionCreateObjectType::ApplicationPackage)
18094 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
18095 Some(ActionCreateObjectType::ComputePool)
18096 } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
18097 Some(ActionCreateObjectType::DataExchangeListing)
18098 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
18099 Some(ActionCreateObjectType::ExternalVolume)
18100 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
18101 Some(ActionCreateObjectType::FailoverGroup)
18102 } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
18103 Some(ActionCreateObjectType::NetworkPolicy)
18104 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
18105 Some(ActionCreateObjectType::OrganiationListing)
18106 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
18107 Some(ActionCreateObjectType::ReplicationGroup)
18108 }
18109 else if self.parse_keyword(Keyword::ACCOUNT) {
18111 Some(ActionCreateObjectType::Account)
18112 } else if self.parse_keyword(Keyword::APPLICATION) {
18113 Some(ActionCreateObjectType::Application)
18114 } else if self.parse_keyword(Keyword::DATABASE) {
18115 Some(ActionCreateObjectType::Database)
18116 } else if self.parse_keyword(Keyword::INTEGRATION) {
18117 Some(ActionCreateObjectType::Integration)
18118 } else if self.parse_keyword(Keyword::ROLE) {
18119 Some(ActionCreateObjectType::Role)
18120 } else if self.parse_keyword(Keyword::SCHEMA) {
18121 Some(ActionCreateObjectType::Schema)
18122 } else if self.parse_keyword(Keyword::SHARE) {
18123 Some(ActionCreateObjectType::Share)
18124 } else if self.parse_keyword(Keyword::USER) {
18125 Some(ActionCreateObjectType::User)
18126 } else if self.parse_keyword(Keyword::WAREHOUSE) {
18127 Some(ActionCreateObjectType::Warehouse)
18128 } else {
18129 None
18130 }
18131 }
18132
18133 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
18134 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
18135 Ok(ActionApplyType::AggregationPolicy)
18136 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
18137 Ok(ActionApplyType::AuthenticationPolicy)
18138 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
18139 Ok(ActionApplyType::JoinPolicy)
18140 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
18141 Ok(ActionApplyType::MaskingPolicy)
18142 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
18143 Ok(ActionApplyType::PackagesPolicy)
18144 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
18145 Ok(ActionApplyType::PasswordPolicy)
18146 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
18147 Ok(ActionApplyType::ProjectionPolicy)
18148 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
18149 Ok(ActionApplyType::RowAccessPolicy)
18150 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
18151 Ok(ActionApplyType::SessionPolicy)
18152 } else if self.parse_keyword(Keyword::TAG) {
18153 Ok(ActionApplyType::Tag)
18154 } else {
18155 self.expected_ref("GRANT APPLY type", self.peek_token_ref())
18156 }
18157 }
18158
18159 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
18160 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
18161 Some(ActionExecuteObjectType::DataMetricFunction)
18162 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
18163 Some(ActionExecuteObjectType::ManagedAlert)
18164 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
18165 Some(ActionExecuteObjectType::ManagedTask)
18166 } else if self.parse_keyword(Keyword::ALERT) {
18167 Some(ActionExecuteObjectType::Alert)
18168 } else if self.parse_keyword(Keyword::TASK) {
18169 Some(ActionExecuteObjectType::Task)
18170 } else {
18171 None
18172 }
18173 }
18174
18175 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
18176 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
18177 Ok(ActionManageType::AccountSupportCases)
18178 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
18179 Ok(ActionManageType::EventSharing)
18180 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
18181 Ok(ActionManageType::ListingAutoFulfillment)
18182 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
18183 Ok(ActionManageType::OrganizationSupportCases)
18184 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
18185 Ok(ActionManageType::UserSupportCases)
18186 } else if self.parse_keyword(Keyword::GRANTS) {
18187 Ok(ActionManageType::Grants)
18188 } else if self.parse_keyword(Keyword::WAREHOUSES) {
18189 Ok(ActionManageType::Warehouses)
18190 } else {
18191 self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
18192 }
18193 }
18194
18195 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
18196 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
18197 Some(ActionModifyType::LogLevel)
18198 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
18199 Some(ActionModifyType::TraceLevel)
18200 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
18201 Some(ActionModifyType::SessionLogLevel)
18202 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
18203 Some(ActionModifyType::SessionTraceLevel)
18204 } else {
18205 None
18206 }
18207 }
18208
18209 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
18210 if self.parse_keyword(Keyword::EXECUTION) {
18211 Some(ActionMonitorType::Execution)
18212 } else if self.parse_keyword(Keyword::SECURITY) {
18213 Some(ActionMonitorType::Security)
18214 } else if self.parse_keyword(Keyword::USAGE) {
18215 Some(ActionMonitorType::Usage)
18216 } else {
18217 None
18218 }
18219 }
18220
    /// Parses the name of a grantee in a GRANT/REVOKE/DENY statement.
    ///
    /// For dialects that support MySQL-style `user@host` grantees, a
    /// single-part identifier followed by `@` is split into user and host
    /// parts; otherwise the parsed object name is returned unchanged.
    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
        let mut name = self.parse_object_name(false)?;
        // `consume_token` is deliberately last in the `&&` chain: the `@` is
        // only consumed once all the cheaper structural checks have passed.
        if self.dialect.supports_user_host_grantee()
            && name.0.len() == 1
            && name.0[0].as_ident().is_some()
            && self.consume_token(&Token::AtSign)
        {
            // Both unwraps are safe: the guards above guarantee exactly one
            // part and that it is an identifier.
            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
            let host = self.parse_identifier()?;
            Ok(GranteeName::UserHost { user, host })
        } else {
            Ok(GranteeName::ObjectName(name))
        }
    }
18236
18237 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
18239 self.expect_keyword(Keyword::DENY)?;
18240
18241 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18242 let objects = match objects {
18243 Some(o) => o,
18244 None => {
18245 return parser_err!(
18246 "DENY statements must specify an object",
18247 self.peek_token_ref().span.start
18248 )
18249 }
18250 };
18251
18252 self.expect_keyword_is(Keyword::TO)?;
18253 let grantees = self.parse_grantees()?;
18254 let cascade = self.parse_cascade_option();
18255 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18256 Some(self.parse_identifier()?)
18257 } else {
18258 None
18259 };
18260
18261 Ok(Statement::Deny(DenyStatement {
18262 privileges,
18263 objects,
18264 grantees,
18265 cascade,
18266 granted_by,
18267 }))
18268 }
18269
18270 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18272 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18273
18274 self.expect_keyword_is(Keyword::FROM)?;
18275 let grantees = self.parse_grantees()?;
18276
18277 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18278 Some(self.parse_identifier()?)
18279 } else {
18280 None
18281 };
18282
18283 let cascade = self.parse_cascade_option();
18284
18285 Ok(Revoke {
18286 privileges,
18287 objects,
18288 grantees,
18289 granted_by,
18290 cascade,
18291 })
18292 }
18293
18294 pub fn parse_replace(
18296 &mut self,
18297 replace_token: TokenWithSpan,
18298 ) -> Result<Statement, ParserError> {
18299 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18300 return parser_err!(
18301 "Unsupported statement REPLACE",
18302 self.peek_token_ref().span.start
18303 );
18304 }
18305
18306 let mut insert = self.parse_insert(replace_token)?;
18307 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18308 *replace_into = true;
18309 }
18310
18311 Ok(insert)
18312 }
18313
18314 fn parse_insert_setexpr_boxed(
18318 &mut self,
18319 insert_token: TokenWithSpan,
18320 ) -> Result<Box<SetExpr>, ParserError> {
18321 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18322 }
18323
    /// Parses the body of an `INSERT` statement; `insert_token` is the
    /// already-consumed `INSERT` (or `REPLACE`) token.
    ///
    /// Also recognizes Hive's `INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>'`
    /// form, which yields a [`Statement::Directory`] instead of an insert.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `INSERT OR <conflict-resolution>` clause.
        let or = self.parse_conflict_clause();
        // MySQL priority modifiers (LOW_PRIORITY / DELAYED / HIGH_PRIORITY).
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Always false here; `parse_replace` flips this flag on the returned
        // statement after delegating to this function.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        if self.parse_keyword(Keyword::DIRECTORY) {
            // Hive: INSERT ... DIRECTORY '<path>' [STORED AS <fmt>] <query>
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional TABLE keyword, then the insert target.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional alias for the target table; only attempted when the
            // next tokens cannot be the start of the source query or a
            // DEFAULT/VALUES clause.
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT ... DEFAULT VALUES`: no columns and no source.
                (vec![], None, vec![], None, None, vec![])
            } else {
                // A column list / partition clause can only appear when the
                // statement does not go straight into a subquery.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after the partition.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                // The data source is either absent (a FORMAT/SETTINGS clause
                // follows), a `SET a = b, ...` assignment list, or a regular
                // source query.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // Trailing SETTINGS / FORMAT clauses, for dialects that report
            // `supports_insert_format`.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias (col_aliases)` for the inserted row.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ...` or `ON DUPLICATE KEY UPDATE ...`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Conflict target: a named constraint, a column list, or
                    // nothing at all.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18550
18551 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18555 let ident = self.parse_identifier()?;
18556 let values = self
18557 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18558 .unwrap_or_default();
18559
18560 Ok(InputFormatClause { ident, values })
18561 }
18562
18563 fn peek_subquery_start(&mut self) -> bool {
18566 matches!(
18567 self.peek_tokens_ref(),
18568 [
18569 TokenWithSpan {
18570 token: Token::LParen,
18571 ..
18572 },
18573 TokenWithSpan {
18574 token: Token::Word(Word {
18575 keyword: Keyword::SELECT,
18576 ..
18577 }),
18578 ..
18579 },
18580 ]
18581 )
18582 }
18583
18584 fn peek_subquery_or_cte_start(&mut self) -> bool {
18588 matches!(
18589 self.peek_tokens_ref(),
18590 [
18591 TokenWithSpan {
18592 token: Token::LParen,
18593 ..
18594 },
18595 TokenWithSpan {
18596 token: Token::Word(Word {
18597 keyword: Keyword::SELECT | Keyword::WITH,
18598 ..
18599 }),
18600 ..
18601 },
18602 ]
18603 )
18604 }
18605
18606 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18607 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18608 Some(SqliteOnConflict::Replace)
18609 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18610 Some(SqliteOnConflict::Rollback)
18611 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18612 Some(SqliteOnConflict::Abort)
18613 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18614 Some(SqliteOnConflict::Fail)
18615 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18616 Some(SqliteOnConflict::Ignore)
18617 } else if self.parse_keyword(Keyword::REPLACE) {
18618 Some(SqliteOnConflict::Replace)
18619 } else {
18620 None
18621 }
18622 }
18623
18624 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18626 if self.parse_keyword(Keyword::PARTITION) {
18627 self.expect_token(&Token::LParen)?;
18628 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18629 self.expect_token(&Token::RParen)?;
18630 Ok(partition_cols)
18631 } else {
18632 Ok(None)
18633 }
18634 }
18635
18636 pub fn parse_load_data_table_format(
18638 &mut self,
18639 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18640 if self.parse_keyword(Keyword::INPUTFORMAT) {
18641 let input_format = self.parse_expr()?;
18642 self.expect_keyword_is(Keyword::SERDE)?;
18643 let serde = self.parse_expr()?;
18644 Ok(Some(HiveLoadDataFormat {
18645 input_format,
18646 serde,
18647 }))
18648 } else {
18649 Ok(None)
18650 }
18651 }
18652
18653 fn parse_update_setexpr_boxed(
18657 &mut self,
18658 update_token: TokenWithSpan,
18659 ) -> Result<Box<SetExpr>, ParserError> {
18660 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18661 }
18662
    /// Parses the body of an `UPDATE` statement; `update_token` is the
    /// already-consumed `UPDATE` token.
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `UPDATE OR <conflict-resolution>` clause.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // A FROM clause may appear before SET (dialect-dependent); its
        // position is preserved in the AST via `UpdateTableFromKind`.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        let output = self.maybe_parse_output_clause()?;

        // FROM after SET is only attempted when none was parsed before SET.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // Optional trailing ORDER BY / LIMIT for dialects that allow them.
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18724
18725 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18727 let target = self.parse_assignment_target()?;
18728 self.expect_token(&Token::Eq)?;
18729 let value = self.parse_expr()?;
18730 Ok(Assignment { target, value })
18731 }
18732
18733 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18735 if self.consume_token(&Token::LParen) {
18736 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18737 self.expect_token(&Token::RParen)?;
18738 Ok(AssignmentTarget::Tuple(columns))
18739 } else {
18740 let column = self.parse_object_name(false)?;
18741 Ok(AssignmentTarget::ColumnName(column))
18742 }
18743 }
18744
    /// Parses a single function-call argument, which may be named
    /// (`name => value`, `name = value`, `name := value`, ... depending on
    /// the dialect) or unnamed (a bare expression or a wildcard, possibly
    /// with wildcard options such as `EXCLUDE`/`EXCEPT`).
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // Speculatively try the named-argument form first; `maybe_parse`
        // rewinds the parser when the `name <operator> value` shape is
        // absent. Some dialects allow an arbitrary expression as the name,
        // others only an identifier.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Fall back to an unnamed argument.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                // A `*` argument may carry additional options (EXCLUDE,
                // EXCEPT, REPLACE, RENAME, ILIKE); only wrap it when at
                // least one option is actually present.
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18794
    /// Parses the operator that separates a named function argument from its
    /// value: the `VALUE` keyword, `=>`, `=`, `:=`, or `:`, each gated on the
    /// corresponding dialect capability.
    ///
    /// On failure the consumed token is pushed back (`prev_token`) before the
    /// error is returned, so the caller's position is unchanged.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not an operator for this dialect: rewind and report.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
18823
18824 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
18826 if self.consume_token(&Token::RParen) {
18827 Ok(vec![])
18828 } else {
18829 let args = self.parse_comma_separated(Parser::parse_function_args)?;
18830 self.expect_token(&Token::RParen)?;
18831 Ok(args)
18832 }
18833 }
18834
    /// Parses the argument list of a table-valued function after the opening
    /// `(` has been consumed, allowing an optional trailing `SETTINGS` list,
    /// and consumes the closing `)`.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // An immediate `)` means an empty argument list.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Collect comma-separated arguments until either a SETTINGS clause is
        // found (which terminates the list) or the list ends normally.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
18855
    /// Parses the contents of a function call's parentheses: an optional
    /// ALL/DISTINCT quantifier, the arguments, and the various trailing
    /// clauses (ORDER BY, LIMIT, HAVING, SEPARATOR, ON OVERFLOW, JSON
    /// null-handling / RETURNING). Consumes the closing `)`.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON null-handling / RETURNING clauses may appear before any
        // arguments (e.g. `JSON_ARRAY(NULL ON NULL)`)...
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list (possibly with the clauses parsed above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // e.g. `FIRST_VALUE(x IGNORE NULLS)` on dialects that allow it.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery aggregate `HAVING MIN|MAX expr` bound.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // expect_one_of_keywords only yields keywords from its list,
                // so this arm is defensive.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL `GROUP_CONCAT(... SEPARATOR 'x')`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // ...and the JSON clauses may also appear after the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
18949
18950 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
18951 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
18952 Some(JsonNullClause::AbsentOnNull)
18953 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
18954 Some(JsonNullClause::NullOnNull)
18955 } else {
18956 None
18957 }
18958 }
18959
18960 fn maybe_parse_json_returning_clause(
18961 &mut self,
18962 ) -> Result<Option<JsonReturningClause>, ParserError> {
18963 if self.parse_keyword(Keyword::RETURNING) {
18964 let data_type = self.parse_data_type()?;
18965 Ok(Some(JsonReturningClause { data_type }))
18966 } else {
18967 Ok(None)
18968 }
18969 }
18970
18971 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
18972 let loc = self.peek_token_ref().span.start;
18973 match (
18974 self.parse_keyword(Keyword::ALL),
18975 self.parse_keyword(Keyword::DISTINCT),
18976 ) {
18977 (true, false) => Ok(Some(DuplicateTreatment::All)),
18978 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
18979 (false, false) => Ok(None),
18980 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
18981 }
18982 }
18983
    /// Parses a single projection item of a SELECT list: a wildcard, a
    /// qualified wildcard, or an expression with an optional alias.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Some dialects allow a reserved operator keyword to prefix a select
        // item; it is captured as an identifier and re-attached below via
        // maybe_prefixed_expr.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // A bare unquoted `from` here almost certainly means the
            // projection was left out; report it instead of treating it as a
            // column named "from".
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // MsSQL-style `alias = expr` projection.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                let Expr::Identifier(alias) = *left else {
                    // Guarded by the matches! above; practically unreachable.
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — qualified wildcard over an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (a, b, ...)` — multi-column alias.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
19058
    /// Parses the dialect-specific qualifiers that may follow a `*` or
    /// `table.*` wildcard (ILIKE, EXCLUDE, EXCEPT, REPLACE, RENAME, alias).
    /// The probes run in a fixed order that matches the dialects' grammars.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // ILIKE and EXCLUDE are mutually exclusive: EXCLUDE is only probed
        // when no ILIKE qualifier was found.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if self.dialect.supports_select_wildcard_replace() {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if self.dialect.supports_select_wildcard_rename() {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        // Some dialects also allow an alias after the wildcard qualifiers.
        let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
            self.maybe_parse_select_item_alias()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
            opt_alias,
        })
    }
19109
19110 pub fn parse_optional_select_item_ilike(
19114 &mut self,
19115 ) -> Result<Option<IlikeSelectItem>, ParserError> {
19116 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
19117 let next_token = self.next_token();
19118 let pattern = match next_token.token {
19119 Token::SingleQuotedString(s) => s,
19120 _ => return self.expected("ilike pattern", next_token),
19121 };
19122 Some(IlikeSelectItem { pattern })
19123 } else {
19124 None
19125 };
19126 Ok(opt_ilike)
19127 }
19128
19129 pub fn parse_optional_select_item_exclude(
19133 &mut self,
19134 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
19135 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
19136 if self.consume_token(&Token::LParen) {
19137 let columns =
19138 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
19139 self.expect_token(&Token::RParen)?;
19140 Some(ExcludeSelectItem::Multiple(columns))
19141 } else {
19142 let column = self.parse_object_name(false)?;
19143 Some(ExcludeSelectItem::Single(column))
19144 }
19145 } else {
19146 None
19147 };
19148
19149 Ok(opt_exclude)
19150 }
19151
19152 pub fn parse_optional_select_item_except(
19156 &mut self,
19157 ) -> Result<Option<ExceptSelectItem>, ParserError> {
19158 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
19159 if self.peek_token_ref().token == Token::LParen {
19160 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
19161 match &idents[..] {
19162 [] => {
19163 return self.expected_ref(
19164 "at least one column should be parsed by the expect clause",
19165 self.peek_token_ref(),
19166 )?;
19167 }
19168 [first, idents @ ..] => Some(ExceptSelectItem {
19169 first_element: first.clone(),
19170 additional_elements: idents.to_vec(),
19171 }),
19172 }
19173 } else {
19174 let ident = self.parse_identifier()?;
19176 Some(ExceptSelectItem {
19177 first_element: ident,
19178 additional_elements: vec![],
19179 })
19180 }
19181 } else {
19182 None
19183 };
19184
19185 Ok(opt_except)
19186 }
19187
19188 pub fn parse_optional_select_item_rename(
19190 &mut self,
19191 ) -> Result<Option<RenameSelectItem>, ParserError> {
19192 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
19193 if self.consume_token(&Token::LParen) {
19194 let idents =
19195 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
19196 self.expect_token(&Token::RParen)?;
19197 Some(RenameSelectItem::Multiple(idents))
19198 } else {
19199 let ident = self.parse_identifier_with_alias()?;
19200 Some(RenameSelectItem::Single(ident))
19201 }
19202 } else {
19203 None
19204 };
19205
19206 Ok(opt_rename)
19207 }
19208
19209 pub fn parse_optional_select_item_replace(
19211 &mut self,
19212 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
19213 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
19214 if self.consume_token(&Token::LParen) {
19215 let items = self.parse_comma_separated(|parser| {
19216 Ok(Box::new(parser.parse_replace_elements()?))
19217 })?;
19218 self.expect_token(&Token::RParen)?;
19219 Some(ReplaceSelectItem { items })
19220 } else {
19221 let tok = self.next_token();
19222 return self.expected("( after REPLACE but", tok);
19223 }
19224 } else {
19225 None
19226 };
19227
19228 Ok(opt_replace)
19229 }
19230 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
19232 let expr = self.parse_expr()?;
19233 let as_keyword = self.parse_keyword(Keyword::AS);
19234 let ident = self.parse_identifier()?;
19235 Ok(ReplaceSelectElement {
19236 expr,
19237 column_name: ident,
19238 as_keyword,
19239 })
19240 }
19241
19242 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19245 if self.parse_keyword(Keyword::ASC) {
19246 Some(true)
19247 } else if self.parse_keyword(Keyword::DESC) {
19248 Some(false)
19249 } else {
19250 None
19251 }
19252 }
19253
19254 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19256 self.parse_order_by_expr_inner(false)
19257 .map(|(order_by, _)| order_by)
19258 }
19259
19260 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19262 self.parse_order_by_expr_inner(true)
19263 .map(|(column, operator_class)| IndexColumn {
19264 column,
19265 operator_class,
19266 })
19267 }
19268
    /// Shared worker for ORDER BY expressions and CREATE INDEX columns.
    /// When `with_operator_class` is set, an optional PostgreSQL operator
    /// class name may follow the expression; it is returned alongside the
    /// parsed `OrderByExpr`.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // An upcoming ordering / NULLS / WITH keyword means no operator
            // class is present; otherwise speculatively try to parse one
            // (maybe_parse rewinds on failure).
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // ClickHouse `WITH FILL` modifier.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19309
19310 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19311 let asc = self.parse_asc_desc();
19312
19313 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19314 Some(true)
19315 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19316 Some(false)
19317 } else {
19318 None
19319 };
19320
19321 Ok(OrderByOptions { asc, nulls_first })
19322 }
19323
19324 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19328 let from = if self.parse_keyword(Keyword::FROM) {
19329 Some(self.parse_expr()?)
19330 } else {
19331 None
19332 };
19333
19334 let to = if self.parse_keyword(Keyword::TO) {
19335 Some(self.parse_expr()?)
19336 } else {
19337 None
19338 };
19339
19340 let step = if self.parse_keyword(Keyword::STEP) {
19341 Some(self.parse_expr()?)
19342 } else {
19343 None
19344 };
19345
19346 Ok(WithFill { from, to, step })
19347 }
19348
19349 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19352 if !self.parse_keyword(Keyword::INTERPOLATE) {
19353 return Ok(None);
19354 }
19355
19356 if self.consume_token(&Token::LParen) {
19357 let interpolations =
19358 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19359 self.expect_token(&Token::RParen)?;
19360 return Ok(Some(Interpolate {
19362 exprs: Some(interpolations),
19363 }));
19364 }
19365
19366 Ok(Some(Interpolate { exprs: None }))
19368 }
19369
19370 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19372 let column = self.parse_identifier()?;
19373 let expr = if self.parse_keyword(Keyword::AS) {
19374 Some(self.parse_expr()?)
19375 } else {
19376 None
19377 };
19378 Ok(InterpolateExpr { column, expr })
19379 }
19380
    /// Parses the tail of a `TOP` clause: either a parenthesized expression
    /// or a bare integer literal, followed by optional `PERCENT` and
    /// `WITH TIES`.
    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
        let quantity = if self.consume_token(&Token::LParen) {
            let quantity = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(TopQuantity::Expr(quantity))
        } else {
            let next_token = self.next_token();
            let quantity = match next_token.token {
                // Without parentheses only an unsigned integer is accepted.
                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
                _ => self.expected("literal int", next_token)?,
            };
            Some(TopQuantity::Constant(quantity))
        };

        let percent = self.parse_keyword(Keyword::PERCENT);

        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);

        Ok(Top {
            with_ties,
            percent,
            quantity,
        })
    }
19407
19408 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19410 if self.parse_keyword(Keyword::ALL) {
19411 Ok(None)
19412 } else {
19413 Ok(Some(self.parse_expr()?))
19414 }
19415 }
19416
19417 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19419 let value = self.parse_expr()?;
19420 let rows = if self.parse_keyword(Keyword::ROW) {
19421 OffsetRows::Row
19422 } else if self.parse_keyword(Keyword::ROWS) {
19423 OffsetRows::Rows
19424 } else {
19425 OffsetRows::None
19426 };
19427 Ok(Offset { value, rows })
19428 }
19429
    /// Parses the tail of a `FETCH [FIRST | NEXT] ...` clause
    /// (ANSI `FETCH FIRST n ROWS ONLY` and variants).
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        // FIRST / NEXT are interchangeable and optional.
        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);

        // `FETCH FIRST ROWS ONLY` (no quantity) is valid.
        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            // ROW / ROWS after the quantity is also optional.
            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
            (Some(quantity), percent)
        };

        // `ONLY` and `WITH TIES` are mutually exclusive terminators.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else {
            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
        };

        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
19458
    /// Parses a `FOR UPDATE` / `FOR SHARE` locking clause with its optional
    /// `OF table`, `NOWAIT`, and `SKIP LOCKED` modifiers.
    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
            Keyword::UPDATE => LockType::Update,
            Keyword::SHARE => LockType::Share,
            // expect_one_of_keywords only yields keywords from its list,
            // so this arm is defensive.
            unexpected_keyword => return Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
            )),
        };
        let of = if self.parse_keyword(Keyword::OF) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
            Some(NonBlock::Nowait)
        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
            Some(NonBlock::SkipLocked)
        } else {
            None
        };
        Ok(LockClause {
            lock_type,
            of,
            nonblock,
        })
    }
19486
    /// Parses a PostgreSQL-style `LOCK [TABLE] name, ... [IN <mode> MODE]
    /// [NOWAIT]` statement.
    pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
        self.expect_keyword(Keyword::LOCK)?;

        // `LOCK TABLES` (plural) is a different statement; reject it with a
        // pointed error rather than misparsing TABLES as a table name.
        if self.peek_keyword(Keyword::TABLES) {
            return self.expected_ref("TABLE or a table name", self.peek_token_ref());
        }

        // The TABLE keyword itself is optional.
        let _ = self.parse_keyword(Keyword::TABLE);
        let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
        let lock_mode = if self.parse_keyword(Keyword::IN) {
            let lock_mode = self.parse_lock_table_mode()?;
            self.expect_keyword(Keyword::MODE)?;
            Some(lock_mode)
        } else {
            None
        };
        let nowait = self.parse_keyword(Keyword::NOWAIT);

        Ok(Lock {
            tables,
            lock_mode,
            nowait,
        })
    }
19512
19513 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19514 let only = self.parse_keyword(Keyword::ONLY);
19515 let name = self.parse_object_name(false)?;
19516 let has_asterisk = self.consume_token(&Token::Mul);
19517
19518 Ok(LockTableTarget {
19519 name,
19520 only,
19521 has_asterisk,
19522 })
19523 }
19524
    /// Parses one of PostgreSQL's eight LOCK TABLE modes.
    /// Multi-keyword modes are probed before their single-keyword prefixes
    /// (e.g. `SHARE UPDATE EXCLUSIVE` before plain `SHARE`), so the order of
    /// these checks is significant.
    fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
        if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
            Ok(LockTableMode::AccessShare)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::AccessExclusive)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
            Ok(LockTableMode::RowShare)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::RowExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareUpdateExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareRowExclusive)
        } else if self.parse_keyword(Keyword::SHARE) {
            Ok(LockTableMode::Share)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Ok(LockTableMode::Exclusive)
        } else {
            self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
        }
    }
19546
    /// Parses a `VALUES` row list. `allow_empty` permits `()` rows;
    /// `value_keyword` records whether the caller saw MySQL's `VALUE`
    /// spelling so the statement round-trips.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        // Set to true if ANY row uses the explicit `ROW(...)` syntax.
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
19576
19577 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19579 self.expect_keyword_is(Keyword::TRANSACTION)?;
19580 Ok(Statement::StartTransaction {
19581 modes: self.parse_transaction_modes()?,
19582 begin: false,
19583 transaction: Some(BeginTransactionKind::Transaction),
19584 modifier: None,
19585 statements: vec![],
19586 exception: None,
19587 has_end_keyword: false,
19588 })
19589 }
19590
19591 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19593 if !self.dialect.supports_start_transaction_modifier() {
19594 None
19595 } else if self.parse_keyword(Keyword::DEFERRED) {
19596 Some(TransactionModifier::Deferred)
19597 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19598 Some(TransactionModifier::Immediate)
19599 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19600 Some(TransactionModifier::Exclusive)
19601 } else if self.parse_keyword(Keyword::TRY) {
19602 Some(TransactionModifier::Try)
19603 } else if self.parse_keyword(Keyword::CATCH) {
19604 Some(TransactionModifier::Catch)
19605 } else {
19606 None
19607 }
19608 }
19609
19610 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19612 let modifier = self.parse_transaction_modifier();
19613 let transaction =
19614 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19615 {
19616 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19617 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19618 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19619 _ => None,
19620 };
19621 Ok(Statement::StartTransaction {
19622 modes: self.parse_transaction_modes()?,
19623 begin: true,
19624 transaction,
19625 modifier,
19626 statements: vec![],
19627 exception: None,
19628 has_end_keyword: false,
19629 })
19630 }
19631
    /// Parses the body of a BigQuery-style `BEGIN ... [EXCEPTION WHEN ...
    /// THEN ...] END` block, after the BEGIN keyword has been consumed.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler is `WHEN err [OR err ...] THEN <statements>`.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // Error identifiers up to the THEN keyword.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // OR between identifiers is optional; maybe_parse
                    // rewinds when it is absent.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19677
19678 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19680 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19681 None
19682 } else if self.parse_keyword(Keyword::TRY) {
19683 Some(TransactionModifier::Try)
19684 } else if self.parse_keyword(Keyword::CATCH) {
19685 Some(TransactionModifier::Catch)
19686 } else {
19687 None
19688 };
19689 Ok(Statement::Commit {
19690 chain: self.parse_commit_rollback_chain()?,
19691 end: true,
19692 modifier,
19693 })
19694 }
19695
    /// Parses zero or more transaction modes (`ISOLATION LEVEL ...`,
    /// `READ ONLY`, `READ WRITE`), separated by optional commas. After a
    /// comma another mode is mandatory; otherwise the list simply ends.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                // `REPEATABLE READ` must be probed before the single-keyword
                // levels; all probes only consume on a full match.
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                // A comma was consumed, so another mode must follow.
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19734
19735 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19737 Ok(Statement::Commit {
19738 chain: self.parse_commit_rollback_chain()?,
19739 end: false,
19740 modifier: None,
19741 })
19742 }
19743
19744 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19746 let chain = self.parse_commit_rollback_chain()?;
19747 let savepoint = self.parse_rollback_savepoint()?;
19748
19749 Ok(Statement::Rollback { chain, savepoint })
19750 }
19751
19752 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19754 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19755 if self.parse_keyword(Keyword::AND) {
19756 let chain = !self.parse_keyword(Keyword::NO);
19757 self.expect_keyword_is(Keyword::CHAIN)?;
19758 Ok(chain)
19759 } else {
19760 Ok(false)
19761 }
19762 }
19763
19764 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19766 if self.parse_keyword(Keyword::TO) {
19767 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19768 let savepoint = self.parse_identifier()?;
19769
19770 Ok(Some(savepoint))
19771 } else {
19772 Ok(None)
19773 }
19774 }
19775
    /// Parses the tail of a T-SQL `RAISERROR(msg, severity, state [, args])
    /// [WITH option, ...]` statement.
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional printf-style substitution arguments.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
19803
    /// Parses a single RAISERROR `WITH` option: LOG, NOWAIT, or SETERROR.
    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
            Keyword::LOG => Ok(RaisErrorOption::Log),
            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
            // expect_one_of_keywords only yields keywords from its list,
            // so this arm is defensive.
            _ => self.expected_ref(
                "LOG, NOWAIT OR SETERROR raiserror option",
                self.peek_token_ref(),
            ),
        }
    }
19816
    /// Parses a T-SQL `THROW [error_number, message, state]` statement.
    /// A bare `THROW` (re-throw inside CATCH) has no arguments.
    pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
        self.expect_keyword_is(Keyword::THROW)?;

        // If an error number is present, message and state are mandatory.
        let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
        let (message, state) = if error_number.is_some() {
            self.expect_token(&Token::Comma)?;
            let message = Box::new(self.parse_expr()?);
            self.expect_token(&Token::Comma)?;
            let state = Box::new(self.parse_expr()?);
            (Some(message), Some(state))
        } else {
            (None, None)
        };

        Ok(ThrowStatement {
            error_number,
            message,
            state,
        })
    }
19840
19841 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
19843 let prepare = self.parse_keyword(Keyword::PREPARE);
19844 let name = self.parse_identifier()?;
19845 Ok(Statement::Deallocate { name, prepare })
19846 }
19847
    /// Parses the tail of an `EXECUTE` statement, covering both the
    /// PostgreSQL form (`EXECUTE name(params)`) and extensions such as
    /// `EXECUTE IMMEDIATE`, `USING`, `INTO`, `OUTPUT`, and `DEFAULT`.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // EXECUTE IMMEDIATE takes an expression rather than a
        // prepared-statement name; a leading `(` likewise means no name.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Determine the token that ends the (possibly empty) parameter list:
        // `)` when parenthesized; otherwise EOF, `;`, or one of the clause
        // keywords that may follow unparenthesized parameters.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
19907
19908 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
19910 let name = self.parse_identifier()?;
19911
19912 let mut data_types = vec![];
19913 if self.consume_token(&Token::LParen) {
19914 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
19915 self.expect_token(&Token::RParen)?;
19916 }
19917
19918 self.expect_keyword_is(Keyword::AS)?;
19919 let statement = Box::new(self.parse_statement()?);
19920 Ok(Statement::Prepare {
19921 name,
19922 data_types,
19923 statement,
19924 })
19925 }
19926
    /// Parses a Redshift-style `UNLOAD ('query' | (query)) TO target
    /// [IAM_ROLE ...] [WITH (...)] [legacy options]` statement.
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::UNLOAD)?;
        self.expect_token(&Token::LParen)?;
        // The query may be given either as a quoted string or as an inline
        // subquery; exactly one of (query, query_text) is populated.
        let (query, query_text) =
            if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                (None, Some(self.parse_literal_string()?))
            } else {
                (Some(self.parse_query()?), None)
            };
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;
        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
            Some(self.parse_iam_role_kind()?)
        } else {
            None
        };
        let with = self.parse_options(Keyword::WITH)?;
        // Trailing COPY-style legacy options, consumed greedily until none
        // match (maybe_parse rewinds on failure).
        let mut options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            options.push(opt);
        }
        Ok(Statement::Unload {
            query,
            query_text,
            to,
            auth,
            with,
            options,
        })
    }
19960
19961 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
19962 let temporary = self
19963 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
19964 .is_some();
19965 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
19966 let table = self.parse_keyword(Keyword::TABLE);
19967 let name = self.parse_object_name(false)?;
19968
19969 Ok(SelectInto {
19970 temporary,
19971 unlogged,
19972 table,
19973 name,
19974 })
19975 }
19976
19977 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
19978 let v = self.parse_value()?;
19979 match &v.value {
19980 Value::SingleQuotedString(_) => Ok(v),
19981 Value::DoubleQuotedString(_) => Ok(v),
19982 Value::Number(_, _) => Ok(v),
19983 Value::Placeholder(_) => Ok(v),
19984 _ => {
19985 self.prev_token();
19986 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
19987 }
19988 }
19989 }
19990
19991 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
19993 let name = self.parse_object_name(false)?;
19994 if self.consume_token(&Token::LParen) {
19995 let value = self.parse_pragma_value()?;
19996 self.expect_token(&Token::RParen)?;
19997 Ok(Statement::Pragma {
19998 name,
19999 value: Some(value),
20000 is_eq: false,
20001 })
20002 } else if self.consume_token(&Token::Eq) {
20003 Ok(Statement::Pragma {
20004 name,
20005 value: Some(self.parse_pragma_value()?),
20006 is_eq: true,
20007 })
20008 } else {
20009 Ok(Statement::Pragma {
20010 name,
20011 value: None,
20012 is_eq: false,
20013 })
20014 }
20015 }
20016
20017 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
20019 let extension_name = self.parse_identifier()?;
20020
20021 Ok(Statement::Install { extension_name })
20022 }
20023
20024 pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
20026 if self.dialect.supports_load_extension() {
20027 let extension_name = self.parse_identifier()?;
20028 Ok(Statement::Load { extension_name })
20029 } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
20030 let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
20031 self.expect_keyword_is(Keyword::INPATH)?;
20032 let inpath = self.parse_literal_string()?;
20033 let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
20034 self.expect_keyword_is(Keyword::INTO)?;
20035 self.expect_keyword_is(Keyword::TABLE)?;
20036 let table_name = self.parse_object_name(false)?;
20037 let partitioned = self.parse_insert_partition()?;
20038 let table_format = self.parse_load_data_table_format()?;
20039 Ok(Statement::LoadData {
20040 local,
20041 inpath,
20042 overwrite,
20043 table_name,
20044 partitioned,
20045 table_format,
20046 })
20047 } else {
20048 self.expected_ref(
20049 "`DATA` or an extension name after `LOAD`",
20050 self.peek_token_ref(),
20051 )
20052 }
20053 }
20054
    /// Parse an `OPTIMIZE [TABLE] name ...` statement (the `OPTIMIZE`
    /// keyword has already been consumed). Accepts ClickHouse-style
    /// `ON CLUSTER`, `PARTITION`, `FINAL` and `DEDUPLICATE [BY expr]`
    /// clauses as well as `WHERE` and `ZORDER BY (...)` — presumably for
    /// Databricks-style `OPTIMIZE`; all clauses are optional and parsed
    /// in this fixed order.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // `PARTITION ID ident` selects a partition by identifier; plain
        // `PARTITION expr` selects it by expression.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        // Bare `DEDUPLICATE` deduplicates everything; `DEDUPLICATE BY expr`
        // restricts it to the given expression.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        // Optional `WHERE predicate` filter.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // `ZORDER BY (col, ...)` — the parentheses are mandatory here.
        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
20123
20124 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
20130 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20132 let name = self.parse_object_name(false)?;
20134 let mut data_type: Option<DataType> = None;
20136 if self.parse_keywords(&[Keyword::AS]) {
20137 data_type = Some(self.parse_data_type()?)
20138 }
20139 let sequence_options = self.parse_create_sequence_options()?;
20140 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
20142 if self.parse_keywords(&[Keyword::NONE]) {
20143 Some(ObjectName::from(vec![Ident::new("NONE")]))
20144 } else {
20145 Some(self.parse_object_name(false)?)
20146 }
20147 } else {
20148 None
20149 };
20150 Ok(Statement::CreateSequence {
20151 temporary,
20152 if_not_exists,
20153 name,
20154 data_type,
20155 sequence_options,
20156 owned_by,
20157 })
20158 }
20159
20160 fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
20161 let mut sequence_options = vec![];
20162 if self.parse_keywords(&[Keyword::INCREMENT]) {
20164 if self.parse_keywords(&[Keyword::BY]) {
20165 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
20166 } else {
20167 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
20168 }
20169 }
20170 if self.parse_keyword(Keyword::MINVALUE) {
20172 sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
20173 } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
20174 sequence_options.push(SequenceOptions::MinValue(None));
20175 }
20176 if self.parse_keywords(&[Keyword::MAXVALUE]) {
20178 sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
20179 } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
20180 sequence_options.push(SequenceOptions::MaxValue(None));
20181 }
20182
20183 if self.parse_keywords(&[Keyword::START]) {
20185 if self.parse_keywords(&[Keyword::WITH]) {
20186 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
20187 } else {
20188 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
20189 }
20190 }
20191 if self.parse_keywords(&[Keyword::CACHE]) {
20193 sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
20194 }
20195 if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
20197 sequence_options.push(SequenceOptions::Cycle(true));
20198 } else if self.parse_keywords(&[Keyword::CYCLE]) {
20199 sequence_options.push(SequenceOptions::Cycle(false));
20200 }
20201
20202 Ok(sequence_options)
20203 }
20204
20205 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
20209 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20210 let name = self.parse_object_name(false)?;
20211
20212 let server_type = if self.parse_keyword(Keyword::TYPE) {
20213 Some(self.parse_identifier()?)
20214 } else {
20215 None
20216 };
20217
20218 let version = if self.parse_keyword(Keyword::VERSION) {
20219 Some(self.parse_identifier()?)
20220 } else {
20221 None
20222 };
20223
20224 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
20225 let foreign_data_wrapper = self.parse_object_name(false)?;
20226
20227 let mut options = None;
20228 if self.parse_keyword(Keyword::OPTIONS) {
20229 self.expect_token(&Token::LParen)?;
20230 options = Some(self.parse_comma_separated(|p| {
20231 let key = p.parse_identifier()?;
20232 let value = p.parse_identifier()?;
20233 Ok(CreateServerOption { key, value })
20234 })?);
20235 self.expect_token(&Token::RParen)?;
20236 }
20237
20238 Ok(Statement::CreateServer(CreateServerStatement {
20239 name,
20240 if_not_exists: ine,
20241 server_type,
20242 version,
20243 foreign_data_wrapper,
20244 options,
20245 }))
20246 }
20247
20248 pub fn parse_create_foreign_data_wrapper(
20252 &mut self,
20253 ) -> Result<CreateForeignDataWrapper, ParserError> {
20254 let name = self.parse_identifier()?;
20255
20256 let handler = if self.parse_keyword(Keyword::HANDLER) {
20257 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20258 } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
20259 Some(FdwRoutineClause::NoFunction)
20260 } else {
20261 None
20262 };
20263
20264 let validator = if self.parse_keyword(Keyword::VALIDATOR) {
20265 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20266 } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20267 Some(FdwRoutineClause::NoFunction)
20268 } else {
20269 None
20270 };
20271
20272 let options = if self.parse_keyword(Keyword::OPTIONS) {
20273 self.expect_token(&Token::LParen)?;
20274 let opts = self.parse_comma_separated(|p| {
20275 let key = p.parse_identifier()?;
20276 let value = p.parse_identifier()?;
20277 Ok(CreateServerOption { key, value })
20278 })?;
20279 self.expect_token(&Token::RParen)?;
20280 Some(opts)
20281 } else {
20282 None
20283 };
20284
20285 Ok(CreateForeignDataWrapper {
20286 name,
20287 handler,
20288 validator,
20289 options,
20290 })
20291 }
20292
    /// Parse a PostgreSQL `CREATE FOREIGN TABLE` statement (leading
    /// keywords already consumed): `[IF NOT EXISTS] name (columns)
    /// SERVER server [OPTIONS (...)]`.
    pub fn parse_create_foreign_table(
        &mut self,
    ) -> Result<CreateForeignTable, ParserError> {
        let if_not_exists =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // NOTE(review): any table-level constraints parsed from the column
        // list are discarded here — confirm that dropping them is intended.
        let (columns, _constraints) = self.parse_columns()?;
        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        // Optional `OPTIONS (key value, ...)` list.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignTable {
            name,
            if_not_exists,
            columns,
            server_name,
            options,
        })
    }
20327
20328 pub fn parse_create_publication(&mut self) -> Result<CreatePublication, ParserError> {
20332 let name = self.parse_identifier()?;
20333
20334 let target = if self.parse_keyword(Keyword::FOR) {
20335 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES]) {
20336 Some(PublicationTarget::AllTables)
20337 } else if self.parse_keyword(Keyword::TABLE) {
20338 let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
20339 Some(PublicationTarget::Tables(tables))
20340 } else if self.parse_keywords(&[Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
20341 let schemas = self.parse_comma_separated(|p| p.parse_identifier())?;
20342 Some(PublicationTarget::TablesInSchema(schemas))
20343 } else {
20344 return self.expected_ref(
20345 "ALL TABLES, TABLE, or TABLES IN SCHEMA after FOR",
20346 self.peek_token_ref(),
20347 );
20348 }
20349 } else {
20350 None
20351 };
20352
20353 let with_options = self.parse_options(Keyword::WITH)?;
20354
20355 Ok(CreatePublication {
20356 name,
20357 target,
20358 with_options,
20359 })
20360 }
20361
20362 pub fn parse_create_subscription(&mut self) -> Result<CreateSubscription, ParserError> {
20366 let name = self.parse_identifier()?;
20367 self.expect_keyword_is(Keyword::CONNECTION)?;
20368 let connection = self.parse_value()?.value;
20369 self.expect_keyword_is(Keyword::PUBLICATION)?;
20370 let publications = self.parse_comma_separated(|p| p.parse_identifier())?;
20371 let with_options = self.parse_options(Keyword::WITH)?;
20372
20373 Ok(CreateSubscription {
20374 name,
20375 connection,
20376 publications,
20377 with_options,
20378 })
20379 }
20380
    /// Parse a PostgreSQL `CREATE CAST (source AS target) ...` statement
    /// (leading keywords already consumed).
    pub fn parse_create_cast(&mut self) -> Result<CreateCast, ParserError> {
        self.expect_token(&Token::LParen)?;
        let source_type = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::AS)?;
        let target_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // Exactly one of WITHOUT FUNCTION / WITH INOUT / WITH FUNCTION is
        // required. `WITH INOUT` is tried before `WITH FUNCTION` since both
        // alternatives begin with `WITH`.
        let function_kind = if self.parse_keywords(&[Keyword::WITHOUT, Keyword::FUNCTION]) {
            CastFunctionKind::WithoutFunction
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::INOUT]) {
            CastFunctionKind::WithInout
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::FUNCTION]) {
            let function_name = self.parse_object_name(false)?;
            // The argument type list is optional and may be empty: `fn()`.
            let argument_types = if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                let types = if self.peek_token_ref().token == Token::RParen {
                    vec![]
                } else {
                    self.parse_comma_separated(|p| p.parse_data_type())?
                };
                self.expect_token(&Token::RParen)?;
                types
            } else {
                vec![]
            };
            CastFunctionKind::WithFunction {
                function_name,
                argument_types,
            }
        } else {
            return self.expected_ref(
                "WITH FUNCTION, WITHOUT FUNCTION, or WITH INOUT",
                self.peek_token_ref(),
            );
        };

        // Optional `AS ASSIGNMENT` / `AS IMPLICIT`; with no `AS` clause the
        // cast is explicit-only.
        let cast_context = if self.parse_keyword(Keyword::AS) {
            if self.parse_keyword(Keyword::ASSIGNMENT) {
                CastContext::Assignment
            } else if self.parse_keyword(Keyword::IMPLICIT) {
                CastContext::Implicit
            } else {
                return self.expected_ref("ASSIGNMENT or IMPLICIT after AS", self.peek_token_ref());
            }
        } else {
            CastContext::Explicit
        };

        Ok(CreateCast {
            source_type,
            target_type,
            function_kind,
            cast_context,
        })
    }
20439
20440 pub fn parse_create_conversion(
20444 &mut self,
20445 is_default: bool,
20446 ) -> Result<CreateConversion, ParserError> {
20447 let name = self.parse_object_name(false)?;
20448 self.expect_keyword_is(Keyword::FOR)?;
20449 let source_encoding = self.parse_literal_string()?;
20450 self.expect_keyword_is(Keyword::TO)?;
20451 let destination_encoding = self.parse_literal_string()?;
20452 self.expect_keyword_is(Keyword::FROM)?;
20453 let function_name = self.parse_object_name(false)?;
20454
20455 Ok(CreateConversion {
20456 name,
20457 is_default,
20458 source_encoding,
20459 destination_encoding,
20460 function_name,
20461 })
20462 }
20463
20464 pub fn parse_create_language(
20468 &mut self,
20469 or_replace: bool,
20470 trusted: bool,
20471 procedural: bool,
20472 ) -> Result<CreateLanguage, ParserError> {
20473 let name = self.parse_identifier()?;
20474
20475 let handler = if self.parse_keyword(Keyword::HANDLER) {
20476 Some(self.parse_object_name(false)?)
20477 } else {
20478 None
20479 };
20480
20481 let inline_handler = if self.parse_keyword(Keyword::INLINE) {
20482 Some(self.parse_object_name(false)?)
20483 } else {
20484 None
20485 };
20486
20487 let validator = if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20488 None
20489 } else if self.parse_keyword(Keyword::VALIDATOR) {
20490 Some(self.parse_object_name(false)?)
20491 } else {
20492 None
20493 };
20494
20495 Ok(CreateLanguage {
20496 name,
20497 or_replace,
20498 trusted,
20499 procedural,
20500 handler,
20501 inline_handler,
20502 validator,
20503 })
20504 }
20505
20506 pub fn parse_create_rule(&mut self) -> Result<CreateRule, ParserError> {
20510 let name = self.parse_identifier()?;
20511 self.expect_keyword_is(Keyword::AS)?;
20512 self.expect_keyword_is(Keyword::ON)?;
20513
20514 let event = if self.parse_keyword(Keyword::SELECT) {
20515 RuleEvent::Select
20516 } else if self.parse_keyword(Keyword::INSERT) {
20517 RuleEvent::Insert
20518 } else if self.parse_keyword(Keyword::UPDATE) {
20519 RuleEvent::Update
20520 } else if self.parse_keyword(Keyword::DELETE) {
20521 RuleEvent::Delete
20522 } else {
20523 return self.expected_ref(
20524 "SELECT, INSERT, UPDATE, or DELETE after ON",
20525 self.peek_token_ref(),
20526 );
20527 };
20528
20529 self.expect_keyword_is(Keyword::TO)?;
20530 let table = self.parse_object_name(false)?;
20531
20532 let condition = if self.parse_keyword(Keyword::WHERE) {
20533 Some(self.parse_expr()?)
20534 } else {
20535 None
20536 };
20537
20538 self.expect_keyword_is(Keyword::DO)?;
20539
20540 let instead = if self.parse_keyword(Keyword::INSTEAD) {
20541 true
20542 } else if self.parse_keyword(Keyword::ALSO) {
20543 false
20544 } else {
20545 false
20546 };
20547
20548 let action = if self.parse_keyword(Keyword::NOTHING) {
20549 RuleAction::Nothing
20550 } else if self.peek_token_ref().token == Token::LParen {
20551 self.expect_token(&Token::LParen)?;
20552 let mut stmts = Vec::new();
20553 loop {
20554 stmts.push(self.parse_statement()?);
20555 if !self.consume_token(&Token::SemiColon) {
20556 break;
20557 }
20558 if self.peek_token_ref().token == Token::RParen {
20559 break;
20560 }
20561 }
20562 self.expect_token(&Token::RParen)?;
20563 RuleAction::Statements(stmts)
20564 } else {
20565 let stmt = self.parse_statement()?;
20566 RuleAction::Statements(vec![stmt])
20567 };
20568
20569 Ok(CreateRule {
20570 name,
20571 event,
20572 table,
20573 condition,
20574 instead,
20575 action,
20576 })
20577 }
20578
20579 pub fn parse_create_statistics(&mut self) -> Result<CreateStatistics, ParserError> {
20583 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20584 let name = self.parse_object_name(false)?;
20585
20586 let kinds = if self.consume_token(&Token::LParen) {
20587 let kinds = self.parse_comma_separated(|p| {
20588 let ident = p.parse_identifier()?;
20589 match ident.value.to_lowercase().as_str() {
20590 "ndistinct" => Ok(StatisticsKind::NDistinct),
20591 "dependencies" => Ok(StatisticsKind::Dependencies),
20592 "mcv" => Ok(StatisticsKind::Mcv),
20593 other => Err(ParserError::ParserError(format!(
20594 "Unknown statistics kind: {other}"
20595 ))),
20596 }
20597 })?;
20598 self.expect_token(&Token::RParen)?;
20599 kinds
20600 } else {
20601 vec![]
20602 };
20603
20604 self.expect_keyword_is(Keyword::ON)?;
20605 let on = self.parse_comma_separated(Parser::parse_expr)?;
20606 self.expect_keyword_is(Keyword::FROM)?;
20607 let from = self.parse_object_name(false)?;
20608
20609 Ok(CreateStatistics {
20610 if_not_exists,
20611 name,
20612 kinds,
20613 on,
20614 from,
20615 })
20616 }
20617
20618 pub fn parse_create_access_method(&mut self) -> Result<CreateAccessMethod, ParserError> {
20622 let name = self.parse_identifier()?;
20623 self.expect_keyword_is(Keyword::TYPE)?;
20624 let method_type = if self.parse_keyword(Keyword::INDEX) {
20625 AccessMethodType::Index
20626 } else if self.parse_keyword(Keyword::TABLE) {
20627 AccessMethodType::Table
20628 } else {
20629 return self.expected_ref("INDEX or TABLE after TYPE", self.peek_token_ref());
20630 };
20631 self.expect_keyword_is(Keyword::HANDLER)?;
20632 let handler = self.parse_object_name(false)?;
20633
20634 Ok(CreateAccessMethod {
20635 name,
20636 method_type,
20637 handler,
20638 })
20639 }
20640
20641 pub fn parse_create_event_trigger(&mut self) -> Result<CreateEventTrigger, ParserError> {
20645 let name = self.parse_identifier()?;
20646 self.expect_keyword_is(Keyword::ON)?;
20647 let event_ident = self.parse_identifier()?;
20648 let event = match event_ident.value.to_lowercase().as_str() {
20649 "ddl_command_start" => EventTriggerEvent::DdlCommandStart,
20650 "ddl_command_end" => EventTriggerEvent::DdlCommandEnd,
20651 "table_rewrite" => EventTriggerEvent::TableRewrite,
20652 "sql_drop" => EventTriggerEvent::SqlDrop,
20653 other => {
20654 return Err(ParserError::ParserError(format!(
20655 "Unknown event trigger event: {other}"
20656 )))
20657 }
20658 };
20659
20660 let when_tags = if self.parse_keyword(Keyword::WHEN) {
20661 self.expect_keyword_is(Keyword::TAG)?;
20662 self.expect_keyword_is(Keyword::IN)?;
20663 self.expect_token(&Token::LParen)?;
20664 let tags = self.parse_comma_separated(|p| p.parse_value().map(|v| v.value))?;
20665 self.expect_token(&Token::RParen)?;
20666 Some(tags)
20667 } else {
20668 None
20669 };
20670
20671 self.expect_keyword_is(Keyword::EXECUTE)?;
20672 let is_procedure = if self.parse_keyword(Keyword::FUNCTION) {
20673 false
20674 } else if self.parse_keyword(Keyword::PROCEDURE) {
20675 true
20676 } else {
20677 return self.expected_ref("FUNCTION or PROCEDURE after EXECUTE", self.peek_token_ref());
20678 };
20679 let execute = self.parse_object_name(false)?;
20680 self.expect_token(&Token::LParen)?;
20681 self.expect_token(&Token::RParen)?;
20682
20683 Ok(CreateEventTrigger {
20684 name,
20685 event,
20686 when_tags,
20687 execute,
20688 is_procedure,
20689 })
20690 }
20691
    /// Parse a PostgreSQL `CREATE [OR REPLACE] TRANSFORM FOR type
    /// LANGUAGE lang ({FROM|TO} SQL WITH FUNCTION fn(args), ...)`
    /// statement; the keywords up to and including `TRANSFORM` (and the
    /// `or_replace` flag) have already been consumed.
    pub fn parse_create_transform(&mut self, or_replace: bool) -> Result<CreateTransform, ParserError> {
        self.expect_keyword_is(Keyword::FOR)?;
        let type_name = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::LANGUAGE)?;
        let language = self.parse_identifier()?;
        self.expect_token(&Token::LParen)?;
        // Each comma-separated element is `{FROM | TO} SQL WITH FUNCTION
        // name(arg_types...)`.
        let elements = self.parse_comma_separated(|p| {
            let is_from = if p.parse_keyword(Keyword::FROM) {
                true
            } else {
                p.expect_keyword_is(Keyword::TO)?;
                false
            };
            p.expect_keyword_is(Keyword::SQL)?;
            p.expect_keyword_is(Keyword::WITH)?;
            p.expect_keyword_is(Keyword::FUNCTION)?;
            let function = p.parse_object_name(false)?;
            p.expect_token(&Token::LParen)?;
            // The parenthesized argument type list may be empty: `fn()`.
            let arg_types = if p.peek_token().token == Token::RParen {
                vec![]
            } else {
                p.parse_comma_separated(|p| p.parse_data_type())?
            };
            p.expect_token(&Token::RParen)?;
            Ok(TransformElement {
                is_from,
                function,
                arg_types,
            })
        })?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateTransform {
            or_replace,
            type_name,
            language,
            elements,
        })
    }
20734
20735
    /// Parse a PostgreSQL `SECURITY LABEL [FOR provider] ON object_kind
    /// object_name IS {'label' | NULL}` statement; the `SECURITY` keyword
    /// has already been consumed by the caller.
    pub fn parse_security_label(&mut self) -> Result<SecurityLabel, ParserError> {
        self.expect_keyword_is(Keyword::LABEL)?;

        // Optional `FOR provider`.
        let provider = if self.parse_keyword(Keyword::FOR) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::ON)?;

        // `MATERIALIZED VIEW` must be tried before the single-keyword
        // alternatives so `MATERIALIZED` is not left unconsumed.
        let object_kind = if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            SecurityLabelObjectKind::MaterializedView
        } else if self.parse_keyword(Keyword::TABLE) {
            SecurityLabelObjectKind::Table
        } else if self.parse_keyword(Keyword::COLUMN) {
            SecurityLabelObjectKind::Column
        } else if self.parse_keyword(Keyword::DATABASE) {
            SecurityLabelObjectKind::Database
        } else if self.parse_keyword(Keyword::DOMAIN) {
            SecurityLabelObjectKind::Domain
        } else if self.parse_keyword(Keyword::FUNCTION) {
            SecurityLabelObjectKind::Function
        } else if self.parse_keyword(Keyword::ROLE) {
            SecurityLabelObjectKind::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            SecurityLabelObjectKind::Schema
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            SecurityLabelObjectKind::Sequence
        } else if self.parse_keyword(Keyword::TYPE) {
            SecurityLabelObjectKind::Type
        } else if self.parse_keyword(Keyword::VIEW) {
            SecurityLabelObjectKind::View
        } else {
            return self.expected_ref(
                "TABLE, COLUMN, DATABASE, DOMAIN, FUNCTION, MATERIALIZED VIEW, ROLE, SCHEMA, SEQUENCE, TYPE, or VIEW after ON",
                self.peek_token_ref(),
            );
        };

        let object_name = self.parse_object_name(false)?;

        self.expect_keyword_is(Keyword::IS)?;

        // `IS NULL` clears the label; otherwise the label value follows.
        let label = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_value()?.value)
        };

        Ok(SecurityLabel {
            provider,
            object_kind,
            object_name,
            label,
        })
    }
20796
20797 pub fn parse_create_user_mapping(&mut self) -> Result<CreateUserMapping, ParserError> {
20801 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20802
20803 self.expect_keyword_is(Keyword::FOR)?;
20804
20805 let user = if self.parse_keyword(Keyword::CURRENT_ROLE) {
20806 UserMappingUser::CurrentRole
20807 } else if self.parse_keyword(Keyword::CURRENT_USER) {
20808 UserMappingUser::CurrentUser
20809 } else if self.parse_keyword(Keyword::PUBLIC) {
20810 UserMappingUser::Public
20811 } else if self.parse_keyword(Keyword::USER) {
20812 UserMappingUser::User
20813 } else {
20814 UserMappingUser::Ident(self.parse_identifier()?)
20815 };
20816
20817 self.expect_keyword_is(Keyword::SERVER)?;
20818 let server_name = self.parse_identifier()?;
20819
20820 let options = if self.parse_keyword(Keyword::OPTIONS) {
20821 self.expect_token(&Token::LParen)?;
20822 let opts = self.parse_comma_separated(|p| {
20823 let key = p.parse_identifier()?;
20824 let value = p.parse_identifier()?;
20825 Ok(CreateServerOption { key, value })
20826 })?;
20827 self.expect_token(&Token::RParen)?;
20828 Some(opts)
20829 } else {
20830 None
20831 };
20832
20833 Ok(CreateUserMapping {
20834 if_not_exists,
20835 user,
20836 server_name,
20837 options,
20838 })
20839 }
20840
20841 pub fn parse_create_tablespace(&mut self) -> Result<CreateTablespace, ParserError> {
20845 let name = self.parse_identifier()?;
20846
20847 let owner = if self.parse_keyword(Keyword::OWNER) {
20848 Some(self.parse_identifier()?)
20849 } else {
20850 None
20851 };
20852
20853 self.expect_keyword_is(Keyword::LOCATION)?;
20854 let location = self.parse_value()?.value;
20855
20856 let with_options = self.parse_options(Keyword::WITH)?;
20857
20858 Ok(CreateTablespace {
20859 name,
20860 owner,
20861 location,
20862 with_options,
20863 })
20864 }
20865
    /// Returns the parser's current position within the token stream.
    pub fn index(&self) -> usize {
        self.index
    }
20870
20871 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
20873 let ident = self.parse_identifier()?;
20874 self.expect_keyword_is(Keyword::AS)?;
20875
20876 let window_expr = if self.consume_token(&Token::LParen) {
20877 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
20878 } else if self.dialect.supports_window_clause_named_window_reference() {
20879 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
20880 } else {
20881 return self.expected_ref("(", self.peek_token_ref());
20882 };
20883
20884 Ok(NamedWindowDefinition(ident, window_expr))
20885 }
20886
20887 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
20889 let name = self.parse_object_name(false)?;
20890 let params = self.parse_optional_procedure_parameters()?;
20891
20892 let language = if self.parse_keyword(Keyword::LANGUAGE) {
20893 Some(self.parse_identifier()?)
20894 } else {
20895 None
20896 };
20897
20898 self.expect_keyword_is(Keyword::AS)?;
20899
20900 let body = self.parse_conditional_statements(&[Keyword::END])?;
20901
20902 Ok(Statement::CreateProcedure {
20903 name,
20904 or_alter,
20905 params,
20906 language,
20907 body,
20908 })
20909 }
20910
    /// Parse a window specification following an already-consumed `(`:
    /// `[name] [PARTITION BY ...] [ORDER BY ...] [frame] )`. The closing
    /// `)` is consumed by this function.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword word names an existing window that this
        // spec builds on.
        let window_name = match &self.peek_token_ref().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // If the next token is not the closing paren a window frame clause
        // must follow; otherwise `consume_token` has already eaten the `)`.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
20945
20946 pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
20948 let name = self.parse_object_name(false)?;
20949
20950 let has_as = self.parse_keyword(Keyword::AS);
20952
20953 if !has_as {
20954 if self.consume_token(&Token::LParen) {
20956 let options = self.parse_create_type_sql_definition_options()?;
20958 self.expect_token(&Token::RParen)?;
20959 return Ok(Statement::CreateType {
20960 name,
20961 representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
20962 });
20963 }
20964
20965 return Ok(Statement::CreateType {
20967 name,
20968 representation: None,
20969 });
20970 }
20971
20972 if self.parse_keyword(Keyword::ENUM) {
20974 self.parse_create_type_enum(name)
20976 } else if self.parse_keyword(Keyword::RANGE) {
20977 self.parse_create_type_range(name)
20979 } else if self.consume_token(&Token::LParen) {
20980 self.parse_create_type_composite(name)
20982 } else {
20983 self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
20984 }
20985 }
20986
20987 fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20991 if self.consume_token(&Token::RParen) {
20992 return Ok(Statement::CreateType {
20994 name,
20995 representation: Some(UserDefinedTypeRepresentation::Composite {
20996 attributes: vec![],
20997 }),
20998 });
20999 }
21000
21001 let mut attributes = vec![];
21002 loop {
21003 let attr_name = self.parse_identifier()?;
21004 let attr_data_type = self.parse_data_type()?;
21005 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
21006 Some(self.parse_object_name(false)?)
21007 } else {
21008 None
21009 };
21010 attributes.push(UserDefinedTypeCompositeAttributeDef {
21011 name: attr_name,
21012 data_type: attr_data_type,
21013 collation: attr_collation,
21014 });
21015
21016 if !self.consume_token(&Token::Comma) {
21017 break;
21018 }
21019 }
21020 self.expect_token(&Token::RParen)?;
21021
21022 Ok(Statement::CreateType {
21023 name,
21024 representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
21025 })
21026 }
21027
21028 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21032 self.expect_token(&Token::LParen)?;
21033 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21034 self.expect_token(&Token::RParen)?;
21035
21036 Ok(Statement::CreateType {
21037 name,
21038 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
21039 })
21040 }
21041
21042 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21046 self.expect_token(&Token::LParen)?;
21047 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
21048 self.expect_token(&Token::RParen)?;
21049
21050 Ok(Statement::CreateType {
21051 name,
21052 representation: Some(UserDefinedTypeRepresentation::Range { options }),
21053 })
21054 }
21055
21056 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
21058 let keyword = self.parse_one_of_keywords(&[
21059 Keyword::SUBTYPE,
21060 Keyword::SUBTYPE_OPCLASS,
21061 Keyword::COLLATION,
21062 Keyword::CANONICAL,
21063 Keyword::SUBTYPE_DIFF,
21064 Keyword::MULTIRANGE_TYPE_NAME,
21065 ]);
21066
21067 match keyword {
21068 Some(Keyword::SUBTYPE) => {
21069 self.expect_token(&Token::Eq)?;
21070 let data_type = self.parse_data_type()?;
21071 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
21072 }
21073 Some(Keyword::SUBTYPE_OPCLASS) => {
21074 self.expect_token(&Token::Eq)?;
21075 let name = self.parse_object_name(false)?;
21076 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
21077 }
21078 Some(Keyword::COLLATION) => {
21079 self.expect_token(&Token::Eq)?;
21080 let name = self.parse_object_name(false)?;
21081 Ok(UserDefinedTypeRangeOption::Collation(name))
21082 }
21083 Some(Keyword::CANONICAL) => {
21084 self.expect_token(&Token::Eq)?;
21085 let name = self.parse_object_name(false)?;
21086 Ok(UserDefinedTypeRangeOption::Canonical(name))
21087 }
21088 Some(Keyword::SUBTYPE_DIFF) => {
21089 self.expect_token(&Token::Eq)?;
21090 let name = self.parse_object_name(false)?;
21091 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
21092 }
21093 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
21094 self.expect_token(&Token::Eq)?;
21095 let name = self.parse_object_name(false)?;
21096 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
21097 }
21098 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
21099 }
21100 }
21101
    /// Parses the comma-separated option list of a SQL base-type definition,
    /// `CREATE TYPE name ( INPUT = ..., OUTPUT = ..., ... )`, stopping before
    /// the closing `)` (which the caller consumes).
    fn parse_create_type_sql_definition_options(
        &mut self,
    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
    }
21108
    /// Parses a single `KEY = value` option of a SQL base-type definition
    /// (`CREATE TYPE name (INPUT = ..., ...)`).
    ///
    /// Most options take an object name after `=`; exceptions are
    /// `INTERNALLENGTH` (a length or `VARIABLE`), `PASSEDBYVALUE` (no value),
    /// `ALIGNMENT`/`STORAGE` (fixed keyword sets), `CATEGORY` (single
    /// character), `PREFERRED`/`COLLATABLE` (booleans), `DEFAULT` (an
    /// expression), `ELEMENT` (a data type), and `DELIMITER` (a string).
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        match keyword {
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            Some(Keyword::INTERNALLENGTH) => {
                self.expect_token(&Token::Eq)?;
                // Either the keyword VARIABLE or a fixed unsigned length.
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // PASSEDBYVALUE is a bare flag: no `= value` part.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected_ref(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected_ref(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            Some(Keyword::CATEGORY) => {
                self.expect_token(&Token::Eq)?;
                // Only the first character of the string literal is kept;
                // an empty string is rejected.
                let category_str = self.parse_literal_string()?;
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): anything other than the FALSE keyword
                // (including TRUE) yields `true`; a non-boolean token is left
                // unconsumed and will fail in the surrounding list parser —
                // confirm this is the intended error path.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                // Same TRUE/FALSE handling as PREFERRED above.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
        }
    }
21289
    /// Parses a parenthesized, possibly empty, comma-separated list of
    /// identifiers, consuming both parentheses.
    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        self.expect_token(&Token::LParen)?;
        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
        self.expect_token(&Token::RParen)?;
        Ok(idents)
    }
21296
21297 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
21298 if dialect_of!(self is MySqlDialect | GenericDialect) {
21299 if self.parse_keyword(Keyword::FIRST) {
21300 Ok(Some(MySQLColumnPosition::First))
21301 } else if self.parse_keyword(Keyword::AFTER) {
21302 let ident = self.parse_identifier()?;
21303 Ok(Some(MySQLColumnPosition::After(ident)))
21304 } else {
21305 Ok(None)
21306 }
21307 } else {
21308 Ok(None)
21309 }
21310 }
21311
21312 fn parse_print(&mut self) -> Result<Statement, ParserError> {
21314 Ok(Statement::Print(PrintStatement {
21315 message: Box::new(self.parse_expr()?),
21316 }))
21317 }
21318
21319 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
21323 let wait_type = if self.parse_keyword(Keyword::DELAY) {
21324 WaitForType::Delay
21325 } else if self.parse_keyword(Keyword::TIME) {
21326 WaitForType::Time
21327 } else {
21328 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
21329 };
21330 let expr = self.parse_expr()?;
21331 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
21332 }
21333
21334 fn parse_return(&mut self) -> Result<Statement, ParserError> {
21336 match self.maybe_parse(|p| p.parse_expr())? {
21337 Some(expr) => Ok(Statement::Return(ReturnStatement {
21338 value: Some(ReturnStatementValue::Expr(expr)),
21339 })),
21340 None => Ok(Statement::Return(ReturnStatement { value: None })),
21341 }
21342 }
21343
21344 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
21348 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
21349
21350 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
21351 Some(self.parse_object_name(false)?)
21352 } else {
21353 None
21354 };
21355 self.expect_keyword(Keyword::OPTIONS)?;
21356 self.expect_token(&Token::LParen)?;
21357 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
21358 self.expect_token(&Token::RParen)?;
21359 self.expect_keyword(Keyword::AS)?;
21360 let query = self.parse_query()?;
21361 Ok(Statement::ExportData(ExportData {
21362 options,
21363 query,
21364 connection,
21365 }))
21366 }
21367
21368 fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
21369 self.expect_keyword(Keyword::VACUUM)?;
21370 let full = self.parse_keyword(Keyword::FULL);
21371 let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
21372 let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
21373 let reindex = self.parse_keyword(Keyword::REINDEX);
21374 let recluster = self.parse_keyword(Keyword::RECLUSTER);
21375 let (table_name, threshold, boost) =
21376 match self.maybe_parse(|p| p.parse_object_name(false))? {
21377 Some(table_name) => {
21378 let threshold = if self.parse_keyword(Keyword::TO) {
21379 let value = self.parse_value()?;
21380 self.expect_keyword(Keyword::PERCENT)?;
21381 Some(value)
21382 } else {
21383 None
21384 };
21385 let boost = self.parse_keyword(Keyword::BOOST);
21386 (Some(table_name), threshold, boost)
21387 }
21388 _ => (None, None, false),
21389 };
21390 Ok(Statement::Vacuum(VacuumStatement {
21391 full,
21392 sort_only,
21393 delete_only,
21394 reindex,
21395 recluster,
21396 table_name,
21397 threshold,
21398 boost,
21399 }))
21400 }
21401
    /// Consumes the parser and returns its complete token buffer, including
    /// any tokens not yet consumed by parsing.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
21406
    /// Returns true if the next keyword starts a subquery (`SELECT` or
    /// `WITH`), without consuming any tokens.
    fn peek_sub_query(&mut self) -> bool {
        self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
            .is_some()
    }
21412
21413 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
21414 let show_in;
21415 let mut filter_position = None;
21416 if self.dialect.supports_show_like_before_in() {
21417 if let Some(filter) = self.parse_show_statement_filter()? {
21418 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
21419 }
21420 show_in = self.maybe_parse_show_stmt_in()?;
21421 } else {
21422 show_in = self.maybe_parse_show_stmt_in()?;
21423 if let Some(filter) = self.parse_show_statement_filter()? {
21424 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
21425 }
21426 }
21427 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
21428 let limit = self.maybe_parse_show_stmt_limit()?;
21429 let from = self.maybe_parse_show_stmt_from()?;
21430 Ok(ShowStatementOptions {
21431 filter_position,
21432 show_in,
21433 starts_with,
21434 limit,
21435 limit_from: from,
21436 })
21437 }
21438
21439 fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
21440 let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
21441 Some(Keyword::FROM) => ShowStatementInClause::FROM,
21442 Some(Keyword::IN) => ShowStatementInClause::IN,
21443 None => return Ok(None),
21444 _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
21445 };
21446
21447 let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
21448 Keyword::ACCOUNT,
21449 Keyword::DATABASE,
21450 Keyword::SCHEMA,
21451 Keyword::TABLE,
21452 Keyword::VIEW,
21453 ]) {
21454 Some(Keyword::DATABASE)
21456 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
21457 | self.peek_keyword(Keyword::LIMIT) =>
21458 {
21459 (Some(ShowStatementInParentType::Database), None)
21460 }
21461 Some(Keyword::SCHEMA)
21462 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
21463 | self.peek_keyword(Keyword::LIMIT) =>
21464 {
21465 (Some(ShowStatementInParentType::Schema), None)
21466 }
21467 Some(parent_kw) => {
21468 let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
21472 match parent_kw {
21473 Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
21474 Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
21475 Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
21476 Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
21477 Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
21478 _ => {
21479 return self.expected_ref(
21480 "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
21481 self.peek_token_ref(),
21482 )
21483 }
21484 }
21485 }
21486 None => {
21487 let mut parent_name = self.parse_object_name(false)?;
21490 if self
21491 .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
21492 .is_some()
21493 {
21494 parent_name
21495 .0
21496 .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
21497 }
21498 (None, Some(parent_name))
21499 }
21500 };
21501
21502 Ok(Some(ShowStatementIn {
21503 clause,
21504 parent_type,
21505 parent_name,
21506 }))
21507 }
21508
21509 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21510 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
21511 Ok(Some(self.parse_value()?))
21512 } else {
21513 Ok(None)
21514 }
21515 }
21516
21517 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
21518 if self.parse_keyword(Keyword::LIMIT) {
21519 Ok(self.parse_limit()?)
21520 } else {
21521 Ok(None)
21522 }
21523 }
21524
21525 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21526 if self.parse_keyword(Keyword::FROM) {
21527 Ok(Some(self.parse_value()?))
21528 } else {
21529 Ok(None)
21530 }
21531 }
21532
    /// Returns true when the parser is currently in the
    /// `ParserState::ColumnDefinition` state.
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
21536
21537 pub(crate) fn parse_key_value_options(
21542 &mut self,
21543 parenthesized: bool,
21544 end_words: &[Keyword],
21545 ) -> Result<KeyValueOptions, ParserError> {
21546 let mut options: Vec<KeyValueOption> = Vec::new();
21547 let mut delimiter = KeyValueOptionsDelimiter::Space;
21548 if parenthesized {
21549 self.expect_token(&Token::LParen)?;
21550 }
21551 loop {
21552 match self.next_token().token {
21553 Token::RParen => {
21554 if parenthesized {
21555 break;
21556 } else {
21557 return self.expected_ref(" another option or EOF", self.peek_token_ref());
21558 }
21559 }
21560 Token::EOF | Token::SemiColon => break,
21561 Token::Comma => {
21562 delimiter = KeyValueOptionsDelimiter::Comma;
21563 continue;
21564 }
21565 Token::Word(w) if !end_words.contains(&w.keyword) => {
21566 options.push(self.parse_key_value_option(&w)?)
21567 }
21568 Token::Word(w) if end_words.contains(&w.keyword) => {
21569 self.prev_token();
21570 break;
21571 }
21572 _ => {
21573 return self.expected_ref(
21574 "another option, EOF, SemiColon, Comma or ')'",
21575 self.peek_token_ref(),
21576 )
21577 }
21578 };
21579 }
21580
21581 Ok(KeyValueOptions { delimiter, options })
21582 }
21583
    /// Parses the `= value` part of a `key = value` option; the already
    /// consumed `key` word is passed in. Values may be single-quoted strings,
    /// `TRUE`/`FALSE`, numbers, bare words (kept as placeholder values), a
    /// parenthesized list of values, or a nested parenthesized option list.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            // 'string' value
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // TRUE / FALSE boolean value
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            // Numeric value
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word: consumed and stored as a placeholder value
            // carrying the original span.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            Token::LParen => {
                // First try `(v1, v2, ...)` as a plain value list; if that
                // fails, fall back to a nested `(k = v ...)` option list.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
21642
21643 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
21645 if self.parse_keyword(Keyword::ALL) {
21646 return Ok(ResetStatement { reset: Reset::ALL });
21647 }
21648
21649 let obj = self.parse_object_name(false)?;
21650 Ok(ResetStatement {
21651 reset: Reset::ConfigurationParameter(obj),
21652 })
21653 }
21654}
21655
21656fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
21657 if let Some(prefix) = prefix {
21658 Expr::Prefixed {
21659 prefix,
21660 value: Box::new(expr),
21661 }
21662 } else {
21663 expr
21664 }
21665}
21666
21667impl Word {
21668 pub fn to_ident(&self, span: Span) -> Ident {
21674 Ident {
21675 value: self.value.clone(),
21676 quote_style: self.quote_style,
21677 span,
21678 }
21679 }
21680
21681 pub fn into_ident(self, span: Span) -> Ident {
21686 Ident {
21687 value: self.value,
21688 quote_style: self.quote_style,
21689 span,
21690 }
21691 }
21692}
21693
21694#[cfg(test)]
21695mod tests {
21696 use crate::test_utils::{all_dialects, TestedDialects};
21697
21698 use super::*;
21699
    #[test]
    fn test_prev_index() {
        // Verifies that `prev_token` rewinds the cursor one step, and that
        // EOF can be read repeatedly and rewound past.
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // EOF is sticky: repeated reads keep yielding EOF.
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
21720
    #[test]
    fn test_peek_tokens() {
        // `peek_tokens` returns the next N tokens (padded with EOF at the
        // end) without advancing the cursor.
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Advance past "SELECT foo AS bar".
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
21765
    #[cfg(test)]
    mod test_parse_data_type {
        use crate::ast::{
            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
        };
        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
        use crate::test_utils::TestedDialects;

        // Asserts that `$input` parses to `$expected_type` and that the AST
        // round-trips back to the identical SQL text.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }

        #[test]
        fn test_ansii_character_string_types() {
            // CHARACTER/CHAR [VARYING] with optional length and length units.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }

        #[test]
        fn test_ansii_character_large_object_types() {
            // CHARACTER/CHAR LARGE OBJECT and CLOB with optional length.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }

        #[test]
        fn test_parse_custom_types() {
            // Unknown type names fall back to DataType::Custom with optional
            // string arguments.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }

        #[test]
        fn test_ansii_exact_numeric_types() {
            // NUMERIC/DECIMAL/DEC with precision, scale, and negative scale.
            let dialect = TestedDialects::new(vec![
                Box::new(GenericDialect {}),
                Box::new(AnsiDialect {}),
                Box::new(PostgreSqlDialect {}),
            ]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            // Negative scales are accepted and round-tripped.
            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-2)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(1000,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-1000)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-5)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(20,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-2)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
            );

            // An explicit `+` sign on the scale is accepted but not
            // round-tripped, so it cannot use the round-trip macro.
            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
                let data_type = parser.parse_data_type().unwrap();
                assert_eq!(
                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
                    data_type
                );
                assert_eq!("NUMERIC(10,5)", data_type.to_string());
            });
        }

        #[test]
        fn test_ansii_date_type() {
            // DATE, TIME, and TIMESTAMP with precision and time-zone info.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
    }
22138
    #[test]
    fn test_parse_schema_name() {
        // Asserts that `$input` parses to `$expected_name` and that the AST
        // round-trips back to the identical SQL text.
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        // Plain schema name.
        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        // AUTHORIZATION with and without an explicit schema name.
        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }
22171
22172 #[test]
22173 fn mysql_parse_index_table_constraint() {
22174 macro_rules! test_parse_table_constraint {
22175 ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
22176 $dialect.run_parser_method(&*$input, |parser| {
22177 let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
22178 assert_eq!(constraint, $expected);
22180 assert_eq!(constraint.to_string(), $input.to_string());
22182 });
22183 }};
22184 }
22185
22186 fn mk_expected_col(name: &str) -> IndexColumn {
22187 IndexColumn {
22188 column: OrderByExpr {
22189 expr: Expr::Identifier(name.into()),
22190 options: OrderByOptions {
22191 asc: None,
22192 nulls_first: None,
22193 },
22194 with_fill: None,
22195 },
22196 operator_class: None,
22197 }
22198 }
22199
22200 let dialect =
22201 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
22202
22203 test_parse_table_constraint!(
22204 dialect,
22205 "INDEX (c1)",
22206 IndexConstraint {
22207 display_as_key: false,
22208 name: None,
22209 index_type: None,
22210 columns: vec![mk_expected_col("c1")],
22211 index_options: vec![],
22212 }
22213 .into()
22214 );
22215
22216 test_parse_table_constraint!(
22217 dialect,
22218 "KEY (c1)",
22219 IndexConstraint {
22220 display_as_key: true,
22221 name: None,
22222 index_type: None,
22223 columns: vec![mk_expected_col("c1")],
22224 index_options: vec![],
22225 }
22226 .into()
22227 );
22228
22229 test_parse_table_constraint!(
22230 dialect,
22231 "INDEX 'index' (c1, c2)",
22232 TableConstraint::Index(IndexConstraint {
22233 display_as_key: false,
22234 name: Some(Ident::with_quote('\'', "index")),
22235 index_type: None,
22236 columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
22237 index_options: vec![],
22238 })
22239 );
22240
22241 test_parse_table_constraint!(
22242 dialect,
22243 "INDEX USING BTREE (c1)",
22244 IndexConstraint {
22245 display_as_key: false,
22246 name: None,
22247 index_type: Some(IndexType::BTree),
22248 columns: vec![mk_expected_col("c1")],
22249 index_options: vec![],
22250 }
22251 .into()
22252 );
22253
22254 test_parse_table_constraint!(
22255 dialect,
22256 "INDEX USING HASH (c1)",
22257 IndexConstraint {
22258 display_as_key: false,
22259 name: None,
22260 index_type: Some(IndexType::Hash),
22261 columns: vec![mk_expected_col("c1")],
22262 index_options: vec![],
22263 }
22264 .into()
22265 );
22266
22267 test_parse_table_constraint!(
22268 dialect,
22269 "INDEX idx_name USING BTREE (c1)",
22270 IndexConstraint {
22271 display_as_key: false,
22272 name: Some(Ident::new("idx_name")),
22273 index_type: Some(IndexType::BTree),
22274 columns: vec![mk_expected_col("c1")],
22275 index_options: vec![],
22276 }
22277 .into()
22278 );
22279
22280 test_parse_table_constraint!(
22281 dialect,
22282 "INDEX idx_name USING HASH (c1)",
22283 IndexConstraint {
22284 display_as_key: false,
22285 name: Some(Ident::new("idx_name")),
22286 index_type: Some(IndexType::Hash),
22287 columns: vec![mk_expected_col("c1")],
22288 index_options: vec![],
22289 }
22290 .into()
22291 );
22292 }
22293
22294 #[test]
22295 fn test_tokenizer_error_loc() {
22296 let sql = "foo '";
22297 let ast = Parser::parse_sql(&GenericDialect, sql);
22298 assert_eq!(
22299 ast,
22300 Err(ParserError::TokenizerError(
22301 "Unterminated string literal at Line: 1, Column: 5".to_string()
22302 ))
22303 );
22304 }
22305
22306 #[test]
22307 fn test_parser_error_loc() {
22308 let sql = "SELECT this is a syntax error";
22309 let ast = Parser::parse_sql(&GenericDialect, sql);
22310 assert_eq!(
22311 ast,
22312 Err(ParserError::ParserError(
22313 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
22314 .to_string()
22315 ))
22316 );
22317 }
22318
22319 #[test]
22320 fn test_nested_explain_error() {
22321 let sql = "EXPLAIN EXPLAIN SELECT 1";
22322 let ast = Parser::parse_sql(&GenericDialect, sql);
22323 assert_eq!(
22324 ast,
22325 Err(ParserError::ParserError(
22326 "Explain must be root of the plan".to_string()
22327 ))
22328 );
22329 }
22330
22331 #[test]
22332 fn test_parse_multipart_identifier_positive() {
22333 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
22334
22335 let expected = vec![
22337 Ident {
22338 value: "CATALOG".to_string(),
22339 quote_style: None,
22340 span: Span::empty(),
22341 },
22342 Ident {
22343 value: "F(o)o. \"bar".to_string(),
22344 quote_style: Some('"'),
22345 span: Span::empty(),
22346 },
22347 Ident {
22348 value: "table".to_string(),
22349 quote_style: None,
22350 span: Span::empty(),
22351 },
22352 ];
22353 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
22354 let actual = parser.parse_multipart_identifier().unwrap();
22355 assert_eq!(expected, actual);
22356 });
22357
22358 let expected = vec![
22360 Ident {
22361 value: "CATALOG".to_string(),
22362 quote_style: None,
22363 span: Span::empty(),
22364 },
22365 Ident {
22366 value: "table".to_string(),
22367 quote_style: None,
22368 span: Span::empty(),
22369 },
22370 ];
22371 dialect.run_parser_method("CATALOG . table", |parser| {
22372 let actual = parser.parse_multipart_identifier().unwrap();
22373 assert_eq!(expected, actual);
22374 });
22375 }
22376
22377 #[test]
22378 fn test_parse_multipart_identifier_negative() {
22379 macro_rules! test_parse_multipart_identifier_error {
22380 ($input:expr, $expected_err:expr $(,)?) => {{
22381 all_dialects().run_parser_method(&*$input, |parser| {
22382 let actual_err = parser.parse_multipart_identifier().unwrap_err();
22383 assert_eq!(actual_err.to_string(), $expected_err);
22384 });
22385 }};
22386 }
22387
22388 test_parse_multipart_identifier_error!(
22389 "",
22390 "sql parser error: Empty input when parsing identifier",
22391 );
22392
22393 test_parse_multipart_identifier_error!(
22394 "*schema.table",
22395 "sql parser error: Unexpected token in identifier: *",
22396 );
22397
22398 test_parse_multipart_identifier_error!(
22399 "schema.table*",
22400 "sql parser error: Unexpected token in identifier: *",
22401 );
22402
22403 test_parse_multipart_identifier_error!(
22404 "schema.table.",
22405 "sql parser error: Trailing period in identifier",
22406 );
22407
22408 test_parse_multipart_identifier_error!(
22409 "schema.*",
22410 "sql parser error: Unexpected token following period in identifier: *",
22411 );
22412 }
22413
22414 #[test]
22415 fn test_mysql_partition_selection() {
22416 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
22417 let expected = vec!["p0", "p2"];
22418
22419 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
22420 assert_eq!(ast.len(), 1);
22421 if let Statement::Query(v) = &ast[0] {
22422 if let SetExpr::Select(select) = &*v.body {
22423 assert_eq!(select.from.len(), 1);
22424 let from: &TableWithJoins = &select.from[0];
22425 let table_factor = &from.relation;
22426 if let TableFactor::Table { partitions, .. } = table_factor {
22427 let actual: Vec<&str> = partitions
22428 .iter()
22429 .map(|ident| ident.value.as_str())
22430 .collect();
22431 assert_eq!(expected, actual);
22432 }
22433 }
22434 } else {
22435 panic!("fail to parse mysql partition selection");
22436 }
22437 }
22438
22439 #[test]
22440 fn test_replace_into_placeholders() {
22441 let sql = "REPLACE INTO t (a) VALUES (&a)";
22442
22443 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
22444 }
22445
22446 #[test]
22447 fn test_replace_into_set_placeholder() {
22448 let sql = "REPLACE INTO t SET ?";
22449
22450 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
22451 }
22452
22453 #[test]
22454 fn test_replace_incomplete() {
22455 let sql = r#"REPLACE"#;
22456
22457 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
22458 }
22459
22460 #[test]
22461 fn test_placeholder_invalid_whitespace() {
22462 for w in [" ", "/*invalid*/"] {
22463 let sql = format!("\nSELECT\n :{w}fooBar");
22464 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
22465 }
22466 }
22467}