1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while converting SQL text into an AST.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could begin; payload is the
    /// tokenizer's error message.
    TokenizerError(String),
    /// The token stream did not match the expected grammar.
    ParserError(String),
    /// Nesting exceeded the configured recursion limit (see
    /// `RecursionCounter`).
    RecursionLimitExceeded,
}
60
// Builds an `Err(ParserError::ParserError(..))` whose message is `$MSG`
// immediately followed by the rendered source location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
// Statement-specific sub-parsers split into their own files.
mod alter;
mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks the remaining recursion budget and hands out RAII guards
    /// that return their unit of budget when dropped.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` nested descents.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Spends one unit of depth, or reports `RecursionLimitExceeded`
        /// when the budget is exhausted. The returned guard restores the
        /// unit when it goes out of scope.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get() {
                0 => Err(ParserError::RecursionLimitExceeded),
                depth => {
                    self.remaining_depth.set(depth - 1);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// RAII token: while alive, one unit of recursion depth is in use.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        /// Gives the borrowed unit of depth back to the shared counter.
        fn drop(&mut self) {
            let depth = self.remaining_depth.get();
            self.remaining_depth.set(depth + 1);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    // No-op stand-in used without `std`: depth is not tracked and
    // `try_decrease` always succeeds.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        // Always grants a guard; there is no budget to exhaust.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    // Zero-sized guard matching the `std` API shape.
    pub struct DepthGuard {}
}
158
/// Whether a syntactic element may be omitted or must be present.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
167
/// Marks whether a derived table was introduced with the LATERAL keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// Result of parsing a select-item position: a regular expression, a
/// qualified wildcard (`alias.*`), or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
185
186impl From<TokenizerError> for ParserError {
187 fn from(e: TokenizerError) -> Self {
188 ParserError::TokenizerError(e.to_string())
189 }
190}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// Marker impl so `ParserError` composes with `Box<dyn Error>` and `?`.
impl core::error::Error for ParserError {}
207
/// Default recursion budget used by `Parser::new`; override with
/// `Parser::with_recursion_limit`.
const DEFAULT_REMAINING_DEPTH: usize = 50;
210
/// Sentinel returned when the token stream is exhausted; carries a zeroed
/// span since there is no real source location.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
// Newtype flag — presumably records whether a trailing `>` was already
// consumed while parsing a nested generic type (e.g. `ARRAY<ARRAY<INT>>`);
// TODO(review): confirm at the use sites, which are outside this chunk.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
239
/// Flags controlling parser behavior, independent of the SQL dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept a trailing comma in comma-separated lists.
    pub trailing_commas: bool,
    /// Unescape string literals while tokenizing (see
    /// `Tokenizer::with_unescape`).
    pub unescape: bool,
    /// Require `;` between statements; when `false`, statements may abut.
    pub require_semicolon_stmt_delimiter: bool,
}
252
253impl Default for ParserOptions {
254 fn default() -> Self {
255 Self {
256 trailing_commas: false,
257 unescape: true,
258 require_semicolon_stmt_delimiter: true,
259 }
260 }
261}
262
263impl ParserOptions {
264 pub fn new() -> Self {
266 Default::default()
267 }
268
269 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
281 self.trailing_commas = trailing_commas;
282 self
283 }
284
285 pub fn with_unescape(mut self, unescape: bool) -> Self {
288 self.unescape = unescape;
289 self
290 }
291}
292
/// Parsing modes that alter how certain constructs are interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing rules apply.
    Normal,
    /// Inside a CONNECT BY clause — NOTE(review): semantics inferred from
    /// the variant name; confirm at the sites that set this state.
    ConnectBy,
    /// Inside a column definition; `parse_subexpr` skips the COLLATE
    /// suffix while in this state.
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream produced by the tokenizer and
/// builds AST statements/expressions for a given dialect.
pub struct Parser<'a> {
    /// The tokens being parsed (whitespace/comment tokens included — see
    /// `into_comments`).
    tokens: Vec<TokenWithSpan>,
    /// Index of the next unprocessed token in `tokens`.
    index: usize,
    /// Current special-case parsing mode.
    state: ParserState,
    /// Dialect driving keyword and feature decisions.
    dialect: &'a dyn Dialect,
    /// Behavior flags (trailing commas, unescaping, delimiter rules).
    options: ParserOptions,
    /// Shared recursion budget guarding against deeply nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
365 pub fn new(dialect: &'a dyn Dialect) -> Self {
381 Self {
382 tokens: vec![],
383 index: 0,
384 state: ParserState::Normal,
385 dialect,
386 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
387 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
388 }
389 }
390
391 pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
414 self.recursion_counter = RecursionCounter::new(recursion_limit);
415 self
416 }
417
418 pub fn with_options(mut self, options: ParserOptions) -> Self {
441 self.options = options;
442 self
443 }
444
445 pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
447 self.tokens = tokens;
448 self.index = 0;
449 self
450 }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
465 pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
472 debug!("Parsing sql '{sql}'...");
473 let tokens = Tokenizer::new(self.dialect, sql)
474 .with_unescape(self.options.unescape)
475 .tokenize_with_location()?;
476 Ok(self.with_tokens_with_locations(tokens))
477 }
478
    /// Parses the loaded tokens as a semicolon-separated list of
    /// statements, returning them in order.
    ///
    /// # Errors
    /// Fails if any statement is malformed, or if two statements are not
    /// separated by `;` while `require_semicolon_stmt_delimiter` is set.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any run of semicolons between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some configurations allow statements with no delimiter.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // A bare END after a delimiter terminates the list without
                // being consumed — presumably left for an enclosing
                // BEGIN...END parser; confirm with callers.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
530 pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
546 Parser::new(dialect).try_with_sql(sql)?.parse_statements()
547 }
548
549 pub fn parse_sql_with_comments(
554 dialect: &'a dyn Dialect,
555 sql: &str,
556 ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
557 let mut p = Parser::new(dialect).try_with_sql(sql)?;
558 p.parse_statements().map(|stmts| (stmts, p.into_comments()))
559 }
560
561 fn into_comments(self) -> comments::Comments {
563 let mut comments = comments::Comments::default();
564 for t in self.tokens.into_iter() {
565 match t.token {
566 Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
567 comments.offer(comments::CommentWithSpan {
568 comment: comments::Comment::SingleLine {
569 content: comment,
570 prefix,
571 },
572 span: t.span,
573 });
574 }
575 Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
576 comments.offer(comments::CommentWithSpan {
577 comment: comments::Comment::MultiLine(comment),
578 span: t.span,
579 });
580 }
581 _ => {}
582 }
583 }
584 comments
585 }
586
    /// Parses a single top-level statement (SELECT, INSERT, CREATE, ...),
    /// dispatching on the first token.
    ///
    /// # Errors
    /// Returns an "expected an SQL statement" error when the first token
    /// begins no supported statement, or propagates errors from the
    /// statement-specific sub-parsers.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Bound nesting so pathological input cannot overflow the stack.
        let _guard = self.recursion_counter.try_decrease()?;

        // Dialects get first refusal and may fully override a statement.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                // Sub-parsers that re-read their introducing keyword
                // rewind one token before delegating.
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                // ATTACH is DuckDB- or SQLite-flavored depending on dialect.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                // These sub-parsers receive the consumed keyword token so
                // they can attach it to the AST node.
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // Dialect-gated statements fall through to the generic
                // error arm when the dialect does not support them.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                Keyword::SECURITY => self.parse_security_label().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A parenthesized query, e.g. `(SELECT 1)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
732
    /// Parses a procedural `CASE ... WHEN ... [ELSE ...] END [CASE]`
    /// statement (not the CASE expression).
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // Operand form (`CASE x WHEN ...`) carries an expression before
        // the first WHEN; the searched form (`CASE WHEN ...`) has none.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the token attached to the AST
        // is the last keyword actually consumed.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
769
    /// Parses an `IF ... [ELSEIF ...]* [ELSE ...] END IF` statement.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        // The IF arm runs until any keyword that may follow it.
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF arms.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The statement is closed by `END IF`; the IF token is attached.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
809
810 fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
814 self.expect_keyword_is(Keyword::WHILE)?;
815 let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
816
817 Ok(WhileStatement { while_block })
818 }
819
    /// Parses one arm of a conditional construct (IF/ELSEIF/ELSE/WHEN/
    /// WHILE): the optional condition, an optional THEN token, and the
    /// arm's statements, stopping before any of `terminal_keywords`.
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        // NOTE(review): `get_current_token` appears to return the keyword
        // the caller just consumed; it becomes this arm's start token.
        let start_token = self.get_current_token().clone();
        let mut then_token = None;

        let condition = match &start_token.token {
            // ELSE arms carry no condition.
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            // WHILE arms have a condition but no THEN keyword.
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            // IF/ELSEIF/WHEN arms: a condition followed by THEN.
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }
856
857 pub(crate) fn parse_conditional_statements(
860 &mut self,
861 terminal_keywords: &[Keyword],
862 ) -> Result<ConditionalStatements, ParserError> {
863 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
864 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
865 let statements = self.parse_statement_list(terminal_keywords)?;
866 let end_token = self.expect_keyword(Keyword::END)?;
867
868 ConditionalStatements::BeginEnd(BeginEndStatements {
869 begin_token: AttachedToken(begin_token),
870 statements,
871 end_token: AttachedToken(end_token),
872 })
873 } else {
874 ConditionalStatements::Sequence {
875 statements: self.parse_statement_list(terminal_keywords)?,
876 }
877 };
878 Ok(conditional_statements)
879 }
880
881 pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
885 self.expect_keyword_is(Keyword::RAISE)?;
886
887 let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
888 self.expect_token(&Token::Eq)?;
889 Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
890 } else {
891 self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
892 };
893
894 Ok(RaiseStatement { value })
895 }
    /// Parses `COMMENT [IF EXISTS] ON <object-type> <name> IS {'text'|NULL}`
    /// (PostgreSQL-style object comments).
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        // NOTE(review): IF EXISTS before ON is a non-standard placement;
        // confirm which dialect this serves.
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        // Each arm maps the object-type keyword(s) to a CommentObject and
        // then parses the object's (possibly qualified) name.
        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLLATION => {
                (CommentObject::Collation, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DOMAIN => {
                (CommentObject::Domain, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::FUNCTION => {
                (CommentObject::Function, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::INDEX => {
                (CommentObject::Index, self.parse_object_name(false)?)
            }
            // The only two-keyword object type: MATERIALIZED VIEW.
            Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
                self.expect_keyword_is(Keyword::VIEW)?;
                (
                    CommentObject::MaterializedView,
                    self.parse_object_name(false)?,
                )
            }
            Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
                (CommentObject::Procedure, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
                (CommentObject::Sequence, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TYPE => {
                (CommentObject::Type, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::VIEW => {
                (CommentObject::View, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` clears the comment; a string literal sets it.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }
974
    /// Parses MySQL's `FLUSH` statement (logs, tables, privileges, ...).
    ///
    /// # Errors
    /// Rejected outright for dialects other than MySQL/Generic; otherwise
    /// errors when the flush target is not one of the recognized forms.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!(
                "Unsupported statement FLUSH",
                self.peek_token_ref().span.start
            );
        }

        // Optional NO_WRITE_TO_BINLOG / LOCAL prefix.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // Flush target: multi-word forms are tried before the bare LOGS
        // fallback so e.g. `BINARY LOGS` is not mis-read as `LOGS`.
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // RELAY LOGS may target a specific replication channel.
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // FLUSH TABLES [t1, t2, ...] [WITH READ LOCK] [FOR EXPORT]
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        // A non-keyword word starts the table-name list.
                        Keyword::NoKeyword => {
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    // Any non-word token ends the TABLES clause.
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected_ref(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
1065
    /// Parses Hive's `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`.
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition clause is optional; maybe_parse rewinds if it is
        // absent or malformed, leaving the default action.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1093
    /// Parses `TRUNCATE [TABLE] [IF EXISTS] <name>[, ...]` with the
    /// dialect-specific trailers (PARTITION, identity/cascade options,
    /// ON CLUSTER).
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        // Each target may carry a Postgres-style ONLY prefix and a
        // trailing `*`.
        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        // Optional partition selector: PARTITION (expr, ...).
        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // RESTART/CONTINUE IDENTITY and CASCADE/RESTRICT are only parsed
        // for Postgres-like dialects.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        // Optional ON CLUSTER <name> (e.g. ClickHouse).
        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1144
1145 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1146 if self.parse_keyword(Keyword::CASCADE) {
1147 Some(CascadeOption::Cascade)
1148 } else if self.parse_keyword(Keyword::RESTRICT) {
1149 Some(CascadeOption::Restrict)
1150 } else {
1151 None
1152 }
1153 }
1154
    /// Parses the optional parenthesized option list of DuckDB `ATTACH`:
    /// `(READ_ONLY [TRUE|FALSE], TYPE <ident>, ...)`. Returns an empty
    /// list when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean argument is optional; bare READ_ONLY is None.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // Options are comma separated; `)` terminates the list.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1191
    /// Parses DuckDB's `ATTACH [DATABASE] [IF NOT EXISTS] <path>
    /// [AS alias] [(options)]` (the ATTACH keyword is already consumed).
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        // Optional alias introduced by AS.
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1212
1213 pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1215 let database = self.parse_keyword(Keyword::DATABASE);
1216 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1217 let database_alias = self.parse_identifier()?;
1218 Ok(Statement::DetachDuckDBDatabase {
1219 if_exists,
1220 database,
1221 database_alias,
1222 })
1223 }
1224
1225 pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1227 let database = self.parse_keyword(Keyword::DATABASE);
1228 let database_file_name = self.parse_expr()?;
1229 self.expect_keyword_is(Keyword::AS)?;
1230 let schema_name = self.parse_identifier()?;
1231 Ok(Statement::AttachDatabase {
1232 database,
1233 schema_name,
1234 database_file_name,
1235 })
1236 }
1237
    /// Parses `ANALYZE [TABLE] [<name> [(cols)]]` plus its optional
    /// trailers (PARTITION, FOR COLUMNS, CACHE METADATA, NOSCAN,
    /// COMPUTE STATISTICS), which may appear in any order.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        // The table name itself is optional (e.g. a bare `ANALYZE`).
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // Optional parenthesized column list directly after the name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // Loop until no further trailer keyword matches.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list after FOR COLUMNS is optional.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1302
    /// Parses `*`, `qualifier.*`, or `(*)`; if none of those forms
    /// matches, rewinds and parses a regular expression instead.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the cursor so we can backtrack to a plain expression
        // parse when no wildcard form matches.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token_ref().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: the outer pattern admits only the
                        // two variants handled above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    // Accumulate `.part` segments until `*` or an error.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Placeholder(s) => {
                                id_parts.push(Ident::new(s))
                            }
                            // `qualifier.*` — a qualified wildcard.
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            // Bare `*`.
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            // `(*)` is accepted as a wildcard: consume `*` and `)`.
            Token::LParen => {
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // No wildcard form matched: rewind and parse a normal expression.
        self.index = index;
        self.parse_expr()
    }
1366
    /// Parses a complete expression at the dialect's lowest precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1371
    /// Parses an expression with an optional alias and optional ASC/DESC
    /// ordering (nulls-first is not parsed here).
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject ASC/DESC/GROUP as implicit aliases: they belong to the
        // surrounding clause unless explicitly introduced with AS.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1391
    /// Precedence-climbing core of the expression parser: parses an
    /// expression whose operators all bind tighter than `precedence`.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Bound nesting so pathological input cannot overflow the stack.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        // Fold compound access (e.g. field access/indexing) onto the prefix.
        expr = self.parse_compound_expr(expr, vec![])?;

        // COLLATE is skipped inside column definitions — NOTE(review):
        // presumably handled there as a column option; confirm against the
        // column-definition parser.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            // Stop when the next operator binds no tighter than the
            // caller's precedence floor.
            if precedence >= next_precedence {
                break;
            }

            // `.` chains are handled exclusively by compound-expression
            // parsing above; never treat them as infix operators here.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1430
1431 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1433 let condition = self.parse_expr()?;
1434 let message = if self.parse_keyword(Keyword::AS) {
1435 Some(self.parse_expr()?)
1436 } else {
1437 None
1438 };
1439
1440 Ok(Statement::Assert { condition, message })
1441 }
1442
1443 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1445 let name = self.parse_identifier()?;
1446 Ok(Statement::Savepoint { name })
1447 }
1448
1449 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1451 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1452 let name = self.parse_identifier()?;
1453
1454 Ok(Statement::ReleaseSavepoint { name })
1455 }
1456
1457 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1459 let channel = self.parse_identifier()?;
1460 Ok(Statement::LISTEN { channel })
1461 }
1462
1463 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1465 let channel = if self.consume_token(&Token::Mul) {
1466 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1467 } else {
1468 match self.parse_identifier() {
1469 Ok(expr) => expr,
1470 _ => {
1471 self.prev_token();
1472 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1473 }
1474 }
1475 };
1476 Ok(Statement::UNLISTEN { channel })
1477 }
1478
1479 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1481 let channel = self.parse_identifier()?;
1482 let payload = if self.consume_token(&Token::Comma) {
1483 Some(self.parse_literal_string()?)
1484 } else {
1485 None
1486 };
1487 Ok(Statement::NOTIFY { channel, payload })
1488 }
1489
1490 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1492 if self.peek_keyword(Keyword::TABLE) {
1493 self.expect_keyword(Keyword::TABLE)?;
1494 let rename_tables = self.parse_comma_separated(|parser| {
1495 let old_name = parser.parse_object_name(false)?;
1496 parser.expect_keyword(Keyword::TO)?;
1497 let new_name = parser.parse_object_name(false)?;
1498
1499 Ok(RenameTable { old_name, new_name })
1500 })?;
1501 Ok(rename_tables.into())
1502 } else {
1503 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1504 }
1505 }
1506
    /// Try to parse an expression prefix for a word that is a reserved keyword.
    ///
    /// Returns `Ok(Some(expr))` when the keyword introduces a known expression
    /// form (literal, special function, CAST family, EXISTS, INTERVAL, ...),
    /// `Ok(None)` when the keyword does not start an expression here (the
    /// caller then retries it as an unreserved word), and `Err` on a parse
    /// failure inside a recognized form. The current token (the word itself)
    /// has already been consumed; several arms call `prev_token` to rewind so
    /// a shared sub-parser can re-read it.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean and NULL literals: rewind and let parse_value handle them.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Postgres-style niladic "functions" written without parentheses.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Date/time functions whose parentheses are optional.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS(...) — on Databricks only when followed by SELECT/WITH,
            // since Databricks also has an `exists` lambda function.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal syntax.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<subquery>)` — excluded for dialects where ARRAY( starts
            // a different construct.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // PRIOR is only an operator inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric typed-string literals, e.g. `POINT '(1,2)'`.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1633
1634 fn parse_expr_prefix_by_unreserved_word(
1636 &mut self,
1637 w: &Word,
1638 w_span: Span,
1639 ) -> Result<Expr, ParserError> {
1640 let is_outer_join = self.peek_outer_join_operator();
1641 match &self.peek_token_ref().token {
1642 Token::LParen if !is_outer_join => {
1643 let id_parts = vec![w.to_ident(w_span)];
1644 self.parse_function(ObjectName::from(id_parts))
1645 }
1646 Token::SingleQuotedString(_)
1648 | Token::DoubleQuotedString(_)
1649 | Token::HexStringLiteral(_)
1650 if w.value.starts_with('_') =>
1651 {
1652 Ok(Expr::Prefixed {
1653 prefix: w.to_ident(w_span),
1654 value: self.parse_introduced_string_expr()?.into(),
1655 })
1656 }
1657 Token::SingleQuotedString(_)
1659 | Token::DoubleQuotedString(_)
1660 | Token::HexStringLiteral(_)
1661 if w.value.starts_with('_') =>
1662 {
1663 Ok(Expr::Prefixed {
1664 prefix: w.to_ident(w_span),
1665 value: self.parse_introduced_string_expr()?.into(),
1666 })
1667 }
1668 Token::Arrow if self.dialect.supports_lambda_functions() => {
1672 self.expect_token(&Token::Arrow)?;
1673 Ok(Expr::Lambda(LambdaFunction {
1674 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1675 name: w.to_ident(w_span),
1676 data_type: None,
1677 }),
1678 body: Box::new(self.parse_expr()?),
1679 syntax: LambdaSyntax::Arrow,
1680 }))
1681 }
1682 Token::Word(_)
1686 if self.dialect.supports_lambda_functions()
1687 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1688 {
1689 let data_type = self.parse_data_type()?;
1690 self.expect_token(&Token::Arrow)?;
1691 Ok(Expr::Lambda(LambdaFunction {
1692 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1693 name: w.to_ident(w_span),
1694 data_type: Some(data_type),
1695 }),
1696 body: Box::new(self.parse_expr()?),
1697 syntax: LambdaSyntax::Arrow,
1698 }))
1699 }
1700 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1701 }
1702 }
1703
1704 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1707 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1708 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1709 } else {
1710 false
1711 }
1712 }
1713
1714 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1716 if let Some(prefix) = self.dialect.parse_prefix(self) {
1718 return prefix;
1719 }
1720
1721 let loc = self.peek_token_ref().span.start;
1738 let opt_expr = self.maybe_parse(|parser| {
1739 match parser.parse_data_type()? {
1740 DataType::Interval { .. } => parser.parse_interval(),
1741 DataType::Custom(ref name, ref modifiers)
1752 if modifiers.is_empty()
1753 && Self::is_simple_unquoted_object_name(name, "xml")
1754 && parser.dialect.supports_xml_expressions() =>
1755 {
1756 Ok(Expr::TypedString(TypedString {
1757 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1758 value: parser.parse_value()?,
1759 uses_odbc_syntax: false,
1760 }))
1761 }
1762 DataType::Custom(..) => parser_err!("dummy", loc),
1763 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1765 Ok(Expr::Cast {
1766 kind: CastKind::Cast,
1767 expr: Box::new(parser.parse_expr()?),
1768 data_type: DataType::Binary(None),
1769 array: false,
1770 format: None,
1771 })
1772 }
1773 data_type => Ok(Expr::TypedString(TypedString {
1774 data_type,
1775 value: parser.parse_value()?,
1776 uses_odbc_syntax: false,
1777 })),
1778 }
1779 })?;
1780
1781 if let Some(expr) = opt_expr {
1782 return Ok(expr);
1783 }
1784
1785 let dialect = self.dialect;
1789
1790 self.advance_token();
1791 let next_token_index = self.get_current_index();
1792 let next_token = self.get_current_token();
1793 let span = next_token.span;
1794 let expr = match &next_token.token {
1795 Token::Word(w) => {
1796 let w = w.clone();
1805 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1806 Ok(Some(expr)) => Ok(expr),
1808
1809 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1811
1812 Err(e) => {
1819 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1820 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1821 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1822 }) {
1823 return Ok(expr);
1824 }
1825 }
1826 return Err(e);
1827 }
1828 }
1829 } Token::LBracket => self.parse_array_expr(false),
1832 tok @ Token::Minus | tok @ Token::Plus => {
1833 let op = if *tok == Token::Plus {
1834 UnaryOperator::Plus
1835 } else {
1836 UnaryOperator::Minus
1837 };
1838 Ok(Expr::UnaryOp {
1839 op,
1840 expr: Box::new(
1841 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1842 ),
1843 })
1844 }
1845 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1846 op: UnaryOperator::BangNot,
1847 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1848 }),
1849 tok @ Token::DoubleExclamationMark
1850 | tok @ Token::PGSquareRoot
1851 | tok @ Token::PGCubeRoot
1852 | tok @ Token::AtSign
1853 if dialect_is!(dialect is PostgreSqlDialect) =>
1854 {
1855 let op = match tok {
1856 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1857 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1858 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1859 Token::AtSign => UnaryOperator::PGAbs,
1860 _ => {
1861 return Err(ParserError::ParserError(
1862 "Internal parser error: unexpected unary operator token".to_string(),
1863 ))
1864 }
1865 };
1866 Ok(Expr::UnaryOp {
1867 op,
1868 expr: Box::new(
1869 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1870 ),
1871 })
1872 }
1873 Token::Tilde => Ok(Expr::UnaryOp {
1874 op: UnaryOperator::BitwiseNot,
1875 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1876 }),
1877 tok @ Token::Sharp
1878 | tok @ Token::AtDashAt
1879 | tok @ Token::AtAt
1880 | tok @ Token::QuestionMarkDash
1881 | tok @ Token::QuestionPipe
1882 if self.dialect.supports_geometric_types() =>
1883 {
1884 let op = match tok {
1885 Token::Sharp => UnaryOperator::Hash,
1886 Token::AtDashAt => UnaryOperator::AtDashAt,
1887 Token::AtAt => UnaryOperator::DoubleAt,
1888 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1889 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1890 _ => {
1891 return Err(ParserError::ParserError(format!(
1892 "Unexpected token in unary operator parsing: {tok:?}"
1893 )))
1894 }
1895 };
1896 Ok(Expr::UnaryOp {
1897 op,
1898 expr: Box::new(
1899 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1900 ),
1901 })
1902 }
1903 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1904 {
1905 self.prev_token();
1906 Ok(Expr::Value(self.parse_value()?))
1907 }
1908 Token::UnicodeStringLiteral(_) => {
1909 self.prev_token();
1910 Ok(Expr::Value(self.parse_value()?))
1911 }
1912 Token::Number(_, _)
1913 | Token::SingleQuotedString(_)
1914 | Token::DoubleQuotedString(_)
1915 | Token::TripleSingleQuotedString(_)
1916 | Token::TripleDoubleQuotedString(_)
1917 | Token::DollarQuotedString(_)
1918 | Token::SingleQuotedByteStringLiteral(_)
1919 | Token::DoubleQuotedByteStringLiteral(_)
1920 | Token::TripleSingleQuotedByteStringLiteral(_)
1921 | Token::TripleDoubleQuotedByteStringLiteral(_)
1922 | Token::SingleQuotedRawStringLiteral(_)
1923 | Token::DoubleQuotedRawStringLiteral(_)
1924 | Token::TripleSingleQuotedRawStringLiteral(_)
1925 | Token::TripleDoubleQuotedRawStringLiteral(_)
1926 | Token::NationalStringLiteral(_)
1927 | Token::QuoteDelimitedStringLiteral(_)
1928 | Token::NationalQuoteDelimitedStringLiteral(_)
1929 | Token::HexStringLiteral(_) => {
1930 self.prev_token();
1931 Ok(Expr::Value(self.parse_value()?))
1932 }
1933 Token::LParen => {
1934 let expr =
1935 if let Some(expr) = self.try_parse_expr_sub_query()? {
1936 expr
1937 } else if let Some(lambda) = self.try_parse_lambda()? {
1938 return Ok(lambda);
1939 } else {
1940 let exprs = self.with_state(ParserState::Normal, |p| {
1951 p.parse_comma_separated(Parser::parse_expr)
1952 })?;
1953 match exprs.len() {
1954 0 => return Err(ParserError::ParserError(
1955 "Internal parser error: parse_comma_separated returned empty list"
1956 .to_string(),
1957 )),
1958 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1959 _ => Expr::Tuple(exprs),
1960 }
1961 };
1962 self.expect_token(&Token::RParen)?;
1963 Ok(expr)
1964 }
1965 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1966 self.prev_token();
1967 Ok(Expr::Value(self.parse_value()?))
1968 }
1969 Token::LBrace => {
1970 self.prev_token();
1971 self.parse_lbrace_expr()
1972 }
1973 _ => self.expected_at("an expression", next_token_index),
1974 }?;
1975
1976 Ok(expr)
1977 }
1978
1979 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1980 Ok(Expr::TypedString(TypedString {
1981 data_type: DataType::GeometricType(kind),
1982 value: self.parse_value()?,
1983 uses_odbc_syntax: false,
1984 }))
1985 }
1986
    /// Extend `root` with any trailing `.field` and `[subscript]` accesses,
    /// accumulating them into `chain`, then assemble the final expression.
    ///
    /// Handles three special endings: a qualified wildcard `a.b.*` (Postgres
    /// only consumes the `*` here), the Oracle outer-join operator `(+)`, and
    /// the general compound/identifier cases via `build_compound_expr`.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `.*` terminates the chain. Only Postgres treats it as
                        // a qualified wildcard here; otherwise rewind the `.`
                        // so the caller can see it.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    // `.'quoted'` — a quoted field name.
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    // `.?` / `.$1` — a placeholder used as a field name.
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => {
                        // General case: parse the right-hand side at Period
                        // precedence, accepting only forms that can follow a
                        // dot; `maybe_parse` rewinds on rejection.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested compound results into our chain.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Fallback: demand a plain identifier so the
                                // error message points at the offending token.
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `[i][j]...` subscripts (PartiQL handles brackets elsewhere).
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` — only valid when every segment is a plain identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `col(+)` — Oracle outer-join marker on a column reference.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2116
    /// Combine a root expression and its access chain into the most specific
    /// AST form: a plain expression, a `CompoundIdentifier`, a function call
    /// with a compound name, an `OuterJoin`, or a `CompoundFieldAccess`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        // No accesses: the root stands alone.
        if access_chain.is_empty() {
            return Ok(root);
        }

        // All plain identifiers (`a.b.c`) collapse to a CompoundIdentifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `schema.pkg.func(...)`: identifiers followed by a final function
        // call fold into one Function with a compound name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root + intermediate identifiers to the function's name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // `a.b(+)`: a single trailing OuterJoin access merges with the root
        // into one OuterJoin over a compound identifier.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Everything else stays a generic compound field access.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2209
2210 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2211 match k {
2212 Keyword::LOCAL => Some(ContextModifier::Local),
2213 Keyword::GLOBAL => Some(ContextModifier::Global),
2214 Keyword::SESSION => Some(ContextModifier::Session),
2215 _ => None,
2216 }
2217 }
2218
2219 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2221 if !matches!(root, Expr::Identifier(_)) {
2222 return false;
2223 }
2224 fields
2225 .iter()
2226 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2227 }
2228
2229 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2231 let mut idents = vec![];
2232 if let Expr::Identifier(root) = root {
2233 idents.push(root);
2234 for x in fields {
2235 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2236 idents.push(ident);
2237 } else {
2238 return parser_err!(
2239 format!("Expected identifier, found: {}", x),
2240 x.span().start
2241 );
2242 }
2243 }
2244 Ok(idents)
2245 } else {
2246 parser_err!(
2247 format!("Expected identifier, found: {}", root),
2248 root.span().start
2249 )
2250 }
2251 }
2252
2253 fn peek_outer_join_operator(&mut self) -> bool {
2255 if !self.dialect.supports_outer_join_operator() {
2256 return false;
2257 }
2258
2259 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2260 Token::LParen == maybe_lparen.token
2261 && Token::Plus == maybe_plus.token
2262 && Token::RParen == maybe_rparen.token
2263 }
2264
2265 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2268 self.dialect.supports_outer_join_operator()
2269 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2270 }
2271
2272 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2274 self.expect_token(&Token::LParen)?;
2275 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2276 self.expect_token(&Token::RParen)?;
2277
2278 Ok(options)
2279 }
2280
2281 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2282 let name = self.parse_identifier()?;
2283
2284 let next_token = self.peek_token_ref();
2285 if next_token == &Token::Comma || next_token == &Token::RParen {
2286 return Ok(UtilityOption { name, arg: None });
2287 }
2288 let arg = self.parse_expr()?;
2289
2290 Ok(UtilityOption {
2291 name,
2292 arg: Some(arg),
2293 })
2294 }
2295
2296 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2297 if !self.peek_sub_query() {
2298 return Ok(None);
2299 }
2300
2301 Ok(Some(Expr::Subquery(self.parse_query()?)))
2302 }
2303
2304 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2305 if !self.dialect.supports_lambda_functions() {
2306 return Ok(None);
2307 }
2308 self.maybe_parse(|p| {
2309 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2310 p.expect_token(&Token::RParen)?;
2311 p.expect_token(&Token::Arrow)?;
2312 let expr = p.parse_expr()?;
2313 Ok(Expr::Lambda(LambdaFunction {
2314 params: OneOrManyWithParens::Many(params),
2315 body: Box::new(expr),
2316 syntax: LambdaSyntax::Arrow,
2317 }))
2318 })
2319 }
2320
2321 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2331 let params = self.parse_lambda_function_parameters()?;
2333 self.expect_token(&Token::Colon)?;
2335 let body = self.parse_expr()?;
2337 Ok(Expr::Lambda(LambdaFunction {
2338 params,
2339 body: Box::new(body),
2340 syntax: LambdaSyntax::LambdaKeyword,
2341 }))
2342 }
2343
2344 fn parse_lambda_function_parameters(
2346 &mut self,
2347 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2348 let params = if self.consume_token(&Token::LParen) {
2350 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2352 self.expect_token(&Token::RParen)?;
2353 OneOrManyWithParens::Many(params)
2354 } else {
2355 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2357 if params.len() == 1 {
2358 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2359 } else {
2360 OneOrManyWithParens::Many(params)
2361 }
2362 };
2363 Ok(params)
2364 }
2365
2366 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2368 let name = self.parse_identifier()?;
2369 let data_type = match &self.peek_token_ref().token {
2370 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2371 _ => None,
2372 };
2373 Ok(LambdaFunctionParameter { name, data_type })
2374 }
2375
2376 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2383 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2385 return Ok(Some(expr));
2386 }
2387 self.maybe_parse_odbc_body_datetime()
2389 }
2390
2391 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2402 self.maybe_parse(|p| {
2403 let token = p.next_token().clone();
2404 let word_string = token.token.to_string();
2405 let data_type = match word_string.as_str() {
2406 "t" => DataType::Time(None, TimezoneInfo::None),
2407 "d" => DataType::Date,
2408 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2409 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2410 };
2411 let value = p.parse_value()?;
2412 Ok(Expr::TypedString(TypedString {
2413 data_type,
2414 value,
2415 uses_odbc_syntax: true,
2416 }))
2417 })
2418 }
2419
2420 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2429 self.maybe_parse(|p| {
2430 p.expect_keyword(Keyword::FN)?;
2431 let fn_name = p.parse_object_name(false)?;
2432 let mut fn_call = p.parse_function_call(fn_name)?;
2433 fn_call.uses_odbc_syntax = true;
2434 Ok(Expr::Function(fn_call))
2435 })
2436 }
2437
2438 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2440 self.parse_function_call(name).map(Expr::Function)
2441 }
2442
    /// Parse a function invocation after its name: `(args)` plus any of the
    /// optional trailing clauses, in this order: ClickHouse-style parameter
    /// list, WITHIN GROUP, FILTER (WHERE ...), RESPECT/IGNORE NULLS, and OVER.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // `func(SELECT ...)` — a bare subquery argument, where supported.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse `func(params)(args)`: the first list is parameters, the
        // second is the argument list.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // `WITHIN GROUP (ORDER BY ...)` for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE <predicate>)` — aggregate filtering.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for RESPECT/IGNORE NULLS here if it was not already
        // captured inside the argument list as a clause.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (<window spec>)` or `OVER <named window>`.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2530
2531 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2533 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2534 Some(keyword) => {
2535 self.expect_keyword_is(Keyword::NULLS)?;
2536
2537 Ok(match keyword {
2538 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2539 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2540 _ => None,
2541 })
2542 }
2543 None => Ok(None),
2544 }
2545 }
2546
2547 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2549 let args = if self.consume_token(&Token::LParen) {
2550 FunctionArguments::List(self.parse_function_argument_list()?)
2551 } else {
2552 FunctionArguments::None
2553 };
2554 Ok(Expr::Function(Function {
2555 name,
2556 uses_odbc_syntax: false,
2557 parameters: FunctionArguments::None,
2558 args,
2559 filter: None,
2560 over: None,
2561 null_treatment: None,
2562 within_group: vec![],
2563 }))
2564 }
2565
2566 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2568 let next_token = self.next_token();
2569 match &next_token.token {
2570 Token::Word(w) => match w.keyword {
2571 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2572 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2573 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2574 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2575 },
2576 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2577 }
2578 }
2579
2580 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2582 let units = self.parse_window_frame_units()?;
2583 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2584 let start_bound = self.parse_window_frame_bound()?;
2585 self.expect_keyword_is(Keyword::AND)?;
2586 let end_bound = Some(self.parse_window_frame_bound()?);
2587 (start_bound, end_bound)
2588 } else {
2589 (self.parse_window_frame_bound()?, None)
2590 };
2591 Ok(WindowFrame {
2592 units,
2593 start_bound,
2594 end_bound,
2595 })
2596 }
2597
2598 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2600 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2601 Ok(WindowFrameBound::CurrentRow)
2602 } else {
2603 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2604 None
2605 } else {
2606 Some(Box::new(match &self.peek_token_ref().token {
2607 Token::SingleQuotedString(_) => self.parse_interval()?,
2608 _ => self.parse_expr()?,
2609 }))
2610 };
2611 if self.parse_keyword(Keyword::PRECEDING) {
2612 Ok(WindowFrameBound::Preceding(rows))
2613 } else if self.parse_keyword(Keyword::FOLLOWING) {
2614 Ok(WindowFrameBound::Following(rows))
2615 } else {
2616 self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
2617 }
2618 }
2619 }
2620
2621 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2623 if self.dialect.supports_group_by_expr() {
2624 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2625 self.expect_token(&Token::LParen)?;
2626 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2627 self.expect_token(&Token::RParen)?;
2628 Ok(Expr::GroupingSets(result))
2629 } else if self.parse_keyword(Keyword::CUBE) {
2630 self.expect_token(&Token::LParen)?;
2631 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2632 self.expect_token(&Token::RParen)?;
2633 Ok(Expr::Cube(result))
2634 } else if self.parse_keyword(Keyword::ROLLUP) {
2635 self.expect_token(&Token::LParen)?;
2636 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2637 self.expect_token(&Token::RParen)?;
2638 Ok(Expr::Rollup(result))
2639 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2640 Ok(Expr::Tuple(vec![]))
2644 } else {
2645 self.parse_expr()
2646 }
2647 } else {
2648 self.parse_expr()
2650 }
2651 }
2652
2653 fn parse_tuple(
2657 &mut self,
2658 lift_singleton: bool,
2659 allow_empty: bool,
2660 ) -> Result<Vec<Expr>, ParserError> {
2661 if lift_singleton {
2662 if self.consume_token(&Token::LParen) {
2663 let result = if allow_empty && self.consume_token(&Token::RParen) {
2664 vec![]
2665 } else {
2666 let result = self.parse_comma_separated(Parser::parse_expr)?;
2667 self.expect_token(&Token::RParen)?;
2668 result
2669 };
2670 Ok(result)
2671 } else {
2672 Ok(vec![self.parse_expr()?])
2673 }
2674 } else {
2675 self.expect_token(&Token::LParen)?;
2676 let result = if allow_empty && self.consume_token(&Token::RParen) {
2677 vec![]
2678 } else {
2679 let result = self.parse_comma_separated(Parser::parse_expr)?;
2680 self.expect_token(&Token::RParen)?;
2681 result
2682 };
2683 Ok(result)
2684 }
2685 }
2686
2687 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2689 let case_token = AttachedToken(self.get_current_token().clone());
2690 let mut operand = None;
2691 if !self.parse_keyword(Keyword::WHEN) {
2692 operand = Some(Box::new(self.parse_expr()?));
2693 self.expect_keyword_is(Keyword::WHEN)?;
2694 }
2695 let mut conditions = vec![];
2696 loop {
2697 let condition = self.parse_expr()?;
2698 self.expect_keyword_is(Keyword::THEN)?;
2699 let result = self.parse_expr()?;
2700 conditions.push(CaseWhen { condition, result });
2701 if !self.parse_keyword(Keyword::WHEN) {
2702 break;
2703 }
2704 }
2705 let else_result = if self.parse_keyword(Keyword::ELSE) {
2706 Some(Box::new(self.parse_expr()?))
2707 } else {
2708 None
2709 };
2710 let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2711 Ok(Expr::Case {
2712 case_token,
2713 end_token,
2714 operand,
2715 conditions,
2716 else_result,
2717 })
2718 }
2719
2720 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2722 if self.parse_keyword(Keyword::FORMAT) {
2723 let value = self.parse_value()?;
2724 match self.parse_optional_time_zone()? {
2725 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2726 None => Ok(Some(CastFormat::Value(value))),
2727 }
2728 } else {
2729 Ok(None)
2730 }
2731 }
2732
2733 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2735 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2736 self.parse_value().map(Some)
2737 } else {
2738 Ok(None)
2739 }
2740 }
2741
2742 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2744 self.expect_token(&Token::LParen)?;
2745 let data_type = self.parse_data_type()?;
2746 self.expect_token(&Token::Comma)?;
2747 let expr = self.parse_expr()?;
2748 let styles = if self.consume_token(&Token::Comma) {
2749 self.parse_comma_separated(Parser::parse_expr)?
2750 } else {
2751 Default::default()
2752 };
2753 self.expect_token(&Token::RParen)?;
2754 Ok(Expr::Convert {
2755 is_try,
2756 expr: Box::new(expr),
2757 data_type: Some(data_type),
2758 charset: None,
2759 target_before_value: true,
2760 styles,
2761 })
2762 }
2763
2764 pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2769 if self.dialect.convert_type_before_value() {
2770 return self.parse_mssql_convert(is_try);
2771 }
2772 self.expect_token(&Token::LParen)?;
2773 let expr = self.parse_expr()?;
2774 if self.parse_keyword(Keyword::USING) {
2775 let charset = self.parse_object_name(false)?;
2776 self.expect_token(&Token::RParen)?;
2777 return Ok(Expr::Convert {
2778 is_try,
2779 expr: Box::new(expr),
2780 data_type: None,
2781 charset: Some(charset),
2782 target_before_value: false,
2783 styles: vec![],
2784 });
2785 }
2786 self.expect_token(&Token::Comma)?;
2787 let data_type = self.parse_data_type()?;
2788 let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2789 Some(self.parse_object_name(false)?)
2790 } else {
2791 None
2792 };
2793 self.expect_token(&Token::RParen)?;
2794 Ok(Expr::Convert {
2795 is_try,
2796 expr: Box::new(expr),
2797 data_type: Some(data_type),
2798 charset,
2799 target_before_value: false,
2800 styles: vec![],
2801 })
2802 }
2803
2804 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2806 self.expect_token(&Token::LParen)?;
2807 let expr = self.parse_expr()?;
2808 self.expect_keyword_is(Keyword::AS)?;
2809 let data_type = self.parse_data_type()?;
2810 let array = self.parse_keyword(Keyword::ARRAY);
2811 let format = self.parse_optional_cast_format()?;
2812 self.expect_token(&Token::RParen)?;
2813 Ok(Expr::Cast {
2814 kind,
2815 expr: Box::new(expr),
2816 data_type,
2817 array,
2818 format,
2819 })
2820 }
2821
2822 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2824 self.expect_token(&Token::LParen)?;
2825 let exists_node = Expr::Exists {
2826 negated,
2827 subquery: self.parse_query()?,
2828 };
2829 self.expect_token(&Token::RParen)?;
2830 Ok(exists_node)
2831 }
2832
2833 pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2835 self.expect_token(&Token::LParen)?;
2836 let field = self.parse_date_time_field()?;
2837
2838 let syntax = if self.parse_keyword(Keyword::FROM) {
2839 ExtractSyntax::From
2840 } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
2841 {
2842 ExtractSyntax::Comma
2843 } else {
2844 return Err(ParserError::ParserError(
2845 "Expected 'FROM' or ','".to_string(),
2846 ));
2847 };
2848
2849 let expr = self.parse_expr()?;
2850 self.expect_token(&Token::RParen)?;
2851 Ok(Expr::Extract {
2852 field,
2853 expr: Box::new(expr),
2854 syntax,
2855 })
2856 }
2857
2858 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2860 self.expect_token(&Token::LParen)?;
2861 let expr = self.parse_expr()?;
2862 let field = if self.parse_keyword(Keyword::TO) {
2864 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2866 } else if self.consume_token(&Token::Comma) {
2867 let v = self.parse_value()?;
2869 if matches!(v.value, Value::Number(_, _)) {
2870 CeilFloorKind::Scale(v)
2871 } else {
2872 return Err(ParserError::ParserError(
2873 "Scale field can only be of number type".to_string(),
2874 ));
2875 }
2876 } else {
2877 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2878 };
2879 self.expect_token(&Token::RParen)?;
2880 if is_ceil {
2881 Ok(Expr::Ceil {
2882 expr: Box::new(expr),
2883 field,
2884 })
2885 } else {
2886 Ok(Expr::Floor {
2887 expr: Box::new(expr),
2888 field,
2889 })
2890 }
2891 }
2892
2893 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2895 let between_prec = self.dialect.prec_value(Precedence::Between);
2896 let position_expr = self.maybe_parse(|p| {
2897 p.expect_token(&Token::LParen)?;
2899
2900 let expr = p.parse_subexpr(between_prec)?;
2902 p.expect_keyword_is(Keyword::IN)?;
2903 let from = p.parse_expr()?;
2904 p.expect_token(&Token::RParen)?;
2905 Ok(Expr::Position {
2906 expr: Box::new(expr),
2907 r#in: Box::new(from),
2908 })
2909 })?;
2910 match position_expr {
2911 Some(expr) => Ok(expr),
2912 None => self.parse_function(ObjectName::from(vec![ident])),
2915 }
2916 }
2917
2918 pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2920 let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2921 Keyword::SUBSTR => true,
2922 Keyword::SUBSTRING => false,
2923 _ => {
2924 self.prev_token();
2925 return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
2926 }
2927 };
2928 self.expect_token(&Token::LParen)?;
2929 let expr = self.parse_expr()?;
2930 let mut from_expr = None;
2931 let special = self.consume_token(&Token::Comma);
2932 if special || self.parse_keyword(Keyword::FROM) {
2933 from_expr = Some(self.parse_expr()?);
2934 }
2935
2936 let mut to_expr = None;
2937 if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2938 to_expr = Some(self.parse_expr()?);
2939 }
2940 self.expect_token(&Token::RParen)?;
2941
2942 Ok(Expr::Substring {
2943 expr: Box::new(expr),
2944 substring_from: from_expr.map(Box::new),
2945 substring_for: to_expr.map(Box::new),
2946 special,
2947 shorthand,
2948 })
2949 }
2950
2951 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2955 self.expect_token(&Token::LParen)?;
2957 let expr = self.parse_expr()?;
2958 self.expect_keyword_is(Keyword::PLACING)?;
2959 let what_expr = self.parse_expr()?;
2960 self.expect_keyword_is(Keyword::FROM)?;
2961 let from_expr = self.parse_expr()?;
2962 let mut for_expr = None;
2963 if self.parse_keyword(Keyword::FOR) {
2964 for_expr = Some(self.parse_expr()?);
2965 }
2966 self.expect_token(&Token::RParen)?;
2967
2968 Ok(Expr::Overlay {
2969 expr: Box::new(expr),
2970 overlay_what: Box::new(what_expr),
2971 overlay_from: Box::new(from_expr),
2972 overlay_for: for_expr.map(Box::new),
2973 })
2974 }
2975
    /// Parses the body of a `TRIM` call. Supported forms:
    ///
    /// * `TRIM([BOTH | LEADING | TRAILING] <what> FROM <expr>)`
    /// * `TRIM([BOTH | LEADING | TRAILING] <expr>)`
    /// * `TRIM(<expr>, <characters>...)` — only on dialects supporting the
    ///   comma-separated form.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        // Optional BOTH/LEADING/TRAILING qualifier.
        let mut trim_where = None;
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // In `TRIM(<what> FROM <expr>)` the first expression we parsed is
            // the characters to trim; the real target follows FROM, so the
            // roles swap here.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            // Comma-separated form: the remaining arguments are the
            // characters to trim.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            // Plain `TRIM(<expr>)`.
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3020
3021 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3025 let next_token = self.next_token();
3026 match &next_token.token {
3027 Token::Word(w) => match w.keyword {
3028 Keyword::BOTH => Ok(TrimWhereField::Both),
3029 Keyword::LEADING => Ok(TrimWhereField::Leading),
3030 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3031 _ => self.expected("trim_where field", next_token)?,
3032 },
3033 _ => self.expected("trim_where field", next_token),
3034 }
3035 }
3036
3037 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3040 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3041 self.expect_token(&Token::RBracket)?;
3042 Ok(Expr::Array(Array { elem: exprs, named }))
3043 }
3044
    /// Parses the optional `ON OVERFLOW ...` clause of `LISTAGG`.
    ///
    /// Accepts `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler literal>] {WITH | WITHOUT} COUNT`,
    /// and returns `Ok(None)` when no `ON OVERFLOW` keywords are present.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler is optional: if WITH/WITHOUT follows directly
                // there is no filler; otherwise it must be a string-literal
                // token (any of the supported string-literal kinds).
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH / WITHOUT must precede COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3083
    /// Parses a date/time field name (as used by `EXTRACT`, `CEIL`/`FLOOR`
    /// and `INTERVAL` qualifiers).
    ///
    /// Beyond the fixed keyword table, dialect hooks extend what is
    /// accepted: `WEEK(<weekday>)` with an argument on BigQuery/Generic,
    /// arbitrary identifiers via `allow_extract_custom`, and single-quoted
    /// strings via `allow_extract_single_quotes`.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery/Generic accept `WEEK(<weekday>)`.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Any other word is accepted as a custom field when the
                // dialect allows it; back up so the word is re-read as an
                // identifier.
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            // E.g. `EXTRACT('hour' FROM ...)` on dialects allowing quoted
            // fields; back up and re-read the string as an identifier.
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3162
3163 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3167 match &self.peek_token_ref().token {
3168 Token::Word(w) => match w.keyword {
3169 Keyword::EXISTS => {
3170 let negated = true;
3171 let _ = self.parse_keyword(Keyword::EXISTS);
3172 self.parse_exists_expr(negated)
3173 }
3174 _ => Ok(Expr::UnaryOp {
3175 op: UnaryOperator::Not,
3176 expr: Box::new(
3177 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3178 ),
3179 }),
3180 },
3181 _ => Ok(Expr::UnaryOp {
3182 op: UnaryOperator::Not,
3183 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3184 }),
3185 }
3186 }
3187
3188 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3198 let token = self.expect_token(&Token::LBrace)?;
3199
3200 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3201 self.expect_token(&Token::RBrace)?;
3202 return Ok(fn_expr);
3203 }
3204
3205 if self.dialect.supports_dictionary_syntax() {
3206 self.prev_token(); return self.parse_dictionary();
3208 }
3209
3210 self.expected("an expression", token)
3211 }
3212
3213 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
3219 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
3220
3221 self.expect_keyword_is(Keyword::AGAINST)?;
3222
3223 self.expect_token(&Token::LParen)?;
3224
3225 let match_value = self.parse_value()?;
3227
3228 let in_natural_language_mode_keywords = &[
3229 Keyword::IN,
3230 Keyword::NATURAL,
3231 Keyword::LANGUAGE,
3232 Keyword::MODE,
3233 ];
3234
3235 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
3236
3237 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
3238
3239 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
3240 if self.parse_keywords(with_query_expansion_keywords) {
3241 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
3242 } else {
3243 Some(SearchModifier::InNaturalLanguageMode)
3244 }
3245 } else if self.parse_keywords(in_boolean_mode_keywords) {
3246 Some(SearchModifier::InBooleanMode)
3247 } else if self.parse_keywords(with_query_expansion_keywords) {
3248 Some(SearchModifier::WithQueryExpansion)
3249 } else {
3250 None
3251 };
3252
3253 self.expect_token(&Token::RParen)?;
3254
3255 Ok(Expr::MatchAgainst {
3256 columns,
3257 match_value,
3258 opt_search_modifier,
3259 })
3260 }
3261
    /// Parses the body of an `INTERVAL` expression (the `INTERVAL` keyword
    /// has already been consumed), e.g. `INTERVAL '1' DAY` or
    /// `INTERVAL '1-1' YEAR TO MONTH`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a unit qualifier parse a full expression for
        // the value; otherwise only a prefix expression is taken.
        // NOTE(review): presumably the prefix-only parse keeps a following
        // unit keyword from being folded into the value — confirm.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Optional unit (leading field); mandatory when the dialect requires
        // an interval qualifier.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        // A leading SECOND may carry `(precision [, fractional])`. Any other
        // leading field takes an optional `(precision)` and an optional
        // `TO <field>` range; when the range ends in SECOND, that end field
        // may carry its own fractional-seconds precision.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                // `TO` is not parsed in this branch, so there is no end field.
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3344
    /// Peeks at (without consuming) the next token and reports whether it is
    /// a keyword naming a temporal unit, i.e. one usable as an `INTERVAL`
    /// qualifier.
    pub fn next_token_is_temporal_unit(&mut self) -> bool {
        if let Token::Word(word) = &self.peek_token_ref().token {
            matches!(
                word.keyword,
                Keyword::YEAR
                    | Keyword::YEARS
                    | Keyword::MONTH
                    | Keyword::MONTHS
                    | Keyword::WEEK
                    | Keyword::WEEKS
                    | Keyword::DAY
                    | Keyword::DAYS
                    | Keyword::HOUR
                    | Keyword::HOURS
                    | Keyword::MINUTE
                    | Keyword::MINUTES
                    | Keyword::SECOND
                    | Keyword::SECONDS
                    | Keyword::CENTURY
                    | Keyword::DECADE
                    | Keyword::DOW
                    | Keyword::DOY
                    | Keyword::EPOCH
                    | Keyword::ISODOW
                    | Keyword::ISOYEAR
                    | Keyword::JULIAN
                    | Keyword::MICROSECOND
                    | Keyword::MICROSECONDS
                    | Keyword::MILLENIUM
                    | Keyword::MILLENNIUM
                    | Keyword::MILLISECOND
                    | Keyword::MILLISECONDS
                    | Keyword::NANOSECOND
                    | Keyword::NANOSECONDS
                    | Keyword::QUARTER
                    | Keyword::TIMEZONE
                    | Keyword::TIMEZONE_HOUR
                    | Keyword::TIMEZONE_MINUTE
            )
        } else {
            false
        }
    }
3390
3391 fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3399 self.prev_token();
3401 let (fields, trailing_bracket) =
3402 self.parse_struct_type_def(Self::parse_struct_field_def)?;
3403 if trailing_bracket.0 {
3404 return parser_err!(
3405 "unmatched > in STRUCT literal",
3406 self.peek_token_ref().span.start
3407 );
3408 }
3409
3410 self.expect_token(&Token::LParen)?;
3412 let values = self
3413 .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3414 self.expect_token(&Token::RParen)?;
3415
3416 Ok(Expr::Struct { values, fields })
3417 }
3418
    /// Parses a single value inside a struct literal, optionally followed by
    /// `AS <name>`. The alias form is rejected when `typed_syntax` is true
    /// (the struct's fields were declared in a `STRUCT<...>` type).
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // The block argument backs up one token so the error span
                // points at the `AS` keyword that was just consumed.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token_ref().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3450
    /// Parses `STRUCT<field, ...>` using `elem_parser` for each field.
    ///
    /// Returns the parsed fields plus a flag saying whether the closing
    /// bracket was consumed as part of a `>>` token — in which case the
    /// caller owns the second, "trailing" bracket. If no `<` follows
    /// `STRUCT`, an empty field list is returned.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // Bare `STRUCT` with no `<...>` is accepted and has no fields.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // A field that already consumed a `>>` terminates the list, as
            // does the absence of a following comma.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3493
3494 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3496 self.expect_keyword_is(Keyword::STRUCT)?;
3497 self.expect_token(&Token::LParen)?;
3498 let struct_body = self.parse_comma_separated(|parser| {
3499 let field_name = parser.parse_identifier()?;
3500 let field_type = parser.parse_data_type()?;
3501
3502 Ok(StructField {
3503 field_name: Some(field_name),
3504 field_type,
3505 options: None,
3506 })
3507 });
3508 self.expect_token(&Token::RParen)?;
3509 struct_body
3510 }
3511
3512 fn parse_struct_field_def(
3524 &mut self,
3525 ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3526 let is_named_field = matches!(
3529 (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3530 (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
3531 );
3532
3533 let field_name = if is_named_field {
3534 let name = self.parse_identifier()?;
3535 let _ = self.consume_token(&Token::Colon);
3536 Some(name)
3537 } else {
3538 None
3539 };
3540
3541 let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3542
3543 let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3544 Ok((
3545 StructField {
3546 field_name,
3547 field_type,
3548 options,
3549 },
3550 trailing_bracket,
3551 ))
3552 }
3553
3554 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3564 self.expect_keyword_is(Keyword::UNION)?;
3565
3566 self.expect_token(&Token::LParen)?;
3567
3568 let fields = self.parse_comma_separated(|p| {
3569 Ok(UnionField {
3570 field_name: p.parse_identifier()?,
3571 field_type: p.parse_data_type()?,
3572 })
3573 })?;
3574
3575 self.expect_token(&Token::RParen)?;
3576
3577 Ok(fields)
3578 }
3579
    /// Parses a dictionary literal of the form `{'key': value, ...}`
    /// (possibly empty).
    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LBrace)?;

        // `parse_comma_separated0` permits an empty list terminated by `}`.
        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;

        self.expect_token(&Token::RBrace)?;

        Ok(Expr::Dictionary(fields))
    }
3599
3600 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3611 let key = self.parse_identifier()?;
3612
3613 self.expect_token(&Token::Colon)?;
3614
3615 let expr = self.parse_expr()?;
3616
3617 Ok(DictionaryField {
3618 key,
3619 value: Box::new(expr),
3620 })
3621 }
3622
    /// Parses a map literal of the form `{key: value, ...}` (possibly empty).
    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LBrace)?;
        // `parse_comma_separated0` permits an empty list terminated by `}`.
        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
        self.expect_token(&Token::RBrace)?;
        Ok(Expr::Map(Map { entries: fields }))
    }
3638
3639 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3649 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3651
3652 self.expect_token(&Token::Colon)?;
3653
3654 let value = self.parse_expr()?;
3655
3656 Ok(MapEntry {
3657 key: Box::new(key),
3658 value: Box::new(value),
3659 })
3660 }
3661
3662 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3672 self.expect_keyword_is(Keyword::MAP)?;
3673 self.expect_token(&Token::LParen)?;
3674 let key_data_type = self.parse_data_type()?;
3675 self.expect_token(&Token::Comma)?;
3676 let value_data_type = self.parse_data_type()?;
3677 self.expect_token(&Token::RParen)?;
3678
3679 Ok((key_data_type, value_data_type))
3680 }
3681
3682 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3692 self.expect_keyword_is(Keyword::TUPLE)?;
3693 self.expect_token(&Token::LParen)?;
3694 let mut field_defs = vec![];
3695 loop {
3696 let (def, _) = self.parse_struct_field_def()?;
3697 field_defs.push(def);
3698 if !self.consume_token(&Token::Comma) {
3699 break;
3700 }
3701 }
3702 self.expect_token(&Token::RParen)?;
3703
3704 Ok(field_defs)
3705 }
3706
3707 fn expect_closing_angle_bracket(
3712 &mut self,
3713 trailing_bracket: MatchedTrailingBracket,
3714 ) -> Result<MatchedTrailingBracket, ParserError> {
3715 let trailing_bracket = if !trailing_bracket.0 {
3716 match &self.peek_token_ref().token {
3717 Token::Gt => {
3718 self.next_token();
3719 false.into()
3720 }
3721 Token::ShiftRight => {
3722 self.next_token();
3723 true.into()
3724 }
3725 _ => return self.expected_ref(">", self.peek_token_ref()),
3726 }
3727 } else {
3728 false.into()
3729 };
3730
3731 Ok(trailing_bracket)
3732 }
3733
3734 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3736 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3738 return infix;
3739 }
3740
3741 let dialect = self.dialect;
3742
3743 self.advance_token();
3744 let tok = self.get_current_token();
3745 debug!("infix: {tok:?}");
3746 let tok_index = self.get_current_index();
3747 let span = tok.span;
3748 let regular_binary_operator = match &tok.token {
3749 Token::Spaceship => Some(BinaryOperator::Spaceship),
3750 Token::DoubleEq => Some(BinaryOperator::Eq),
3751 Token::Assignment => Some(BinaryOperator::Assignment),
3752 Token::Eq => Some(BinaryOperator::Eq),
3753 Token::Neq => Some(BinaryOperator::NotEq),
3754 Token::Gt => Some(BinaryOperator::Gt),
3755 Token::GtEq => Some(BinaryOperator::GtEq),
3756 Token::Lt => Some(BinaryOperator::Lt),
3757 Token::LtEq => Some(BinaryOperator::LtEq),
3758 Token::Plus => Some(BinaryOperator::Plus),
3759 Token::Minus => Some(BinaryOperator::Minus),
3760 Token::Mul => Some(BinaryOperator::Multiply),
3761 Token::Mod => Some(BinaryOperator::Modulo),
3762 Token::StringConcat => Some(BinaryOperator::StringConcat),
3763 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3764 Token::Caret => {
3765 if dialect_is!(dialect is PostgreSqlDialect) {
3768 Some(BinaryOperator::PGExp)
3769 } else {
3770 Some(BinaryOperator::BitwiseXor)
3771 }
3772 }
3773 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3774 Token::Div => Some(BinaryOperator::Divide),
3775 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3776 Some(BinaryOperator::DuckIntegerDivide)
3777 }
3778 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3779 Some(BinaryOperator::PGBitwiseShiftLeft)
3780 }
3781 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3782 Some(BinaryOperator::PGBitwiseShiftRight)
3783 }
3784 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3785 Some(BinaryOperator::PGBitwiseXor)
3786 }
3787 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3788 Some(BinaryOperator::PGOverlap)
3789 }
3790 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3791 Some(BinaryOperator::PGOverlap)
3792 }
3793 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3794 Some(BinaryOperator::And)
3795 }
3796 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3797 Some(BinaryOperator::PGStartsWith)
3798 }
3799 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3800 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3801 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3802 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3803 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3804 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3805 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3806 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3807 Token::Arrow => Some(BinaryOperator::Arrow),
3808 Token::LongArrow => Some(BinaryOperator::LongArrow),
3809 Token::HashArrow => Some(BinaryOperator::HashArrow),
3810 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3811 Token::AtArrow => Some(BinaryOperator::AtArrow),
3812 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3813 Token::HashMinus => Some(BinaryOperator::HashMinus),
3814 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3815 Token::AtAt => Some(BinaryOperator::AtAt),
3816 Token::Question => Some(BinaryOperator::Question),
3817 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3818 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3819 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3820 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3821 Some(BinaryOperator::DoubleHash)
3822 }
3823
3824 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3825 Some(BinaryOperator::AndLt)
3826 }
3827 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3828 Some(BinaryOperator::AndGt)
3829 }
3830 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3831 Some(BinaryOperator::QuestionDash)
3832 }
3833 Token::AmpersandLeftAngleBracketVerticalBar
3834 if self.dialect.supports_geometric_types() =>
3835 {
3836 Some(BinaryOperator::AndLtPipe)
3837 }
3838 Token::VerticalBarAmpersandRightAngleBracket
3839 if self.dialect.supports_geometric_types() =>
3840 {
3841 Some(BinaryOperator::PipeAndGt)
3842 }
3843 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3844 Some(BinaryOperator::LtDashGt)
3845 }
3846 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3847 Some(BinaryOperator::LtCaret)
3848 }
3849 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3850 Some(BinaryOperator::GtCaret)
3851 }
3852 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3853 Some(BinaryOperator::QuestionHash)
3854 }
3855 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3856 Some(BinaryOperator::QuestionDoublePipe)
3857 }
3858 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3859 Some(BinaryOperator::QuestionDashPipe)
3860 }
3861 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3862 Some(BinaryOperator::TildeEq)
3863 }
3864 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3865 Some(BinaryOperator::LtLtPipe)
3866 }
3867 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3868 Some(BinaryOperator::PipeGtGt)
3869 }
3870 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3871
3872 Token::Word(w) => match w.keyword {
3873 Keyword::AND => Some(BinaryOperator::And),
3874 Keyword::OR => Some(BinaryOperator::Or),
3875 Keyword::XOR => Some(BinaryOperator::Xor),
3876 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3877 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3878 self.expect_token(&Token::LParen)?;
3879 let mut idents = vec![];
3884 loop {
3885 self.advance_token();
3886 idents.push(self.get_current_token().to_string());
3887 if !self.consume_token(&Token::Period) {
3888 break;
3889 }
3890 }
3891 self.expect_token(&Token::RParen)?;
3892 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3893 }
3894 _ => None,
3895 },
3896 _ => None,
3897 };
3898
3899 let tok = self.token_at(tok_index);
3900 if let Some(op) = regular_binary_operator {
3901 if let Some(keyword) =
3902 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3903 {
3904 self.expect_token(&Token::LParen)?;
3905 let right = if self.peek_sub_query() {
3906 self.prev_token(); self.parse_subexpr(precedence)?
3910 } else {
3911 let right = self.parse_subexpr(precedence)?;
3913 self.expect_token(&Token::RParen)?;
3914 right
3915 };
3916
3917 if !matches!(
3918 op,
3919 BinaryOperator::Gt
3920 | BinaryOperator::Lt
3921 | BinaryOperator::GtEq
3922 | BinaryOperator::LtEq
3923 | BinaryOperator::Eq
3924 | BinaryOperator::NotEq
3925 | BinaryOperator::PGRegexMatch
3926 | BinaryOperator::PGRegexIMatch
3927 | BinaryOperator::PGRegexNotMatch
3928 | BinaryOperator::PGRegexNotIMatch
3929 | BinaryOperator::PGLikeMatch
3930 | BinaryOperator::PGILikeMatch
3931 | BinaryOperator::PGNotLikeMatch
3932 | BinaryOperator::PGNotILikeMatch
3933 ) {
3934 return parser_err!(
3935 format!(
3936 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3937 ),
3938 span.start
3939 );
3940 };
3941
3942 Ok(match keyword {
3943 Keyword::ALL => Expr::AllOp {
3944 left: Box::new(expr),
3945 compare_op: op,
3946 right: Box::new(right),
3947 },
3948 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3949 left: Box::new(expr),
3950 compare_op: op,
3951 right: Box::new(right),
3952 is_some: keyword == Keyword::SOME,
3953 },
3954 unexpected_keyword => return Err(ParserError::ParserError(
3955 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3956 )),
3957 })
3958 } else {
3959 Ok(Expr::BinaryOp {
3960 left: Box::new(expr),
3961 op,
3962 right: Box::new(self.parse_subexpr(precedence)?),
3963 })
3964 }
3965 } else if let Token::Word(w) = &tok.token {
3966 match w.keyword {
3967 Keyword::IS => {
3968 if self.parse_keyword(Keyword::NULL) {
3969 Ok(Expr::IsNull(Box::new(expr)))
3970 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3971 Ok(Expr::IsNotNull(Box::new(expr)))
3972 } else if self.parse_keywords(&[Keyword::TRUE]) {
3973 Ok(Expr::IsTrue(Box::new(expr)))
3974 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3975 Ok(Expr::IsNotTrue(Box::new(expr)))
3976 } else if self.parse_keywords(&[Keyword::FALSE]) {
3977 Ok(Expr::IsFalse(Box::new(expr)))
3978 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3979 Ok(Expr::IsNotFalse(Box::new(expr)))
3980 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3981 Ok(Expr::IsUnknown(Box::new(expr)))
3982 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3983 Ok(Expr::IsNotUnknown(Box::new(expr)))
3984 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3985 let expr2 = self.parse_expr()?;
3986 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3987 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3988 {
3989 let expr2 = self.parse_expr()?;
3990 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3991 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3992 Ok(is_normalized)
3993 } else {
3994 self.expected_ref(
3995 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3996 self.peek_token_ref(),
3997 )
3998 }
3999 }
4000 Keyword::AT => {
4001 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4002 Ok(Expr::AtTimeZone {
4003 timestamp: Box::new(expr),
4004 time_zone: Box::new(self.parse_subexpr(precedence)?),
4005 })
4006 }
4007 Keyword::NOT
4008 | Keyword::IN
4009 | Keyword::BETWEEN
4010 | Keyword::LIKE
4011 | Keyword::ILIKE
4012 | Keyword::SIMILAR
4013 | Keyword::REGEXP
4014 | Keyword::RLIKE => {
4015 self.prev_token();
4016 let negated = self.parse_keyword(Keyword::NOT);
4017 let regexp = self.parse_keyword(Keyword::REGEXP);
4018 let rlike = self.parse_keyword(Keyword::RLIKE);
4019 let null = if !self.in_column_definition_state() {
4020 self.parse_keyword(Keyword::NULL)
4021 } else {
4022 false
4023 };
4024 if regexp || rlike {
4025 Ok(Expr::RLike {
4026 negated,
4027 expr: Box::new(expr),
4028 pattern: Box::new(
4029 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4030 ),
4031 regexp,
4032 })
4033 } else if negated && null {
4034 Ok(Expr::IsNotNull(Box::new(expr)))
4035 } else if self.parse_keyword(Keyword::IN) {
4036 self.parse_in(expr, negated)
4037 } else if self.parse_keyword(Keyword::BETWEEN) {
4038 self.parse_between(expr, negated)
4039 } else if self.parse_keyword(Keyword::LIKE) {
4040 Ok(Expr::Like {
4041 negated,
4042 any: self.parse_keyword(Keyword::ANY),
4043 expr: Box::new(expr),
4044 pattern: Box::new(
4045 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4046 ),
4047 escape_char: self.parse_escape_char()?,
4048 })
4049 } else if self.parse_keyword(Keyword::ILIKE) {
4050 Ok(Expr::ILike {
4051 negated,
4052 any: self.parse_keyword(Keyword::ANY),
4053 expr: Box::new(expr),
4054 pattern: Box::new(
4055 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4056 ),
4057 escape_char: self.parse_escape_char()?,
4058 })
4059 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4060 Ok(Expr::SimilarTo {
4061 negated,
4062 expr: Box::new(expr),
4063 pattern: Box::new(
4064 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4065 ),
4066 escape_char: self.parse_escape_char()?,
4067 })
4068 } else {
4069 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4070 }
4071 }
4072 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4073 Ok(Expr::IsNotNull(Box::new(expr)))
4074 }
4075 Keyword::MEMBER => {
4076 if self.parse_keyword(Keyword::OF) {
4077 self.expect_token(&Token::LParen)?;
4078 let array = self.parse_expr()?;
4079 self.expect_token(&Token::RParen)?;
4080 Ok(Expr::MemberOf(MemberOf {
4081 value: Box::new(expr),
4082 array: Box::new(array),
4083 }))
4084 } else {
4085 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4086 }
4087 }
4088 _ => parser_err!(
4090 format!("No infix parser for token {:?}", tok.token),
4091 tok.span.start
4092 ),
4093 }
4094 } else if Token::DoubleColon == *tok {
4095 Ok(Expr::Cast {
4096 kind: CastKind::DoubleColon,
4097 expr: Box::new(expr),
4098 data_type: self.parse_data_type()?,
4099 array: false,
4100 format: None,
4101 })
4102 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4103 Ok(Expr::UnaryOp {
4104 op: UnaryOperator::PGPostfixFactorial,
4105 expr: Box::new(expr),
4106 })
4107 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4108 || (Token::Colon == *tok)
4109 {
4110 self.prev_token();
4111 self.parse_json_access(expr)
4112 } else {
4113 parser_err!(
4115 format!("No infix parser for token {:?}", tok.token),
4116 tok.span.start
4117 )
4118 }
4119 }
4120
4121 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4123 if self.parse_keyword(Keyword::ESCAPE) {
4124 Ok(Some(self.parse_value()?))
4125 } else {
4126 Ok(None)
4127 }
4128 }
4129
    /// Parse the interior of a `[...]` subscript after `[` has been
    /// consumed: either a plain index or a `lower:upper:stride` slice where
    /// each component is optional. Consumes the closing `]`.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means there is no lower bound.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[expr]` is a plain index; `[:]` is a fully unbounded slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If a lower bound was parsed, the separating `:` has not been
        // consumed yet (the no-lower-bound branch consumed it above).
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lower:]` — slice with no upper bound and no stride.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[lower:upper]` — slice with no stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // `[lower:upper:stride]` — the stride itself is also optional.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride was present, the `]` has not been consumed yet.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4204
4205 pub fn parse_multi_dim_subscript(
4207 &mut self,
4208 chain: &mut Vec<AccessExpr>,
4209 ) -> Result<(), ParserError> {
4210 while self.consume_token(&Token::LBracket) {
4211 self.parse_subscript(chain)?;
4212 }
4213 Ok(())
4214 }
4215
4216 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4220 let subscript = self.parse_subscript_inner()?;
4221 chain.push(AccessExpr::Subscript(subscript));
4222 Ok(())
4223 }
4224
    /// Parse a single object key of a JSON path (after `:` or `.`): an
    /// unquoted, double-quoted, or backtick-quoted word, or a double-quoted
    /// string literal.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            Token::Word(Word {
                value,
                // Only these quote styles are accepted for a path key.
                quote_style: quote_style @ (Some('"') | Some('`') | None),
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
4249
4250 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4251 let path = self.parse_json_path()?;
4252 Ok(Expr::JsonAccess {
4253 value: Box::new(expr),
4254 path,
4255 })
4256 }
4257
    /// Parse a JSON path such as `:a.b[0]`, stopping at the first token
    /// that cannot extend the path (that token is pushed back).
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // Leading `:[...]` — bracketed key immediately after the colon.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // `:` may only start a path; `.` may only continue one.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Any other token ends the path; un-consume it.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers invoke this only after seeing a path-starting token.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4290
    /// Parse the tail of `[NOT] IN ...` after `IN` has been consumed:
    /// `IN UNNEST(expr)`, `IN (subquery)`, or `IN (expr, ...)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // `IN UNNEST(array_expression)` form.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure the cursor is rewound and the
        // contents are parsed as a plain expression list instead.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4325
4326 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4328 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4331 self.expect_keyword_is(Keyword::AND)?;
4332 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4333 Ok(Expr::Between {
4334 expr: Box::new(expr),
4335 negated,
4336 low: Box::new(low),
4337 high: Box::new(high),
4338 })
4339 }
4340
4341 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4343 Ok(Expr::Cast {
4344 kind: CastKind::DoubleColon,
4345 expr: Box::new(expr),
4346 data_type: self.parse_data_type()?,
4347 array: false,
4348 format: None,
4349 })
4350 }
4351
    /// Precedence of the next (unconsumed) token, using the dialect's
    /// default precedence table.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4356
4357 pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4360 self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4361 }
4362
    /// Clone of the next non-whitespace token, without advancing.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4370
    /// Reference to the next non-whitespace token, without advancing.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4376
4377 pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4400 self.peek_tokens_with_location()
4401 .map(|with_loc| with_loc.token)
4402 }
4403
    /// Clone the next N non-whitespace tokens (with spans) without
    /// advancing the parser; positions past the end yield EOF tokens.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Skip whitespace without yielding it.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.cloned().unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            });
        })
    }
4426
    /// Like [`Self::peek_tokens_with_location`], but returning references
    /// instead of clones.
    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Skip whitespace without yielding it.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.unwrap_or(&EOF_TOKEN);
        })
    }
4446
    /// Clone of the nth (0-based) upcoming non-whitespace token.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4451
    /// Reference to the nth (0-based) upcoming non-whitespace token,
    /// without advancing; EOF once the stream is exhausted.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts toward `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        // `None` (past the end) is reported as EOF.
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4471
    /// Clone of the next token in the raw stream, whitespace included.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4477
4478 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4480 self.tokens
4481 .get(self.index + n)
4482 .cloned()
4483 .unwrap_or(TokenWithSpan {
4484 token: Token::EOF,
4485 span: Span::empty(),
4486 })
4487 }
4488
    /// Reference to the token `n` positions ahead in the raw stream
    /// (whitespace included); EOF when out of range.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4493
4494 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4498 let index = self.index;
4499 let matched = self.parse_keywords(expected);
4500 self.index = index;
4501 matched
4502 }
4503
    /// Advance past the next non-whitespace token and return a clone of it.
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4512
    /// Index of the most recently consumed token (the "current" token).
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4520
    /// Advance by exactly one raw token (whitespace included) and return
    /// it; `None` once past the end of the stream.
    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
        self.index += 1;
        self.tokens.get(self.index - 1)
    }
4526
4527 pub fn advance_token(&mut self) {
4531 loop {
4532 self.index += 1;
4533 match self.tokens.get(self.index - 1) {
4534 Some(TokenWithSpan {
4535 token: Token::Whitespace(_),
4536 span: _,
4537 }) => continue,
4538 _ => break,
4539 }
4540 }
4541 }
4542
    /// The most recently consumed token (EOF before anything is consumed).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4549
    /// The token consumed just before the current one.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4556
    /// The raw token at the cursor (the next one to be consumed).
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4563
    /// Move the cursor back to the previous meaningful token, skipping
    /// whitespace. Panics (via `assert!`) if already at the start.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            // Keep stepping back over whitespace tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
4584
    /// Build an `Expected: ..., found: ...` parse error located at `found`.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4592
    /// Like [`Self::expected`], but borrowing the offending token.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4600
    /// Build an `Expected: ...` parse error for the token at the given
    /// absolute token index (EOF when out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4609
4610 #[must_use]
4613 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4614 if self.peek_keyword(expected) {
4615 self.advance_token();
4616 true
4617 } else {
4618 false
4619 }
4620 }
4621
    /// Whether the next non-whitespace token is the given keyword.
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4629
    /// Consume `expected` followed by exactly the given token sequence;
    /// consumes nothing and returns `false` unless the whole pattern matches.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4640
    /// Like [`Self::parse_keyword_with_tokens`], but never consumes input.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4648
4649 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4650 match &self.peek_token_ref().token {
4651 Token::Word(w) if expected == w.keyword => {
4652 for (idx, token) in tokens.iter().enumerate() {
4653 if self.peek_nth_token_ref(idx + 1).token != *token {
4654 return false;
4655 }
4656 }
4657
4658 if consume {
4659 for _ in 0..(tokens.len() + 1) {
4660 self.advance_token();
4661 }
4662 }
4663
4664 true
4665 }
4666 _ => false,
4667 }
4668 }
4669
    /// Consume the exact keyword sequence if fully present; consumes
    /// nothing and returns `false` otherwise.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4677
    /// Like [`Self::parse_keywords`], but on success returns the token
    /// index of the first keyword; on failure the cursor is restored and
    /// `None` is returned.
    #[must_use]
    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
        let start_index = self.index;
        let mut first_keyword_index = None;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // Unwind the cursor on any mismatch.
                self.index = start_index;
                return None;
            }
            if first_keyword_index.is_none() {
                // `parse_keyword` advanced past the keyword, so it now sits
                // just before the cursor.
                first_keyword_index = Some(self.index.saturating_sub(1));
            }
        }
        first_keyword_index
    }
4695
4696 #[must_use]
4699 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4700 for keyword in keywords {
4701 if self.peek_keyword(*keyword) {
4702 return Some(*keyword);
4703 }
4704 }
4705 None
4706 }
4707
4708 #[must_use]
4712 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4713 match &self.peek_token_ref().token {
4714 Token::Word(w) => {
4715 keywords
4716 .iter()
4717 .find(|keyword| **keyword == w.keyword)
4718 .map(|keyword| {
4719 self.advance_token();
4720 *keyword
4721 })
4722 }
4723 _ => None,
4724 }
4725 }
4726
4727 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4730 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4731 Ok(keyword)
4732 } else {
4733 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4734 self.expected_ref(
4735 &format!("one of {}", keywords.join(" or ")),
4736 self.peek_token_ref(),
4737 )
4738 }
4739 }
4740
4741 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4746 if self.parse_keyword(expected) {
4747 Ok(self.get_current_token().clone())
4748 } else {
4749 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4750 }
4751 }
4752
4753 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4759 if self.parse_keyword(expected) {
4760 Ok(())
4761 } else {
4762 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4763 }
4764 }
4765
4766 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4769 for &kw in expected {
4770 self.expect_keyword_is(kw)?;
4771 }
4772 Ok(())
4773 }
4774
4775 #[must_use]
4779 pub fn consume_token(&mut self, expected: &Token) -> bool {
4780 if self.peek_token_ref() == expected {
4781 self.advance_token();
4782 true
4783 } else {
4784 false
4785 }
4786 }
4787
4788 #[must_use]
4792 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4793 let index = self.index;
4794 for token in tokens {
4795 if !self.consume_token(token) {
4796 self.index = index;
4797 return false;
4798 }
4799 }
4800 true
4801 }
4802
4803 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4805 if self.peek_token_ref() == expected {
4806 Ok(self.next_token())
4807 } else {
4808 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4809 }
4810 }
4811
4812 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4813 where
4814 <T as FromStr>::Err: Display,
4815 {
4816 s.parse::<T>().map_err(|e| {
4817 ParserError::ParserError(format!(
4818 "Could not parse '{s}' as {}: {e}{loc}",
4819 core::any::type_name::<T>()
4820 ))
4821 })
4822 }
4823
4824 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4826 let trailing_commas =
4832 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4833
4834 self.parse_comma_separated_with_trailing_commas(
4835 |p| p.parse_select_item(),
4836 trailing_commas,
4837 Self::is_reserved_for_column_alias,
4838 )
4839 }
4840
    /// Parse a comma-separated list of grant permissions, optionally
    /// tolerating a trailing comma when the parser option is set.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // A comma directly followed by a list terminator (or `ON`)
                // is treated as a trailing comma.
                match &self.peek_token_ref().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4864
4865 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4867 let trailing_commas = self.dialect.supports_from_trailing_commas();
4868
4869 self.parse_comma_separated_with_trailing_commas(
4870 Parser::parse_table_and_joins,
4871 trailing_commas,
4872 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4873 )
4874 }
4875
    /// Decide whether a comma-separated list ends at the current position.
    /// Consumes the comma when the list continues; when `trailing_commas`
    /// is set, a comma followed by a terminator also ends the list (the
    /// terminator itself is left unconsumed).
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Peek (consume then put back) at what follows the comma.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4908
    /// [`Self::is_parse_comma_separated_end_with_trailing_commas`] using
    /// the parser's configured trailing-comma option and the default
    /// column-alias reservation rule.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4917
    /// Parse one or more comma-separated items produced by `f`, honoring
    /// the parser's trailing-comma option.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4929
    /// Parse one or more items produced by `f`, separated by commas.
    /// `trailing_commas` permits a dangling comma before a terminator;
    /// `is_reserved_keyword` identifies keywords that cannot start an item.
    fn parse_comma_separated_with_trailing_commas<T, F, R>(
        &mut self,
        mut f: F,
        trailing_commas: bool,
        is_reserved_keyword: R,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        let mut values = vec![];
        loop {
            values.push(f(self)?);
            if self.is_parse_comma_separated_end_with_trailing_commas(
                trailing_commas,
                &is_reserved_keyword,
            ) {
                break;
            }
        }
        Ok(values)
    }
4956
4957 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4959 where
4960 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4961 {
4962 let mut values = vec![];
4963 loop {
4964 values.push(f(self)?);
4965 if !self.consume_token(&Token::Period) {
4966 break;
4967 }
4968 }
4969 Ok(values)
4970 }
4971
4972 pub fn parse_keyword_separated<T, F>(
4974 &mut self,
4975 keyword: Keyword,
4976 mut f: F,
4977 ) -> Result<Vec<T>, ParserError>
4978 where
4979 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4980 {
4981 let mut values = vec![];
4982 loop {
4983 values.push(f(self)?);
4984 if !self.parse_keyword(keyword) {
4985 break;
4986 }
4987 }
4988 Ok(values)
4989 }
4990
    /// Parse `( <item> )`, where the item is produced by `f`.
    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.expect_token(&Token::LParen)?;
        let res = f(self)?;
        self.expect_token(&Token::RParen)?;
        Ok(res)
    }
5001
    /// Parse a possibly-empty comma-separated list of items produced by
    /// `f`. `end_token` (not consumed) marks an empty list; with the
    /// trailing-commas option, a lone `,` before `end_token` is also empty.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5023
    /// Parse semicolon-terminated statements until EOF or an unquoted
    /// keyword from `terminal_keywords` is seen (left unconsumed).
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // A quoted word is an identifier, never a terminator.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5048
    /// A keyword is reserved for (i.e. cannot be) a column alias exactly
    /// when the dialect does not accept it as one.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5055
    /// Run `f`, returning `Ok(Some(..))` on success or `Ok(None)` on a
    /// parse failure (the cursor is rewound by `try_parse`). Recursion-
    /// limit errors are not swallowed: they propagate as `Err`.
    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        match self.try_parse(f) {
            Ok(t) => Ok(Some(t)),
            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
            _ => Ok(None),
        }
    }
5069
    /// Run `f`; on any error restore the token cursor to where it was
    /// before the attempt and propagate the error.
    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        let index = self.index;
        match f(self) {
            Ok(t) => Ok(t),
            Err(e) => {
                // Rewind so the caller can try an alternative production.
                self.index = index;
                Err(e)
            }
        }
    }
5085
    /// Parse an optional `ALL` / `DISTINCT` / `DISTINCT ON (..)` clause.
    /// Returns `Ok(None)` when neither keyword is present; rejects `ALL`
    /// followed by `DISTINCT` and vice versa.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only `DISTINCT` may be followed by an `ON (...)` column list.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // `ON ()` — an empty column list is allowed; put the `)` back so
        // the unconditional expect below consumes it.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5124
    /// Parses a `CREATE ...` statement, dispatching on the object-type keyword
    /// that follows the common modifier prefix (`OR REPLACE`, `OR ALTER`,
    /// `LOCAL`/`GLOBAL`, `TRANSIENT`, `TEMP`/`TEMPORARY`, DuckDB `PERSISTENT`,
    /// view parameters). Branch ORDER matters: each `parse_keyword` call
    /// consumes tokens, and multi-word forms are peeked before their prefixes.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Collapse LOCAL/GLOBAL into a tri-state: GLOBAL => Some(true),
        // LOCAL => Some(false), neither => None.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is only attempted for DuckDB (short-circuit keeps the
        // token unconsumed on other dialects).
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        // Views are peeked (not consumed) because parse_create_view re-reads
        // the MATERIALIZED/SECURE/VIEW keywords itself.
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            // `CREATE USER MAPPING` (foreign data wrappers) vs. plain `CREATE USER`.
            if self.parse_keyword(Keyword::MAPPING) {
                self.parse_create_user_mapping().map(Into::into)
            } else {
                self.parse_create_user(or_replace).map(Into::into)
            }
        } else if self.parse_keyword(Keyword::AGGREGATE) {
            self.parse_create_aggregate(or_replace).map(Into::into)
        // Postgres procedural-language forms: [TRUSTED] [PROCEDURAL] LANGUAGE.
        } else if self.peek_keyword(Keyword::TRUSTED)
            || self.peek_keyword(Keyword::PROCEDURAL)
            || self.peek_keyword(Keyword::LANGUAGE)
        {
            let trusted = self.parse_keyword(Keyword::TRUSTED);
            let procedural = self.parse_keyword(Keyword::PROCEDURAL);
            if self.parse_keyword(Keyword::LANGUAGE) {
                self.parse_create_language(or_replace, trusted, procedural)
                    .map(Into::into)
            } else {
                self.expected_ref(
                    "LANGUAGE after TRUSTED or PROCEDURAL",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keyword(Keyword::TRANSFORM) {
            self.parse_create_transform(or_replace).map(Into::into)
        // Past this point OR REPLACE can no longer apply; fail early with a
        // targeted message rather than the generic one below.
        } else if or_replace {
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::CAST) {
            self.parse_create_cast().map(Into::into)
        } else if self.parse_keyword(Keyword::CONVERSION) {
            self.parse_create_conversion(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CONVERSION]) {
            self.parse_create_conversion(true).map(Into::into)
        } else if self.parse_keyword(Keyword::RULE) {
            self.parse_create_rule().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR FAMILY / OPERATOR CLASS / bare OPERATOR (Postgres).
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else if self.parse_keyword(Keyword::FOREIGN) {
            if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
                self.parse_create_foreign_data_wrapper().map(Into::into)
            } else if self.parse_keyword(Keyword::TABLE) {
                self.parse_create_foreign_table().map(Into::into)
            } else {
                self.expected_ref(
                    "DATA WRAPPER or TABLE after CREATE FOREIGN",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
            self.parse_create_text_search()
        } else if self.parse_keyword(Keyword::PUBLICATION) {
            self.parse_create_publication().map(Into::into)
        } else if self.parse_keyword(Keyword::SUBSCRIPTION) {
            self.parse_create_subscription().map(Into::into)
        } else if self.parse_keyword(Keyword::STATISTICS) {
            self.parse_create_statistics().map(Into::into)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::METHOD]) {
            self.parse_create_access_method().map(Into::into)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::TRIGGER]) {
            self.parse_create_event_trigger().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLESPACE) {
            self.parse_create_tablespace().map(Into::into)
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5277
5278 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5279 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5280 let name = self.parse_identifier()?;
5281 let options = self
5282 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5283 .options;
5284 let with_tags = self.parse_keyword(Keyword::WITH);
5285 let tags = if self.parse_keyword(Keyword::TAG) {
5286 self.parse_key_value_options(true, &[])?.options
5287 } else {
5288 vec![]
5289 };
5290 Ok(CreateUser {
5291 or_replace,
5292 if_not_exists,
5293 name,
5294 options: KeyValueOptions {
5295 options,
5296 delimiter: KeyValueOptionsDelimiter::Space,
5297 },
5298 with_tags,
5299 tags: KeyValueOptions {
5300 options: tags,
5301 delimiter: KeyValueOptionsDelimiter::Comma,
5302 },
5303 })
5304 }
5305
    /// Parses a DuckDB-style `CREATE SECRET` statement:
    /// `[IF NOT EXISTS] [name] [IN storage] ( TYPE <type> [, key value ...] )`.
    /// Both the name and the storage specifier are optional and may appear in
    /// either order relative to each other (name first, then `IN storage`).
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        // Anything before the '(' is the optional name and/or `IN <storage>`.
        if self.peek_token_ref().token != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                // `IN` came first: this identifier is the storage specifier.
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN <storage>` before the '('.
            if storage_specifier.is_none()
                && self.peek_token_ref().token != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        // The parenthesized body always starts with `TYPE <ident>`.
        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        // Optional comma-separated `key value` option pairs after the type.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive; both-set is an error.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
5364
5365 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5367 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5368 if self.parse_keyword(Keyword::TABLE) {
5369 let table_name = self.parse_object_name(false)?;
5370 if self.peek_token_ref().token != Token::EOF {
5371 if let Token::Word(word) = &self.peek_token_ref().token {
5372 if word.keyword == Keyword::OPTIONS {
5373 options = self.parse_options(Keyword::OPTIONS)?
5374 }
5375 };
5376
5377 if self.peek_token_ref().token != Token::EOF {
5378 let (a, q) = self.parse_as_query()?;
5379 has_as = a;
5380 query = Some(q);
5381 }
5382
5383 Ok(Statement::Cache {
5384 table_flag,
5385 table_name,
5386 has_as,
5387 options,
5388 query,
5389 })
5390 } else {
5391 Ok(Statement::Cache {
5392 table_flag,
5393 table_name,
5394 has_as,
5395 options,
5396 query,
5397 })
5398 }
5399 } else {
5400 table_flag = Some(self.parse_object_name(false)?);
5401 if self.parse_keyword(Keyword::TABLE) {
5402 let table_name = self.parse_object_name(false)?;
5403 if self.peek_token_ref().token != Token::EOF {
5404 if let Token::Word(word) = &self.peek_token_ref().token {
5405 if word.keyword == Keyword::OPTIONS {
5406 options = self.parse_options(Keyword::OPTIONS)?
5407 }
5408 };
5409
5410 if self.peek_token_ref().token != Token::EOF {
5411 let (a, q) = self.parse_as_query()?;
5412 has_as = a;
5413 query = Some(q);
5414 }
5415
5416 Ok(Statement::Cache {
5417 table_flag,
5418 table_name,
5419 has_as,
5420 options,
5421 query,
5422 })
5423 } else {
5424 Ok(Statement::Cache {
5425 table_flag,
5426 table_name,
5427 has_as,
5428 options,
5429 query,
5430 })
5431 }
5432 } else {
5433 if self.peek_token_ref().token == Token::EOF {
5434 self.prev_token();
5435 }
5436 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5437 }
5438 }
5439 }
5440
5441 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5443 match &self.peek_token_ref().token {
5444 Token::Word(word) => match word.keyword {
5445 Keyword::AS => {
5446 self.next_token();
5447 Ok((true, self.parse_query()?))
5448 }
5449 _ => Ok((false, self.parse_query()?)),
5450 },
5451 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5452 }
5453 }
5454
5455 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5457 self.expect_keyword_is(Keyword::TABLE)?;
5458 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5459 let table_name = self.parse_object_name(false)?;
5460 Ok(Statement::UNCache {
5461 table_name,
5462 if_exists,
5463 })
5464 }
5465
5466 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5468 self.expect_keyword_is(Keyword::TABLE)?;
5469 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5470 let table_name = self.parse_object_name(false)?;
5471 self.expect_keyword_is(Keyword::USING)?;
5472 let module_name = self.parse_identifier()?;
5473 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5478 Ok(Statement::CreateVirtualTable {
5479 name: table_name,
5480 if_not_exists,
5481 module_name,
5482 module_args,
5483 })
5484 }
5485
5486 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5488 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5489
5490 let schema_name = self.parse_schema_name()?;
5491
5492 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5493 Some(self.parse_expr()?)
5494 } else {
5495 None
5496 };
5497
5498 let with = if self.peek_keyword(Keyword::WITH) {
5499 Some(self.parse_options(Keyword::WITH)?)
5500 } else {
5501 None
5502 };
5503
5504 let options = if self.peek_keyword(Keyword::OPTIONS) {
5505 Some(self.parse_options(Keyword::OPTIONS)?)
5506 } else {
5507 None
5508 };
5509
5510 let clone = if self.parse_keyword(Keyword::CLONE) {
5511 Some(self.parse_object_name(false)?)
5512 } else {
5513 None
5514 };
5515
5516 Ok(Statement::CreateSchema {
5517 schema_name,
5518 if_not_exists,
5519 with,
5520 options,
5521 default_collate_spec,
5522 clone,
5523 })
5524 }
5525
5526 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5527 if self.parse_keyword(Keyword::AUTHORIZATION) {
5528 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5529 } else {
5530 let name = self.parse_object_name(false)?;
5531
5532 if self.parse_keyword(Keyword::AUTHORIZATION) {
5533 Ok(SchemaName::NamedAuthorization(
5534 name,
5535 self.parse_identifier()?,
5536 ))
5537 } else {
5538 Ok(SchemaName::Simple(name))
5539 }
5540 }
5541 }
5542
    /// Parses `CREATE DATABASE [IF NOT EXISTS] <name>` with Hive-style
    /// `LOCATION`/`MANAGEDLOCATION`, an optional `CLONE`, and MySQL-style
    /// `[DEFAULT] CHARACTER SET|CHARSET` / `[DEFAULT] COLLATE` clauses.
    /// Snowflake-specific fields of `Statement::CreateDatabase` are left `None`.
    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let db_name = self.parse_object_name(false)?;
        let mut location = None;
        let mut managed_location = None;
        // LOCATION / MANAGEDLOCATION may appear in any order, each at most
        // meaningfully once (a repeat silently overwrites the earlier value).
        loop {
            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
                Some(Keyword::MANAGEDLOCATION) => {
                    managed_location = Some(self.parse_literal_string()?)
                }
                _ => break,
            }
        }
        let clone = if self.parse_keyword(Keyword::CLONE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let mut default_charset = None;
        let mut default_collation = None;
        // MySQL-style trailing clauses; each may be preceded by DEFAULT.
        loop {
            let has_default = self.parse_keyword(Keyword::DEFAULT);
            // NOTE(review): `&&` binds tighter than `||`, so the
            // `default_charset.is_none()` guard applies only to the
            // `CHARACTER SET` spelling, not to `CHARSET` — confirm this
            // asymmetry is intended.
            if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
                || self.parse_keyword(Keyword::CHARSET)
            {
                // An optional `=` may separate the keyword from the value.
                let _ = self.consume_token(&Token::Eq);
                default_charset = Some(self.parse_identifier()?.value);
            } else if self.parse_keyword(Keyword::COLLATE) {
                let _ = self.consume_token(&Token::Eq);
                default_collation = Some(self.parse_identifier()?.value);
            } else if has_default {
                // DEFAULT was consumed but no known clause followed: put the
                // token back so later parsing sees it, then stop.
                self.prev_token();
                break;
            } else {
                break;
            }
        }

        Ok(Statement::CreateDatabase {
            db_name,
            if_not_exists: ine,
            location,
            managed_location,
            or_replace: false,
            transient: false,
            clone,
            data_retention_time_in_days: None,
            max_data_extension_time_in_days: None,
            external_volume: None,
            catalog: None,
            replace_invalid_characters: None,
            default_ddl_collation: None,
            storage_serialization_policy: None,
            comment: None,
            default_charset,
            default_collation,
            catalog_sync: None,
            catalog_sync_namespace_mode: None,
            catalog_sync_namespace_flatten_delimiter: None,
            with_tags: None,
            with_contacts: None,
        })
    }
5617
5618 pub fn parse_optional_create_function_using(
5620 &mut self,
5621 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5622 if !self.parse_keyword(Keyword::USING) {
5623 return Ok(None);
5624 };
5625 let keyword =
5626 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5627
5628 let uri = self.parse_literal_string()?;
5629
5630 match keyword {
5631 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5632 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5633 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5634 _ => self.expected(
5635 "JAR, FILE or ARCHIVE, got {:?}",
5636 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5637 ),
5638 }
5639 }
5640
    /// Dispatches `CREATE FUNCTION` parsing to a dialect-specific
    /// implementation. Arm order matters: `GenericDialect` is handled by the
    /// Postgres arm, so dialects checked earlier take precedence.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB treats CREATE FUNCTION as a macro definition.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
                .map(Into::into)
        } else {
            // Unsupported dialect: rewind past FUNCTION so the error points
            // at the object-type keyword.
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5667
    /// Parses the PostgreSQL form of `CREATE FUNCTION`:
    /// `<name> ( [args] ) [RETURNS type]` followed by any number of function
    /// attributes (`AS`, `LANGUAGE`, volatility, null-handling, `PARALLEL`,
    /// `SECURITY`, `SET`, `RETURN`) in arbitrary order. Each attribute except
    /// `SET` may appear at most once.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // An empty argument list `()` is allowed.
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Accumulator for the once-only attributes gathered in the loop below.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a second occurrence of a once-only attribute.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // `SET name FROM CURRENT` or `SET name { = | TO } DEFAULT|values`.
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // SQL-standard body form; mutually exclusive with AS.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No further attribute keywords: the attribute list is done.
                break;
            }
        }

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5822
5823 fn parse_hive_create_function(
5827 &mut self,
5828 or_replace: bool,
5829 temporary: bool,
5830 ) -> Result<CreateFunction, ParserError> {
5831 let name = self.parse_object_name(false)?;
5832 self.expect_keyword_is(Keyword::AS)?;
5833
5834 let body = self.parse_create_function_body_string()?;
5835 let using = self.parse_optional_create_function_using()?;
5836
5837 Ok(CreateFunction {
5838 or_alter: false,
5839 or_replace,
5840 temporary,
5841 name,
5842 function_body: Some(body),
5843 using,
5844 if_not_exists: false,
5845 args: None,
5846 return_type: None,
5847 behavior: None,
5848 called_on_null: None,
5849 parallel: None,
5850 security: None,
5851 set_params: vec![],
5852 language: None,
5853 determinism_specifier: None,
5854 options: None,
5855 remote_connection: None,
5856 })
5857 }
5858
    /// Parses the BigQuery form of `CREATE FUNCTION`:
    /// `[IF NOT EXISTS] <name>(params) [RETURNS type] [[NOT] DETERMINISTIC]
    /// [LANGUAGE lang] [REMOTE WITH CONNECTION conn] [OPTIONS(...)] [AS expr]`.
    /// The relative order of `OPTIONS` and `AS` is recorded in the
    /// `CreateFunctionBody` variant so the statement round-trips exactly.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS may appear either before or after the AS body; try here first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions have no AS body; otherwise AS is mandatory.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // OPTIONS not seen yet: it may follow the body instead.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5938
    /// Parses the T-SQL form of `CREATE [OR ALTER] FUNCTION`:
    /// `<name>(params) RETURNS [<var> TABLE (...) | type] [AS]
    /// { BEGIN ... END | RETURN (subquery | SELECT ...) }`.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Speculatively parse the table-valued form `RETURNS <var> TABLE (...)`;
        // maybe_parse rewinds on failure so a plain type can be parsed instead.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            // Confirm TABLE follows, then step back so parse_data_type sees it.
            p.expect_keyword_is(Keyword::TABLE)?;
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // AS before the body is optional in T-SQL.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            // Multi-statement body: BEGIN ... END with the delimiting tokens kept.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            // Single-statement body: parenthesized expression or bare SELECT.
            if self.peek_token_ref().token == Token::LParen {
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
6029
6030 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
6031 if self.parse_keyword(Keyword::SETOF) {
6032 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
6033 } else {
6034 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
6035 }
6036 }
6037
6038 fn parse_create_function_name_and_params(
6039 &mut self,
6040 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
6041 let name = self.parse_object_name(false)?;
6042 let parse_function_param =
6043 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6044 let name = parser.parse_identifier()?;
6045 let data_type = parser.parse_data_type()?;
6046 let default_expr = if parser.consume_token(&Token::Eq) {
6047 Some(parser.parse_expr()?)
6048 } else {
6049 None
6050 };
6051
6052 Ok(OperateFunctionArg {
6053 mode: None,
6054 name: Some(name),
6055 data_type,
6056 default_expr,
6057 })
6058 };
6059 self.expect_token(&Token::LParen)?;
6060 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6061 self.expect_token(&Token::RParen)?;
6062 Ok((name, args))
6063 }
6064
    /// Parses one function argument of the form
    /// `[IN|OUT|INOUT|VARIADIC] [name] type [{DEFAULT | =} expr]`.
    /// Because the name is optional, the first token(s) are parsed as a data
    /// type and then reinterpreted as the argument name if a second data type
    /// follows.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        let mut name = None;
        // Tentatively parse what may turn out to be the name, as a data type.
        let mut data_type = self.parse_data_type()?;

        // Index of the token right after the first "data type": used to
        // recover that token as the name if a second type follows.
        let data_type_idx = self.get_current_index();

        // DEFAULT must not be mistaken for a type name during the speculative
        // second parse.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // A second data type means the first parse actually consumed the name.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // The name must be a single word; anything else is a syntax error.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Either `DEFAULT expr` or `= expr` supplies a default value.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6125
    /// Parses one argument of an aggregate signature: `[IN] [name] type`.
    /// Unlike `parse_function_arg`, only the IN mode is accepted and default
    /// values are rejected. Uses the same name-vs-type backtracking trick:
    /// parse a data type first, and reinterpret it as the name if another
    /// data type follows.
    fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else {
            // OUT/INOUT/VARIADIC are invalid in aggregate signatures; report
            // them explicitly rather than misparsing them as a type.
            if self
                .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
                .is_some()
            {
                return self.expected_ref(
                    "IN or argument type in aggregate signature",
                    self.peek_token_ref(),
                );
            }
            None
        };

        let mut name = None;
        // Tentatively parse what may turn out to be the name, as a data type.
        let mut data_type = self.parse_data_type()?;
        let data_type_idx = self.get_current_index();

        // Tokens that cannot begin a type here (DEFAULT, ORDER, ',' or ')')
        // must make the speculative second parse fail cleanly.
        fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT)
                || parser.peek_keyword(Keyword::ORDER)
                || parser.peek_token_ref().token == Token::Comma
                || parser.peek_token_ref().token == Token::RParen
            {
                parser_err!(
                    "The current token cannot start an aggregate argument type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // A second data type means the first parse actually consumed the name.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
            let token = self.token_at(data_type_idx);
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Aggregate arguments cannot carry defaults; reject `DEFAULT` / `=`.
        if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
            return self.expected_ref(
                "',' or ')' or ORDER BY after aggregate argument type",
                self.peek_token_ref(),
            );
        }

        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr: None,
        })
    }
6188
6189 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6195 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6196 {
6197 self.prev_token();
6198 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6199 }
6200 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6201 let trigger_name = self.parse_object_name(false)?;
6202 let table_name = if self.parse_keyword(Keyword::ON) {
6203 Some(self.parse_object_name(false)?)
6204 } else {
6205 None
6206 };
6207 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6208 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6209 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6210 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6211 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6212 )),
6213 None => None,
6214 };
6215 Ok(DropTrigger {
6216 if_exists,
6217 trigger_name,
6218 table_name,
6219 option,
6220 })
6221 }
6222
6223 pub fn parse_create_trigger(
6225 &mut self,
6226 temporary: bool,
6227 or_alter: bool,
6228 or_replace: bool,
6229 is_constraint: bool,
6230 ) -> Result<CreateTrigger, ParserError> {
6231 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6232 {
6233 self.prev_token();
6234 return self.expected_ref("an object type after CREATE", self.peek_token_ref());
6235 }
6236
6237 let name = self.parse_object_name(false)?;
6238 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
6239
6240 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
6241 self.expect_keyword_is(Keyword::ON)?;
6242 let table_name = self.parse_object_name(false)?;
6243
6244 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
6245 self.parse_object_name(true).ok()
6246 } else {
6247 None
6248 };
6249
6250 let characteristics = self.parse_constraint_characteristics()?;
6251
6252 let mut referencing = vec![];
6253 if self.parse_keyword(Keyword::REFERENCING) {
6254 while let Some(refer) = self.parse_trigger_referencing()? {
6255 referencing.push(refer);
6256 }
6257 }
6258
6259 let trigger_object = if self.parse_keyword(Keyword::FOR) {
6260 let include_each = self.parse_keyword(Keyword::EACH);
6261 let trigger_object =
6262 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
6263 Keyword::ROW => TriggerObject::Row,
6264 Keyword::STATEMENT => TriggerObject::Statement,
6265 unexpected_keyword => return Err(ParserError::ParserError(
6266 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
6267 )),
6268 };
6269
6270 Some(if include_each {
6271 TriggerObjectKind::ForEach(trigger_object)
6272 } else {
6273 TriggerObjectKind::For(trigger_object)
6274 })
6275 } else {
6276 let _ = self.parse_keyword(Keyword::FOR);
6277
6278 None
6279 };
6280
6281 let condition = self
6282 .parse_keyword(Keyword::WHEN)
6283 .then(|| self.parse_expr())
6284 .transpose()?;
6285
6286 let mut exec_body = None;
6287 let mut statements = None;
6288 if self.parse_keyword(Keyword::EXECUTE) {
6289 exec_body = Some(self.parse_trigger_exec_body()?);
6290 } else {
6291 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6292 }
6293
6294 Ok(CreateTrigger {
6295 or_alter,
6296 temporary,
6297 or_replace,
6298 is_constraint,
6299 name,
6300 period,
6301 period_before_table: true,
6302 events,
6303 table_name,
6304 referenced_table_name,
6305 referencing,
6306 trigger_object,
6307 condition,
6308 exec_body,
6309 statements_as: false,
6310 statements,
6311 characteristics,
6312 })
6313 }
6314
6315 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6317 Ok(
6318 match self.expect_one_of_keywords(&[
6319 Keyword::FOR,
6320 Keyword::BEFORE,
6321 Keyword::AFTER,
6322 Keyword::INSTEAD,
6323 ])? {
6324 Keyword::FOR => TriggerPeriod::For,
6325 Keyword::BEFORE => TriggerPeriod::Before,
6326 Keyword::AFTER => TriggerPeriod::After,
6327 Keyword::INSTEAD => self
6328 .expect_keyword_is(Keyword::OF)
6329 .map(|_| TriggerPeriod::InsteadOf)?,
6330 unexpected_keyword => return Err(ParserError::ParserError(
6331 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6332 )),
6333 },
6334 )
6335 }
6336
6337 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6339 Ok(
6340 match self.expect_one_of_keywords(&[
6341 Keyword::INSERT,
6342 Keyword::UPDATE,
6343 Keyword::DELETE,
6344 Keyword::TRUNCATE,
6345 ])? {
6346 Keyword::INSERT => TriggerEvent::Insert,
6347 Keyword::UPDATE => {
6348 if self.parse_keyword(Keyword::OF) {
6349 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6350 TriggerEvent::Update(cols)
6351 } else {
6352 TriggerEvent::Update(vec![])
6353 }
6354 }
6355 Keyword::DELETE => TriggerEvent::Delete,
6356 Keyword::TRUNCATE => TriggerEvent::Truncate,
6357 unexpected_keyword => return Err(ParserError::ParserError(
6358 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6359 )),
6360 },
6361 )
6362 }
6363
    /// Parse one `REFERENCING` clause item: `OLD TABLE [AS] name` or
    /// `NEW TABLE [AS] name`. Returns `Ok(None)` when the next tokens do not
    /// start such an item, which ends the caller's loop.
    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
            // The match guards consume the `TABLE` keyword only when it
            // directly follows `OLD`/`NEW`.
            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::OldTable
            }
            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::NewTable
            }
            _ => {
                // NOTE(review): if `OLD`/`NEW` was parsed but not followed by
                // `TABLE`, that keyword has already been consumed when we bail
                // out here — confirm callers tolerate the advanced position.
                return Ok(None);
            }
        };

        // Optional `AS` before the transition relation name.
        let is_as = self.parse_keyword(Keyword::AS);
        let transition_relation_name = self.parse_object_name(false)?;
        Ok(Some(TriggerReferencing {
            refer_type,
            is_as,
            transition_relation_name,
        }))
    }
6386
6387 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6389 Ok(TriggerExecBody {
6390 exec_type: match self
6391 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6392 {
6393 Keyword::FUNCTION => TriggerExecBodyType::Function,
6394 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6395 unexpected_keyword => return Err(ParserError::ParserError(
6396 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
6397 )),
6398 },
6399 func_desc: self.parse_function_desc()?,
6400 })
6401 }
6402
6403 pub fn parse_create_macro(
6405 &mut self,
6406 or_replace: bool,
6407 temporary: bool,
6408 ) -> Result<Statement, ParserError> {
6409 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6410 let name = self.parse_object_name(false)?;
6411 self.expect_token(&Token::LParen)?;
6412 let args = if self.consume_token(&Token::RParen) {
6413 self.prev_token();
6414 None
6415 } else {
6416 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6417 };
6418
6419 self.expect_token(&Token::RParen)?;
6420 self.expect_keyword_is(Keyword::AS)?;
6421
6422 Ok(Statement::CreateMacro {
6423 or_replace,
6424 temporary,
6425 name,
6426 args,
6427 definition: if self.parse_keyword(Keyword::TABLE) {
6428 MacroDefinition::Table(self.parse_query()?)
6429 } else {
6430 MacroDefinition::Expr(self.parse_expr()?)
6431 },
6432 })
6433 } else {
6434 self.prev_token();
6435 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6436 }
6437 }
6438
6439 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6440 let name = self.parse_identifier()?;
6441
6442 let default_expr =
6443 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6444 Some(self.parse_expr()?)
6445 } else {
6446 None
6447 };
6448 Ok(MacroArg { name, default_expr })
6449 }
6450
6451 pub fn parse_create_external_table(
6453 &mut self,
6454 or_replace: bool,
6455 ) -> Result<CreateTable, ParserError> {
6456 self.expect_keyword_is(Keyword::TABLE)?;
6457 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6458 let table_name = self.parse_object_name(false)?;
6459 let (columns, constraints) = self.parse_columns()?;
6460
6461 let hive_distribution = self.parse_hive_distribution()?;
6462 let hive_formats = self.parse_hive_formats()?;
6463
6464 let file_format = if let Some(ref hf) = hive_formats {
6465 if let Some(ref ff) = hf.storage {
6466 match ff {
6467 HiveIOFormat::FileFormat { format } => Some(*format),
6468 _ => None,
6469 }
6470 } else {
6471 None
6472 }
6473 } else {
6474 None
6475 };
6476 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6477 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6478 let table_options = if !table_properties.is_empty() {
6479 CreateTableOptions::TableProperties(table_properties)
6480 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6481 CreateTableOptions::Options(options)
6482 } else {
6483 CreateTableOptions::None
6484 };
6485 Ok(CreateTableBuilder::new(table_name)
6486 .columns(columns)
6487 .constraints(constraints)
6488 .hive_distribution(hive_distribution)
6489 .hive_formats(hive_formats)
6490 .table_options(table_options)
6491 .or_replace(or_replace)
6492 .if_not_exists(if_not_exists)
6493 .external(true)
6494 .file_format(file_format)
6495 .location(location)
6496 .build())
6497 }
6498
6499 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6503 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6504 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6505 let table_name = self.parse_object_name(true)?;
6506
6507 self.expect_keyword_is(Keyword::CLONE)?;
6508 let clone = Some(self.parse_object_name(true)?);
6509
6510 let version =
6511 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6512 {
6513 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6514 } else {
6515 None
6516 };
6517
6518 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6519 CreateTableOptions::Options(options)
6520 } else {
6521 CreateTableOptions::None
6522 };
6523
6524 Ok(CreateTableBuilder::new(table_name)
6525 .snapshot(true)
6526 .if_not_exists(if_not_exists)
6527 .clone_clause(clone)
6528 .version(version)
6529 .table_options(table_options)
6530 .build())
6531 }
6532
6533 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6535 let next_token = self.next_token();
6536 match &next_token.token {
6537 Token::Word(w) => match w.keyword {
6538 Keyword::AVRO => Ok(FileFormat::AVRO),
6539 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6540 Keyword::ORC => Ok(FileFormat::ORC),
6541 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6542 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6543 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6544 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6545 _ => self.expected("fileformat", next_token),
6546 },
6547 _ => self.expected("fileformat", next_token),
6548 }
6549 }
6550
6551 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6552 if self.consume_token(&Token::Eq) {
6553 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6554 } else {
6555 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6556 }
6557 }
6558
6559 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6561 let next_token = self.next_token();
6562 match &next_token.token {
6563 Token::Word(w) => match w.keyword {
6564 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6565 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6566 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6567 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6568 _ => self.expected("fileformat", next_token),
6569 },
6570 _ => self.expected("fileformat", next_token),
6571 }
6572 }
6573
    /// Parse a `CREATE ... VIEW` statement. The leading modifiers
    /// (`OR ALTER`, `OR REPLACE`, temporary, dialect-specific params) have
    /// already been consumed and are passed in as flags.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery allows unquoted hyphenated names like `my-project.dataset.view`.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` may appear before or after the view name; record
        // which position was used so the statement round-trips verbatim.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        // `WITH (...)` options; may be superseded by BigQuery `OPTIONS (...)` below.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style `OPTIONS (...)` overrides `WITH (...)` when non-empty.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse `TO <table>` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '...'` for dialects that support it on views.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift `WITH NO SCHEMA BINDING` trails the defining query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
        })
    }
6668
    /// Parse the optional MySQL-style view parameters that may appear between
    /// `CREATE` and `VIEW`: `ALGORITHM = ...`, `DEFINER = ...`,
    /// `SQL SECURITY ...`. Returns `Ok(None)` when none are present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Defensive arm: `expect_one_of_keywords` should only
                    // return the listed keywords. Step back so the error
                    // message points at the offending token.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Defensive arm, as above.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only wrap in `Some` when at least one parameter was actually given.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6726
    /// Parse a `CREATE ROLE` statement (PostgreSQL and MsSql flavors).
    ///
    /// After the role name(s), a dialect-specific set of option keywords is
    /// accepted in any order; each option may appear at most once, and a
    /// duplicate produces a parser error located at the repeated keyword.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // `WITH` is optional noise before the option list.
        let _ = self.parse_keyword(Keyword::WITH);
        // The set of recognized option keywords depends on the dialect.
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Accumulators for every possible option; `None`/empty means "not seen".
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, for duplicate-option errors.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            // Each arm yields `Result<(), _>`; the trailing `?` after the
            // match propagates the first duplicate-option error.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN ROLE ...` and `IN GROUP ...` are tracked separately.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // `parse_one_of_keywords` only returns keywords from
                // `optional_keywords`, so this arm is defensive.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
6951
6952 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6954 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6955 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6956 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6957 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6958 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6959 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6960 )),
6961 None => {
6962 match self.parse_identifier() {
6963 Ok(ident) => Owner::Ident(ident),
6964 Err(e) => {
6965 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6966 }
6967 }
6968 }
6969 };
6970 Ok(owner)
6971 }
6972
6973 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
6975 let name = self.parse_object_name(false)?;
6976 self.expect_keyword_is(Keyword::AS)?;
6977 let data_type = self.parse_data_type()?;
6978 let collation = if self.parse_keyword(Keyword::COLLATE) {
6979 Some(self.parse_identifier()?)
6980 } else {
6981 None
6982 };
6983 let default = if self.parse_keyword(Keyword::DEFAULT) {
6984 Some(self.parse_expr()?)
6985 } else {
6986 None
6987 };
6988 let mut constraints = Vec::new();
6989 while let Some(constraint) = self.parse_optional_table_constraint()? {
6990 constraints.push(constraint);
6991 }
6992
6993 Ok(CreateDomain {
6994 name,
6995 data_type,
6996 collation,
6997 default,
6998 constraints,
6999 })
7000 }
7001
7002 pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
7012 let name = self.parse_identifier()?;
7013 self.expect_keyword_is(Keyword::ON)?;
7014 let table_name = self.parse_object_name(false)?;
7015
7016 let policy_type = if self.parse_keyword(Keyword::AS) {
7017 let keyword =
7018 self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
7019 Some(match keyword {
7020 Keyword::PERMISSIVE => CreatePolicyType::Permissive,
7021 Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
7022 unexpected_keyword => return Err(ParserError::ParserError(
7023 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
7024 )),
7025 })
7026 } else {
7027 None
7028 };
7029
7030 let command = if self.parse_keyword(Keyword::FOR) {
7031 let keyword = self.expect_one_of_keywords(&[
7032 Keyword::ALL,
7033 Keyword::SELECT,
7034 Keyword::INSERT,
7035 Keyword::UPDATE,
7036 Keyword::DELETE,
7037 ])?;
7038 Some(match keyword {
7039 Keyword::ALL => CreatePolicyCommand::All,
7040 Keyword::SELECT => CreatePolicyCommand::Select,
7041 Keyword::INSERT => CreatePolicyCommand::Insert,
7042 Keyword::UPDATE => CreatePolicyCommand::Update,
7043 Keyword::DELETE => CreatePolicyCommand::Delete,
7044 unexpected_keyword => return Err(ParserError::ParserError(
7045 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
7046 )),
7047 })
7048 } else {
7049 None
7050 };
7051
7052 let to = if self.parse_keyword(Keyword::TO) {
7053 Some(self.parse_comma_separated(|p| p.parse_owner())?)
7054 } else {
7055 None
7056 };
7057
7058 let using = if self.parse_keyword(Keyword::USING) {
7059 self.expect_token(&Token::LParen)?;
7060 let expr = self.parse_expr()?;
7061 self.expect_token(&Token::RParen)?;
7062 Some(expr)
7063 } else {
7064 None
7065 };
7066
7067 let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
7068 self.expect_token(&Token::LParen)?;
7069 let expr = self.parse_expr()?;
7070 self.expect_token(&Token::RParen)?;
7071 Some(expr)
7072 } else {
7073 None
7074 };
7075
7076 Ok(CreatePolicy {
7077 name,
7078 table_name,
7079 policy_type,
7080 command,
7081 to,
7082 using,
7083 with_check,
7084 })
7085 }
7086
7087 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7097 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7098 let name = self.parse_identifier()?;
7099
7100 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7101 Some(self.parse_literal_string()?)
7102 } else {
7103 None
7104 };
7105
7106 let url = if self.parse_keyword(Keyword::URL) {
7107 Some(self.parse_literal_string()?)
7108 } else {
7109 None
7110 };
7111
7112 let comment = self.parse_optional_inline_comment()?;
7113
7114 let with_dcproperties =
7115 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7116 properties if !properties.is_empty() => Some(properties),
7117 _ => None,
7118 };
7119
7120 Ok(CreateConnector {
7121 name,
7122 if_not_exists,
7123 connector_type,
7124 url,
7125 comment,
7126 with_dcproperties,
7127 })
7128 }
7129
7130 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7136 let mut parts = vec![];
7137 loop {
7138 parts.push(ObjectNamePart::Identifier(Ident::new(
7139 self.next_token().to_string(),
7140 )));
7141 if !self.consume_token(&Token::Period) {
7142 break;
7143 }
7144 }
7145 Ok(ObjectName(parts))
7146 }
7147
    /// Parse a PostgreSQL `CREATE OPERATOR name ( option, ... )` statement.
    ///
    /// Options may appear in any order; the match guards below enforce that
    /// each option appears at most once (a repeat falls through to the
    /// catch-all error arm). `FUNCTION`/`PROCEDURE` is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Bare flags — allowed once each.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // `FUNCTION = f` / `PROCEDURE = f` (mutually exclusive; the
                // keyword used is remembered via `is_procedure`).
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // `COMMUTATOR = op` or `COMMUTATOR = OPERATOR(op)`.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                // `NEGATOR = op` or `NEGATOR = OPERATOR(op)`.
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                // `RESTRICT = <selectivity estimator>`.
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                // `JOIN = <join selectivity estimator>`.
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a guard above rejected a duplicate option.
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION (or PROCEDURE) is the only mandatory option.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7269
7270 pub fn parse_create_aggregate(
7274 &mut self,
7275 or_replace: bool,
7276 ) -> Result<CreateAggregate, ParserError> {
7277 let name = self.parse_object_name(false)?;
7278
7279 self.expect_token(&Token::LParen)?;
7281 let args = if self.consume_token(&Token::Mul) {
7282 vec![]
7284 } else if self.consume_token(&Token::RParen) {
7285 self.prev_token();
7286 vec![]
7287 } else {
7288 let parsed = self.parse_comma_separated(|p| p.parse_data_type())?;
7289 parsed
7290 };
7291 self.expect_token(&Token::RParen)?;
7292
7293 self.expect_token(&Token::LParen)?;
7295 let mut options: Vec<CreateAggregateOption> = Vec::new();
7296 loop {
7297 let token = self.next_token();
7298 match &token.token {
7299 Token::RParen => break,
7300 Token::Comma => continue,
7301 Token::Word(word) => {
7302 let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
7303 options.push(option);
7304 }
7305 other => {
7306 return Err(ParserError::ParserError(format!(
7307 "Unexpected token in CREATE AGGREGATE options: {other:?}"
7308 )));
7309 }
7310 }
7311 }
7312
7313 Ok(CreateAggregate {
7314 or_replace,
7315 name,
7316 args,
7317 options,
7318 })
7319 }
7320
7321 fn parse_create_aggregate_option(
7322 &mut self,
7323 key: &str,
7324 ) -> Result<CreateAggregateOption, ParserError> {
7325 match key {
7326 "SFUNC" => {
7327 self.expect_token(&Token::Eq)?;
7328 Ok(CreateAggregateOption::Sfunc(
7329 self.parse_object_name(false)?,
7330 ))
7331 }
7332 "STYPE" => {
7333 self.expect_token(&Token::Eq)?;
7334 Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
7335 }
7336 "SSPACE" => {
7337 self.expect_token(&Token::Eq)?;
7338 let size = self.parse_literal_uint()?;
7339 Ok(CreateAggregateOption::Sspace(size))
7340 }
7341 "FINALFUNC" => {
7342 self.expect_token(&Token::Eq)?;
7343 Ok(CreateAggregateOption::Finalfunc(
7344 self.parse_object_name(false)?,
7345 ))
7346 }
7347 "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
7348 "FINALFUNC_MODIFY" => {
7349 self.expect_token(&Token::Eq)?;
7350 Ok(CreateAggregateOption::FinalfuncModify(
7351 self.parse_aggregate_modify_kind()?,
7352 ))
7353 }
7354 "COMBINEFUNC" => {
7355 self.expect_token(&Token::Eq)?;
7356 Ok(CreateAggregateOption::Combinefunc(
7357 self.parse_object_name(false)?,
7358 ))
7359 }
7360 "SERIALFUNC" => {
7361 self.expect_token(&Token::Eq)?;
7362 Ok(CreateAggregateOption::Serialfunc(
7363 self.parse_object_name(false)?,
7364 ))
7365 }
7366 "DESERIALFUNC" => {
7367 self.expect_token(&Token::Eq)?;
7368 Ok(CreateAggregateOption::Deserialfunc(
7369 self.parse_object_name(false)?,
7370 ))
7371 }
7372 "INITCOND" => {
7373 self.expect_token(&Token::Eq)?;
7374 Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
7375 }
7376 "MSFUNC" => {
7377 self.expect_token(&Token::Eq)?;
7378 Ok(CreateAggregateOption::Msfunc(
7379 self.parse_object_name(false)?,
7380 ))
7381 }
7382 "MINVFUNC" => {
7383 self.expect_token(&Token::Eq)?;
7384 Ok(CreateAggregateOption::Minvfunc(
7385 self.parse_object_name(false)?,
7386 ))
7387 }
7388 "MSTYPE" => {
7389 self.expect_token(&Token::Eq)?;
7390 Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
7391 }
7392 "MSSPACE" => {
7393 self.expect_token(&Token::Eq)?;
7394 let size = self.parse_literal_uint()?;
7395 Ok(CreateAggregateOption::Msspace(size))
7396 }
7397 "MFINALFUNC" => {
7398 self.expect_token(&Token::Eq)?;
7399 Ok(CreateAggregateOption::Mfinalfunc(
7400 self.parse_object_name(false)?,
7401 ))
7402 }
7403 "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
7404 "MFINALFUNC_MODIFY" => {
7405 self.expect_token(&Token::Eq)?;
7406 Ok(CreateAggregateOption::MfinalfuncModify(
7407 self.parse_aggregate_modify_kind()?,
7408 ))
7409 }
7410 "MINITCOND" => {
7411 self.expect_token(&Token::Eq)?;
7412 Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
7413 }
7414 "SORTOP" => {
7415 self.expect_token(&Token::Eq)?;
7416 Ok(CreateAggregateOption::Sortop(
7417 self.parse_object_name(false)?,
7418 ))
7419 }
7420 "PARALLEL" => {
7421 self.expect_token(&Token::Eq)?;
7422 let parallel = match self.expect_one_of_keywords(&[
7423 Keyword::SAFE,
7424 Keyword::RESTRICTED,
7425 Keyword::UNSAFE,
7426 ])? {
7427 Keyword::SAFE => FunctionParallel::Safe,
7428 Keyword::RESTRICTED => FunctionParallel::Restricted,
7429 Keyword::UNSAFE => FunctionParallel::Unsafe,
7430 _ => unreachable!(),
7431 };
7432 Ok(CreateAggregateOption::Parallel(parallel))
7433 }
7434 "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
7435 other => Err(ParserError::ParserError(format!(
7436 "Unknown CREATE AGGREGATE option: {other}"
7437 ))),
7438 }
7439 }
7440
7441 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7442 let token = self.next_token();
7443 match &token.token {
7444 Token::Word(word) => match word.value.to_uppercase().as_str() {
7445 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7446 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7447 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7448 other => Err(ParserError::ParserError(format!(
7449 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7450 ))),
7451 },
7452 other => Err(ParserError::ParserError(format!(
7453 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7454 ))),
7455 }
7456 }
7457
7458 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7462 let name = self.parse_object_name(false)?;
7463 self.expect_keyword(Keyword::USING)?;
7464 let using = self.parse_identifier()?;
7465
7466 Ok(CreateOperatorFamily { name, using })
7467 }
7468
7469 pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
7473 let name = self.parse_object_name(false)?;
7474 let default = self.parse_keyword(Keyword::DEFAULT);
7475 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
7476 let for_type = self.parse_data_type()?;
7477 self.expect_keyword(Keyword::USING)?;
7478 let using = self.parse_identifier()?;
7479
7480 let family = if self.parse_keyword(Keyword::FAMILY) {
7481 Some(self.parse_object_name(false)?)
7482 } else {
7483 None
7484 };
7485
7486 self.expect_keyword(Keyword::AS)?;
7487
7488 let mut items = vec![];
7489 loop {
7490 if self.parse_keyword(Keyword::OPERATOR) {
7491 let strategy_number = self.parse_literal_uint()?;
7492 let operator_name = self.parse_operator_name()?;
7493
7494 let op_types = if self.consume_token(&Token::LParen) {
7496 let left = self.parse_data_type()?;
7497 self.expect_token(&Token::Comma)?;
7498 let right = self.parse_data_type()?;
7499 self.expect_token(&Token::RParen)?;
7500 Some(OperatorArgTypes { left, right })
7501 } else {
7502 None
7503 };
7504
7505 let purpose = if self.parse_keyword(Keyword::FOR) {
7507 if self.parse_keyword(Keyword::SEARCH) {
7508 Some(OperatorPurpose::ForSearch)
7509 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7510 let sort_family = self.parse_object_name(false)?;
7511 Some(OperatorPurpose::ForOrderBy { sort_family })
7512 } else {
7513 return self
7514 .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
7515 }
7516 } else {
7517 None
7518 };
7519
7520 items.push(OperatorClassItem::Operator {
7521 strategy_number,
7522 operator_name,
7523 op_types,
7524 purpose,
7525 });
7526 } else if self.parse_keyword(Keyword::FUNCTION) {
7527 let support_number = self.parse_literal_uint()?;
7528
7529 let op_types = if self.consume_token(&Token::LParen)
7531 && self.peek_token_ref().token != Token::RParen
7532 {
7533 let mut types = vec![];
7534 loop {
7535 types.push(self.parse_data_type()?);
7536 if !self.consume_token(&Token::Comma) {
7537 break;
7538 }
7539 }
7540 self.expect_token(&Token::RParen)?;
7541 Some(types)
7542 } else if self.consume_token(&Token::LParen) {
7543 self.expect_token(&Token::RParen)?;
7544 Some(vec![])
7545 } else {
7546 None
7547 };
7548
7549 let function_name = self.parse_object_name(false)?;
7550
7551 let argument_types = if self.consume_token(&Token::LParen) {
7553 let mut types = vec![];
7554 loop {
7555 if self.peek_token_ref().token == Token::RParen {
7556 break;
7557 }
7558 types.push(self.parse_data_type()?);
7559 if !self.consume_token(&Token::Comma) {
7560 break;
7561 }
7562 }
7563 self.expect_token(&Token::RParen)?;
7564 types
7565 } else {
7566 vec![]
7567 };
7568
7569 items.push(OperatorClassItem::Function {
7570 support_number,
7571 op_types,
7572 function_name,
7573 argument_types,
7574 });
7575 } else if self.parse_keyword(Keyword::STORAGE) {
7576 let storage_type = self.parse_data_type()?;
7577 items.push(OperatorClassItem::Storage { storage_type });
7578 } else {
7579 break;
7580 }
7581
7582 if !self.consume_token(&Token::Comma) {
7584 break;
7585 }
7586 }
7587
7588 Ok(CreateOperatorClass {
7589 name,
7590 default,
7591 for_type,
7592 using,
7593 family,
7594 items,
7595 })
7596 }
7597
7598 pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
7600 let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
7602 && self.parse_keyword(Keyword::TEMPORARY);
7603 let persistent = dialect_of!(self is DuckDbDialect)
7604 && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
7605
7606 let object_type = if self.parse_keyword(Keyword::TABLE) {
7607 ObjectType::Table
7608 } else if self.parse_keyword(Keyword::COLLATION) {
7609 ObjectType::Collation
7610 } else if self.parse_keyword(Keyword::VIEW) {
7611 ObjectType::View
7612 } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
7613 ObjectType::MaterializedView
7614 } else if self.parse_keyword(Keyword::INDEX) {
7615 ObjectType::Index
7616 } else if self.parse_keyword(Keyword::ROLE) {
7617 ObjectType::Role
7618 } else if self.parse_keyword(Keyword::SCHEMA) {
7619 ObjectType::Schema
7620 } else if self.parse_keyword(Keyword::DATABASE) {
7621 ObjectType::Database
7622 } else if self.parse_keyword(Keyword::SEQUENCE) {
7623 ObjectType::Sequence
7624 } else if self.parse_keyword(Keyword::STAGE) {
7625 ObjectType::Stage
7626 } else if self.parse_keyword(Keyword::TYPE) {
7627 ObjectType::Type
7628 } else if self.parse_keyword(Keyword::USER) {
7629 ObjectType::User
7630 } else if self.parse_keyword(Keyword::STREAM) {
7631 ObjectType::Stream
7632 } else if self.parse_keyword(Keyword::FUNCTION) {
7633 return self.parse_drop_function().map(Into::into);
7634 } else if self.parse_keyword(Keyword::POLICY) {
7635 return self.parse_drop_policy().map(Into::into);
7636 } else if self.parse_keyword(Keyword::CONNECTOR) {
7637 return self.parse_drop_connector();
7638 } else if self.parse_keyword(Keyword::DOMAIN) {
7639 return self.parse_drop_domain().map(Into::into);
7640 } else if self.parse_keyword(Keyword::PROCEDURE) {
7641 return self.parse_drop_procedure();
7642 } else if self.parse_keyword(Keyword::SECRET) {
7643 return self.parse_drop_secret(temporary, persistent);
7644 } else if self.parse_keyword(Keyword::TRIGGER) {
7645 return self.parse_drop_trigger().map(Into::into);
7646 } else if self.parse_keyword(Keyword::EXTENSION) {
7647 return self.parse_drop_extension();
7648 } else if self.parse_keyword(Keyword::OPERATOR) {
7649 return if self.parse_keyword(Keyword::FAMILY) {
7651 self.parse_drop_operator_family()
7652 } else if self.parse_keyword(Keyword::CLASS) {
7653 self.parse_drop_operator_class()
7654 } else {
7655 self.parse_drop_operator()
7656 };
7657 } else {
7658 return self.expected_ref(
7659 "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
7660 self.peek_token_ref(),
7661 );
7662 };
7663 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7666 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7667
7668 let loc = self.peek_token_ref().span.start;
7669 let cascade = self.parse_keyword(Keyword::CASCADE);
7670 let restrict = self.parse_keyword(Keyword::RESTRICT);
7671 let purge = self.parse_keyword(Keyword::PURGE);
7672 if cascade && restrict {
7673 return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
7674 }
7675 if object_type == ObjectType::Role && (cascade || restrict || purge) {
7676 return parser_err!(
7677 "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
7678 loc
7679 );
7680 }
7681 let table = if self.parse_keyword(Keyword::ON) {
7682 Some(self.parse_object_name(false)?)
7683 } else {
7684 None
7685 };
7686 Ok(Statement::Drop {
7687 object_type,
7688 if_exists,
7689 names,
7690 cascade,
7691 restrict,
7692 purge,
7693 temporary,
7694 table,
7695 })
7696 }
7697
7698 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7699 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7700 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7701 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7702 _ => None,
7703 }
7704 }
7705
7706 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7711 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7712 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7713 let drop_behavior = self.parse_optional_drop_behavior();
7714 Ok(DropFunction {
7715 if_exists,
7716 func_desc,
7717 drop_behavior,
7718 })
7719 }
7720
7721 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7727 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7728 let name = self.parse_identifier()?;
7729 self.expect_keyword_is(Keyword::ON)?;
7730 let table_name = self.parse_object_name(false)?;
7731 let drop_behavior = self.parse_optional_drop_behavior();
7732 Ok(DropPolicy {
7733 if_exists,
7734 name,
7735 table_name,
7736 drop_behavior,
7737 })
7738 }
7739 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7745 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7746 let name = self.parse_identifier()?;
7747 Ok(Statement::DropConnector { if_exists, name })
7748 }
7749
7750 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7754 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7755 let name = self.parse_object_name(false)?;
7756 let drop_behavior = self.parse_optional_drop_behavior();
7757 Ok(DropDomain {
7758 if_exists,
7759 name,
7760 drop_behavior,
7761 })
7762 }
7763
7764 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7769 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7770 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7771 let drop_behavior = self.parse_optional_drop_behavior();
7772 Ok(Statement::DropProcedure {
7773 if_exists,
7774 proc_desc,
7775 drop_behavior,
7776 })
7777 }
7778
7779 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7780 let name = self.parse_object_name(false)?;
7781
7782 let args = if self.consume_token(&Token::LParen) {
7783 if self.consume_token(&Token::RParen) {
7784 Some(vec![])
7785 } else {
7786 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7787 self.expect_token(&Token::RParen)?;
7788 Some(args)
7789 }
7790 } else {
7791 None
7792 };
7793
7794 Ok(FunctionDesc { name, args })
7795 }
7796
7797 fn parse_drop_secret(
7799 &mut self,
7800 temporary: bool,
7801 persistent: bool,
7802 ) -> Result<Statement, ParserError> {
7803 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7804 let name = self.parse_identifier()?;
7805 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7806 self.parse_identifier().ok()
7807 } else {
7808 None
7809 };
7810 let temp = match (temporary, persistent) {
7811 (true, false) => Some(true),
7812 (false, true) => Some(false),
7813 (false, false) => None,
7814 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
7815 };
7816
7817 Ok(Statement::DropSecret {
7818 if_exists,
7819 temporary: temp,
7820 name,
7821 storage_specifier,
7822 })
7823 }
7824
7825 pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
7835 if dialect_of!(self is BigQueryDialect) {
7836 return self.parse_big_query_declare();
7837 }
7838 if dialect_of!(self is SnowflakeDialect) {
7839 return self.parse_snowflake_declare();
7840 }
7841 if dialect_of!(self is MsSqlDialect) {
7842 return self.parse_mssql_declare();
7843 }
7844
7845 let name = self.parse_identifier()?;
7846
7847 let binary = Some(self.parse_keyword(Keyword::BINARY));
7848 let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
7849 Some(true)
7850 } else if self.parse_keyword(Keyword::ASENSITIVE) {
7851 Some(false)
7852 } else {
7853 None
7854 };
7855 let scroll = if self.parse_keyword(Keyword::SCROLL) {
7856 Some(true)
7857 } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
7858 Some(false)
7859 } else {
7860 None
7861 };
7862
7863 self.expect_keyword_is(Keyword::CURSOR)?;
7864 let declare_type = Some(DeclareType::Cursor);
7865
7866 let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
7867 Some(keyword) => {
7868 self.expect_keyword_is(Keyword::HOLD)?;
7869
7870 match keyword {
7871 Keyword::WITH => Some(true),
7872 Keyword::WITHOUT => Some(false),
7873 unexpected_keyword => return Err(ParserError::ParserError(
7874 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
7875 )),
7876 }
7877 }
7878 None => None,
7879 };
7880
7881 self.expect_keyword_is(Keyword::FOR)?;
7882
7883 let query = Some(self.parse_query()?);
7884
7885 Ok(Statement::Declare {
7886 stmts: vec![Declare {
7887 names: vec![name],
7888 data_type: None,
7889 assignment: None,
7890 declare_type,
7891 binary,
7892 sensitive,
7893 scroll,
7894 hold,
7895 for_query: query,
7896 }],
7897 })
7898 }
7899
7900 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7908 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7909
7910 let data_type = match &self.peek_token_ref().token {
7911 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7912 _ => Some(self.parse_data_type()?),
7913 };
7914
7915 let expr = if data_type.is_some() {
7916 if self.parse_keyword(Keyword::DEFAULT) {
7917 Some(self.parse_expr()?)
7918 } else {
7919 None
7920 }
7921 } else {
7922 self.expect_keyword_is(Keyword::DEFAULT)?;
7925 Some(self.parse_expr()?)
7926 };
7927
7928 Ok(Statement::Declare {
7929 stmts: vec![Declare {
7930 names,
7931 data_type,
7932 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7933 declare_type: None,
7934 binary: None,
7935 sensitive: None,
7936 scroll: None,
7937 hold: None,
7938 for_query: None,
7939 }],
7940 })
7941 }
7942
    /// Parse a Snowflake `DECLARE` block: one or more semicolon-separated
    /// declarations, each one of
    /// `name CURSOR FOR <query-or-expr>`, `name RESULTSET [:= expr]`,
    /// `name EXCEPTION [(...)]`, or `name [data_type] [DEFAULT|:= expr]`.
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` carries a query; anything else
                    // after FOR is treated as an expression.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // RESULTSET may carry an initializer unless the
                    // declaration ends immediately with `;`.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // EXCEPTION may carry a parenthesized definition.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: either `name := expr` / `name DEFAULT
                    // expr` with the type inferred, or `name data_type
                    // [initializer]`.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                // A non-keyword word after `;` starts another declaration;
                // a keyword means the DECLARE block is over, so put the `;`
                // back for the caller.
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8058
8059 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8071 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8072
8073 Ok(Statement::Declare { stmts })
8074 }
8075
8076 pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
8087 let name = {
8088 let ident = self.parse_identifier()?;
8089 if !ident.value.starts_with('@')
8090 && !matches!(
8091 &self.peek_token_ref().token,
8092 Token::Word(w) if w.keyword == Keyword::CURSOR
8093 )
8094 {
8095 Err(ParserError::TokenizerError(
8096 "Invalid MsSql variable declaration.".to_string(),
8097 ))
8098 } else {
8099 Ok(ident)
8100 }
8101 }?;
8102
8103 let (declare_type, data_type) = match &self.peek_token_ref().token {
8104 Token::Word(w) => match w.keyword {
8105 Keyword::CURSOR => {
8106 self.next_token();
8107 (Some(DeclareType::Cursor), None)
8108 }
8109 Keyword::AS => {
8110 self.next_token();
8111 (None, Some(self.parse_data_type()?))
8112 }
8113 _ => (None, Some(self.parse_data_type()?)),
8114 },
8115 _ => (None, Some(self.parse_data_type()?)),
8116 };
8117
8118 let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
8119 self.next_token();
8120 let query = Some(self.parse_query()?);
8121 (query, None)
8122 } else {
8123 let assignment = self.parse_mssql_variable_declaration_expression()?;
8124 (None, assignment)
8125 };
8126
8127 Ok(Declare {
8128 names: vec![name],
8129 data_type,
8130 assignment,
8131 declare_type,
8132 binary: None,
8133 sensitive: None,
8134 scroll: None,
8135 hold: None,
8136 for_query,
8137 })
8138 }
8139
8140 pub fn parse_snowflake_variable_declaration_expression(
8148 &mut self,
8149 ) -> Result<Option<DeclareAssignment>, ParserError> {
8150 Ok(match &self.peek_token_ref().token {
8151 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8152 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8154 }
8155 Token::Assignment => {
8156 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8158 self.parse_expr()?,
8159 )))
8160 }
8161 _ => None,
8162 })
8163 }
8164
8165 pub fn parse_mssql_variable_declaration_expression(
8172 &mut self,
8173 ) -> Result<Option<DeclareAssignment>, ParserError> {
8174 Ok(match &self.peek_token_ref().token {
8175 Token::Eq => {
8176 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8178 self.parse_expr()?,
8179 )))
8180 }
8181 _ => None,
8182 })
8183 }
8184
8185 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
8187 let direction = if self.parse_keyword(Keyword::NEXT) {
8188 FetchDirection::Next
8189 } else if self.parse_keyword(Keyword::PRIOR) {
8190 FetchDirection::Prior
8191 } else if self.parse_keyword(Keyword::FIRST) {
8192 FetchDirection::First
8193 } else if self.parse_keyword(Keyword::LAST) {
8194 FetchDirection::Last
8195 } else if self.parse_keyword(Keyword::ABSOLUTE) {
8196 FetchDirection::Absolute {
8197 limit: self.parse_number_value()?,
8198 }
8199 } else if self.parse_keyword(Keyword::RELATIVE) {
8200 FetchDirection::Relative {
8201 limit: self.parse_number_value()?,
8202 }
8203 } else if self.parse_keyword(Keyword::FORWARD) {
8204 if self.parse_keyword(Keyword::ALL) {
8205 FetchDirection::ForwardAll
8206 } else {
8207 FetchDirection::Forward {
8208 limit: Some(self.parse_number_value()?),
8210 }
8211 }
8212 } else if self.parse_keyword(Keyword::BACKWARD) {
8213 if self.parse_keyword(Keyword::ALL) {
8214 FetchDirection::BackwardAll
8215 } else {
8216 FetchDirection::Backward {
8217 limit: Some(self.parse_number_value()?),
8219 }
8220 }
8221 } else if self.parse_keyword(Keyword::ALL) {
8222 FetchDirection::All
8223 } else {
8224 FetchDirection::Count {
8225 limit: self.parse_number_value()?,
8226 }
8227 };
8228
8229 let position = if self.peek_keyword(Keyword::FROM) {
8230 self.expect_keyword(Keyword::FROM)?;
8231 FetchPosition::From
8232 } else if self.peek_keyword(Keyword::IN) {
8233 self.expect_keyword(Keyword::IN)?;
8234 FetchPosition::In
8235 } else {
8236 return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
8237 };
8238
8239 let name = self.parse_identifier()?;
8240
8241 let into = if self.parse_keyword(Keyword::INTO) {
8242 Some(self.parse_object_name(false)?)
8243 } else {
8244 None
8245 };
8246
8247 Ok(Statement::Fetch {
8248 name,
8249 direction,
8250 position,
8251 into,
8252 })
8253 }
8254
8255 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8257 let object_type = if self.parse_keyword(Keyword::ALL) {
8258 DiscardObject::ALL
8259 } else if self.parse_keyword(Keyword::PLANS) {
8260 DiscardObject::PLANS
8261 } else if self.parse_keyword(Keyword::SEQUENCES) {
8262 DiscardObject::SEQUENCES
8263 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8264 DiscardObject::TEMP
8265 } else {
8266 return self.expected_ref(
8267 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8268 self.peek_token_ref(),
8269 );
8270 };
8271 Ok(Statement::Discard { object_type })
8272 }
8273
8274 pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
8276 let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
8277 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8278
8279 let mut using = None;
8280
8281 let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
8282 let index_name = self.parse_object_name(false)?;
8283 using = self.parse_optional_using_then_index_type()?;
8285 self.expect_keyword_is(Keyword::ON)?;
8286 Some(index_name)
8287 } else {
8288 None
8289 };
8290
8291 let table_name = self.parse_object_name(false)?;
8292
8293 using = self.parse_optional_using_then_index_type()?.or(using);
8296
8297 let columns = self.parse_parenthesized_index_column_list()?;
8298
8299 let include = if self.parse_keyword(Keyword::INCLUDE) {
8300 self.expect_token(&Token::LParen)?;
8301 let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
8302 self.expect_token(&Token::RParen)?;
8303 columns
8304 } else {
8305 vec![]
8306 };
8307
8308 let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
8309 let not = self.parse_keyword(Keyword::NOT);
8310 self.expect_keyword_is(Keyword::DISTINCT)?;
8311 Some(!not)
8312 } else {
8313 None
8314 };
8315
8316 let with = if self.dialect.supports_create_index_with_clause()
8317 && self.parse_keyword(Keyword::WITH)
8318 {
8319 self.expect_token(&Token::LParen)?;
8320 let with_params = self.parse_comma_separated(Parser::parse_expr)?;
8321 self.expect_token(&Token::RParen)?;
8322 with_params
8323 } else {
8324 Vec::new()
8325 };
8326
8327 let predicate = if self.parse_keyword(Keyword::WHERE) {
8328 Some(self.parse_expr()?)
8329 } else {
8330 None
8331 };
8332
8333 let index_options = self.parse_index_options()?;
8339
8340 let mut alter_options = Vec::new();
8342 while self
8343 .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
8344 .is_some()
8345 {
8346 alter_options.push(self.parse_alter_table_operation()?)
8347 }
8348
8349 Ok(CreateIndex {
8350 name: index_name,
8351 table_name,
8352 using,
8353 columns,
8354 unique,
8355 concurrently,
8356 if_not_exists,
8357 include,
8358 nulls_distinct,
8359 with,
8360 predicate,
8361 index_options,
8362 alter_options,
8363 })
8364 }
8365
8366 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8368 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8369 let name = self.parse_identifier()?;
8370
8371 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8372 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8373 Some(self.parse_identifier()?)
8374 } else {
8375 None
8376 };
8377
8378 let version = if self.parse_keyword(Keyword::VERSION) {
8379 Some(self.parse_identifier()?)
8380 } else {
8381 None
8382 };
8383
8384 let cascade = self.parse_keyword(Keyword::CASCADE);
8385
8386 (schema, version, cascade)
8387 } else {
8388 (None, None, false)
8389 };
8390
8391 Ok(CreateExtension {
8392 name,
8393 if_not_exists,
8394 schema,
8395 version,
8396 cascade,
8397 })
8398 }
8399
8400 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8402 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8403 let name = self.parse_object_name(false)?;
8404
8405 let definition = if self.parse_keyword(Keyword::FROM) {
8406 CreateCollationDefinition::From(self.parse_object_name(false)?)
8407 } else if self.consume_token(&Token::LParen) {
8408 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8409 self.expect_token(&Token::RParen)?;
8410 CreateCollationDefinition::Options(options)
8411 } else {
8412 return self.expected_ref(
8413 "FROM or parenthesized option list after CREATE COLLATION name",
8414 self.peek_token_ref(),
8415 );
8416 };
8417
8418 Ok(CreateCollation {
8419 if_not_exists,
8420 name,
8421 definition,
8422 })
8423 }
8424
8425 pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
8427 if self.parse_keyword(Keyword::CONFIGURATION) {
8428 let name = self.parse_object_name(false)?;
8429 self.expect_token(&Token::LParen)?;
8430 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8431 self.expect_token(&Token::RParen)?;
8432 Ok(Statement::CreateTextSearchConfiguration(
8433 CreateTextSearchConfiguration { name, options },
8434 ))
8435 } else if self.parse_keyword(Keyword::DICTIONARY) {
8436 let name = self.parse_object_name(false)?;
8437 self.expect_token(&Token::LParen)?;
8438 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8439 self.expect_token(&Token::RParen)?;
8440 Ok(Statement::CreateTextSearchDictionary(
8441 CreateTextSearchDictionary { name, options },
8442 ))
8443 } else if self.parse_keyword(Keyword::PARSER) {
8444 let name = self.parse_object_name(false)?;
8445 self.expect_token(&Token::LParen)?;
8446 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8447 self.expect_token(&Token::RParen)?;
8448 Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
8449 name,
8450 options,
8451 }))
8452 } else if self.parse_keyword(Keyword::TEMPLATE) {
8453 let name = self.parse_object_name(false)?;
8454 self.expect_token(&Token::LParen)?;
8455 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8456 self.expect_token(&Token::RParen)?;
8457 Ok(Statement::CreateTextSearchTemplate(
8458 CreateTextSearchTemplate { name, options },
8459 ))
8460 } else {
8461 self.expected_ref(
8462 "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
8463 self.peek_token_ref(),
8464 )
8465 }
8466 }
8467
8468 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8470 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8471 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8472 let cascade_or_restrict =
8473 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8474 Ok(Statement::DropExtension(DropExtension {
8475 names,
8476 if_exists,
8477 cascade_or_restrict: cascade_or_restrict
8478 .map(|k| match k {
8479 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8480 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8481 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8482 })
8483 .transpose()?,
8484 }))
8485 }
8486
8487 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8490 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8491 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8492 let drop_behavior = self.parse_optional_drop_behavior();
8493 Ok(Statement::DropOperator(DropOperator {
8494 if_exists,
8495 operators,
8496 drop_behavior,
8497 }))
8498 }
8499
8500 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8503 let name = self.parse_operator_name()?;
8504 self.expect_token(&Token::LParen)?;
8505
8506 let left_type = if self.parse_keyword(Keyword::NONE) {
8508 None
8509 } else {
8510 Some(self.parse_data_type()?)
8511 };
8512
8513 self.expect_token(&Token::Comma)?;
8514
8515 let right_type = self.parse_data_type()?;
8517
8518 self.expect_token(&Token::RParen)?;
8519
8520 Ok(DropOperatorSignature {
8521 name,
8522 left_type,
8523 right_type,
8524 })
8525 }
8526
8527 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8531 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8532 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8533 self.expect_keyword(Keyword::USING)?;
8534 let using = self.parse_identifier()?;
8535 let drop_behavior = self.parse_optional_drop_behavior();
8536 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8537 if_exists,
8538 names,
8539 using,
8540 drop_behavior,
8541 }))
8542 }
8543
8544 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8548 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8549 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8550 self.expect_keyword(Keyword::USING)?;
8551 let using = self.parse_identifier()?;
8552 let drop_behavior = self.parse_optional_drop_behavior();
8553 Ok(Statement::DropOperatorClass(DropOperatorClass {
8554 if_exists,
8555 names,
8556 using,
8557 drop_behavior,
8558 }))
8559 }
8560
8561 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8565 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8566 self.expect_token(&Token::LParen)?;
8567 let columns =
8568 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8569 self.expect_token(&Token::RParen)?;
8570 Ok(HiveDistributionStyle::PARTITIONED { columns })
8571 } else {
8572 Ok(HiveDistributionStyle::NONE)
8573 }
8574 }
8575
8576 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8580 let token = self.next_token();
8581 match &token.token {
8582 Token::Word(w) => match w.keyword {
8583 Keyword::AUTO => Ok(DistStyle::Auto),
8584 Keyword::EVEN => Ok(DistStyle::Even),
8585 Keyword::KEY => Ok(DistStyle::Key),
8586 Keyword::ALL => Ok(DistStyle::All),
8587 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8588 },
8589 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8590 }
8591 }
8592
8593 pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
8595 let mut hive_format: Option<HiveFormat> = None;
8596 loop {
8597 match self.parse_one_of_keywords(&[
8598 Keyword::ROW,
8599 Keyword::STORED,
8600 Keyword::LOCATION,
8601 Keyword::WITH,
8602 ]) {
8603 Some(Keyword::ROW) => {
8604 hive_format
8605 .get_or_insert_with(HiveFormat::default)
8606 .row_format = Some(self.parse_row_format()?);
8607 }
8608 Some(Keyword::STORED) => {
8609 self.expect_keyword_is(Keyword::AS)?;
8610 if self.parse_keyword(Keyword::INPUTFORMAT) {
8611 let input_format = self.parse_expr()?;
8612 self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
8613 let output_format = self.parse_expr()?;
8614 hive_format.get_or_insert_with(HiveFormat::default).storage =
8615 Some(HiveIOFormat::IOF {
8616 input_format,
8617 output_format,
8618 });
8619 } else {
8620 let format = self.parse_file_format()?;
8621 hive_format.get_or_insert_with(HiveFormat::default).storage =
8622 Some(HiveIOFormat::FileFormat { format });
8623 }
8624 }
8625 Some(Keyword::LOCATION) => {
8626 hive_format.get_or_insert_with(HiveFormat::default).location =
8627 Some(self.parse_literal_string()?);
8628 }
8629 Some(Keyword::WITH) => {
8630 self.prev_token();
8631 let properties = self
8632 .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
8633 if !properties.is_empty() {
8634 hive_format
8635 .get_or_insert_with(HiveFormat::default)
8636 .serde_properties = Some(properties);
8637 } else {
8638 break;
8639 }
8640 }
8641 None => break,
8642 _ => break,
8643 }
8644 }
8645
8646 Ok(hive_format)
8647 }
8648
8649 pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
8651 self.expect_keyword_is(Keyword::FORMAT)?;
8652 match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
8653 Some(Keyword::SERDE) => {
8654 let class = self.parse_literal_string()?;
8655 Ok(HiveRowFormat::SERDE { class })
8656 }
8657 _ => {
8658 let mut row_delimiters = vec![];
8659
8660 loop {
8661 match self.parse_one_of_keywords(&[
8662 Keyword::FIELDS,
8663 Keyword::COLLECTION,
8664 Keyword::MAP,
8665 Keyword::LINES,
8666 Keyword::NULL,
8667 ]) {
8668 Some(Keyword::FIELDS) => {
8669 if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
8670 row_delimiters.push(HiveRowDelimiter {
8671 delimiter: HiveDelimiter::FieldsTerminatedBy,
8672 char: self.parse_identifier()?,
8673 });
8674
8675 if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
8676 row_delimiters.push(HiveRowDelimiter {
8677 delimiter: HiveDelimiter::FieldsEscapedBy,
8678 char: self.parse_identifier()?,
8679 });
8680 }
8681 } else {
8682 break;
8683 }
8684 }
8685 Some(Keyword::COLLECTION) => {
8686 if self.parse_keywords(&[
8687 Keyword::ITEMS,
8688 Keyword::TERMINATED,
8689 Keyword::BY,
8690 ]) {
8691 row_delimiters.push(HiveRowDelimiter {
8692 delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
8693 char: self.parse_identifier()?,
8694 });
8695 } else {
8696 break;
8697 }
8698 }
8699 Some(Keyword::MAP) => {
8700 if self.parse_keywords(&[
8701 Keyword::KEYS,
8702 Keyword::TERMINATED,
8703 Keyword::BY,
8704 ]) {
8705 row_delimiters.push(HiveRowDelimiter {
8706 delimiter: HiveDelimiter::MapKeysTerminatedBy,
8707 char: self.parse_identifier()?,
8708 });
8709 } else {
8710 break;
8711 }
8712 }
8713 Some(Keyword::LINES) => {
8714 if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
8715 row_delimiters.push(HiveRowDelimiter {
8716 delimiter: HiveDelimiter::LinesTerminatedBy,
8717 char: self.parse_identifier()?,
8718 });
8719 } else {
8720 break;
8721 }
8722 }
8723 Some(Keyword::NULL) => {
8724 if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
8725 row_delimiters.push(HiveRowDelimiter {
8726 delimiter: HiveDelimiter::NullDefinedAs,
8727 char: self.parse_identifier()?,
8728 });
8729 } else {
8730 break;
8731 }
8732 }
8733 _ => {
8734 break;
8735 }
8736 }
8737 }
8738
8739 Ok(HiveRowFormat::DELIMITED {
8740 delimiters: row_delimiters,
8741 })
8742 }
8743 }
8744 }
8745
8746 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8747 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8748 Ok(Some(self.parse_identifier()?))
8749 } else {
8750 Ok(None)
8751 }
8752 }
8753
8754 pub fn parse_create_table(
8756 &mut self,
8757 or_replace: bool,
8758 temporary: bool,
8759 global: Option<bool>,
8760 transient: bool,
8761 ) -> Result<CreateTable, ParserError> {
8762 let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
8763 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8764 let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
8765
8766 let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
8776 Some(self.parse_object_name(allow_unquoted_hyphen)?)
8777 } else {
8778 None
8779 };
8780
8781 let on_cluster = self.parse_optional_on_cluster()?;
8783
8784 let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
8785
8786 let clone = if self.parse_keyword(Keyword::CLONE) {
8787 self.parse_object_name(allow_unquoted_hyphen).ok()
8788 } else {
8789 None
8790 };
8791
8792 let (columns, constraints) = self.parse_columns()?;
8794 let comment_after_column_def =
8795 if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
8796 let next_token = self.next_token();
8797 match next_token.token {
8798 Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
8799 _ => self.expected("comment", next_token)?,
8800 }
8801 } else {
8802 None
8803 };
8804
8805 let for_values = if partition_of.is_some() {
8807 if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
8808 Some(self.parse_partition_for_values()?)
8809 } else {
8810 return self.expected_ref(
8811 "FOR VALUES or DEFAULT after PARTITION OF",
8812 self.peek_token_ref(),
8813 );
8814 }
8815 } else {
8816 None
8817 };
8818
8819 let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
8821
8822 let hive_distribution = self.parse_hive_distribution()?;
8823 let clustered_by = self.parse_optional_clustered_by()?;
8824 let hive_formats = self.parse_hive_formats()?;
8825
8826 let create_table_config = self.parse_optional_create_table_config()?;
8827
8828 let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
8831 && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
8832 {
8833 Some(Box::new(self.parse_expr()?))
8834 } else {
8835 None
8836 };
8837
8838 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
8839 if self.consume_token(&Token::LParen) {
8840 let columns = if self.peek_token_ref().token != Token::RParen {
8841 self.parse_comma_separated(|p| p.parse_expr())?
8842 } else {
8843 vec![]
8844 };
8845 self.expect_token(&Token::RParen)?;
8846 Some(OneOrManyWithParens::Many(columns))
8847 } else {
8848 Some(OneOrManyWithParens::One(self.parse_expr()?))
8849 }
8850 } else {
8851 None
8852 };
8853
8854 let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
8855 Some(self.parse_create_table_on_commit()?)
8856 } else {
8857 None
8858 };
8859
8860 let strict = self.parse_keyword(Keyword::STRICT);
8861
8862 let backup = if self.parse_keyword(Keyword::BACKUP) {
8864 let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
8865 Some(keyword == Keyword::YES)
8866 } else {
8867 None
8868 };
8869
8870 let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
8872 Some(self.parse_dist_style()?)
8873 } else {
8874 None
8875 };
8876 let distkey = if self.parse_keyword(Keyword::DISTKEY) {
8877 self.expect_token(&Token::LParen)?;
8878 let expr = self.parse_expr()?;
8879 self.expect_token(&Token::RParen)?;
8880 Some(expr)
8881 } else {
8882 None
8883 };
8884 let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
8885 self.expect_token(&Token::LParen)?;
8886 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
8887 self.expect_token(&Token::RParen)?;
8888 Some(columns)
8889 } else {
8890 None
8891 };
8892
8893 let query = if self.parse_keyword(Keyword::AS) {
8895 Some(self.parse_query()?)
8896 } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
8897 {
8898 self.prev_token();
8900 Some(self.parse_query()?)
8901 } else {
8902 None
8903 };
8904
8905 Ok(CreateTableBuilder::new(table_name)
8906 .temporary(temporary)
8907 .columns(columns)
8908 .constraints(constraints)
8909 .or_replace(or_replace)
8910 .if_not_exists(if_not_exists)
8911 .transient(transient)
8912 .hive_distribution(hive_distribution)
8913 .hive_formats(hive_formats)
8914 .global(global)
8915 .query(query)
8916 .without_rowid(without_rowid)
8917 .like(like)
8918 .clone_clause(clone)
8919 .comment_after_column_def(comment_after_column_def)
8920 .order_by(order_by)
8921 .on_commit(on_commit)
8922 .on_cluster(on_cluster)
8923 .clustered_by(clustered_by)
8924 .partition_by(create_table_config.partition_by)
8925 .cluster_by(create_table_config.cluster_by)
8926 .inherits(create_table_config.inherits)
8927 .partition_of(partition_of)
8928 .for_values(for_values)
8929 .table_options(create_table_config.table_options)
8930 .primary_key(primary_key)
8931 .strict(strict)
8932 .backup(backup)
8933 .diststyle(diststyle)
8934 .distkey(distkey)
8935 .sortkey(sortkey)
8936 .build())
8937 }
8938
    /// Parse an optional `LIKE` clause of `CREATE TABLE`.
    ///
    /// Two syntactic forms are handled:
    /// * parenthesized `(LIKE <table> [{INCLUDING | EXCLUDING} DEFAULTS])` —
    ///   only when the dialect reports
    ///   `supports_create_table_like_parenthesized()`;
    /// * plain `LIKE <table>` or `ILIKE <table>`.
    ///
    /// Returns `Ok(None)` when no LIKE clause is present, rewinding any `(`
    /// that was speculatively consumed.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The `(` we consumed starts something else (e.g. the column
                // list) — push it back and report "no LIKE clause".
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8976
8977 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8978 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8979 Ok(OnCommit::DeleteRows)
8980 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8981 Ok(OnCommit::PreserveRows)
8982 } else if self.parse_keywords(&[Keyword::DROP]) {
8983 Ok(OnCommit::Drop)
8984 } else {
8985 parser_err!(
8986 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8987 self.peek_token_ref()
8988 )
8989 }
8990 }
8991
8992 fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
8998 if self.parse_keyword(Keyword::DEFAULT) {
8999 return Ok(ForValues::Default);
9000 }
9001
9002 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9003
9004 if self.parse_keyword(Keyword::IN) {
9005 self.expect_token(&Token::LParen)?;
9007 if self.peek_token_ref().token == Token::RParen {
9008 return self.expected_ref("at least one value", self.peek_token_ref());
9009 }
9010 let values = self.parse_comma_separated(Parser::parse_expr)?;
9011 self.expect_token(&Token::RParen)?;
9012 Ok(ForValues::In(values))
9013 } else if self.parse_keyword(Keyword::FROM) {
9014 self.expect_token(&Token::LParen)?;
9016 if self.peek_token_ref().token == Token::RParen {
9017 return self.expected_ref("at least one value", self.peek_token_ref());
9018 }
9019 let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
9020 self.expect_token(&Token::RParen)?;
9021 self.expect_keyword(Keyword::TO)?;
9022 self.expect_token(&Token::LParen)?;
9023 if self.peek_token_ref().token == Token::RParen {
9024 return self.expected_ref("at least one value", self.peek_token_ref());
9025 }
9026 let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
9027 self.expect_token(&Token::RParen)?;
9028 Ok(ForValues::From { from, to })
9029 } else if self.parse_keyword(Keyword::WITH) {
9030 self.expect_token(&Token::LParen)?;
9032 self.expect_keyword(Keyword::MODULUS)?;
9033 let modulus = self.parse_literal_uint()?;
9034 self.expect_token(&Token::Comma)?;
9035 self.expect_keyword(Keyword::REMAINDER)?;
9036 let remainder = self.parse_literal_uint()?;
9037 self.expect_token(&Token::RParen)?;
9038 Ok(ForValues::With { modulus, remainder })
9039 } else {
9040 self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
9041 }
9042 }
9043
9044 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9046 if self.parse_keyword(Keyword::MINVALUE) {
9047 Ok(PartitionBoundValue::MinValue)
9048 } else if self.parse_keyword(Keyword::MAXVALUE) {
9049 Ok(PartitionBoundValue::MaxValue)
9050 } else {
9051 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9052 }
9053 }
9054
9055 fn parse_optional_create_table_config(
9061 &mut self,
9062 ) -> Result<CreateTableConfiguration, ParserError> {
9063 let mut table_options = CreateTableOptions::None;
9064
9065 let inherits = if self.parse_keyword(Keyword::INHERITS) {
9066 Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
9067 } else {
9068 None
9069 };
9070
9071 let with_options = self.parse_options(Keyword::WITH)?;
9073 if !with_options.is_empty() {
9074 table_options = CreateTableOptions::With(with_options)
9075 }
9076
9077 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
9078 if !table_properties.is_empty() {
9079 table_options = CreateTableOptions::TableProperties(table_properties);
9080 }
9081 let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
9082 && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
9083 {
9084 Some(Box::new(self.parse_expr()?))
9085 } else {
9086 None
9087 };
9088
9089 let mut cluster_by = None;
9090 if dialect_of!(self is BigQueryDialect | GenericDialect) {
9091 if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9092 cluster_by = Some(WrappedCollection::NoWrapping(
9093 self.parse_comma_separated(|p| p.parse_expr())?,
9094 ));
9095 };
9096
9097 if let Token::Word(word) = &self.peek_token_ref().token {
9098 if word.keyword == Keyword::OPTIONS {
9099 table_options =
9100 CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
9101 }
9102 };
9103 }
9104
9105 if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
9106 let plain_options = self.parse_plain_options()?;
9107 if !plain_options.is_empty() {
9108 table_options = CreateTableOptions::Plain(plain_options)
9109 }
9110 };
9111
9112 Ok(CreateTableConfiguration {
9113 partition_by,
9114 cluster_by,
9115 inherits,
9116 table_options,
9117 })
9118 }
9119
    /// Parse a single plain (key/value style) `CREATE TABLE` option such as
    /// `ENGINE = InnoDB`, `COMMENT 'x'`, `TABLESPACE ts`, `UNION = (t1, t2)`,
    /// `START TRANSACTION`, or a generic `<KEY> [=] <value>` pair.
    ///
    /// Returns `Ok(None)` when the next tokens do not start any known option.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // Bare `START TRANSACTION` recorded as an identifier option.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // `COMMENT [=] '<text>'` — whether `=` was present is preserved in
        // the AST via WithEq/WithoutEq.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // `ENGINE [=] <name> [(param [, ...])]`.
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    // Optional parenthesized engine parameters.
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // `TABLESPACE [=] <name> [STORAGE [=] {DISK | MEMORY}]`; the name may
        // be a bare word or a single-quoted string.
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                // Storage type is matched case-insensitively.
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // `UNION [=] (table [, ...])`.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Generic `<KEY> [=] <value>` options: recognize the key (multi-word
        // keys first so e.g. `DEFAULT CHARSET` wins over `CHARSET`), then
        // parse the value below.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option — nothing consumed, report "no option".
            return Ok(None);
        };

        // The `=` between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // Value: try a literal value first, fall back to a bare identifier.
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9312
9313 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9315 let mut options = Vec::new();
9316
9317 while let Some(option) = self.parse_plain_option()? {
9318 options.push(option);
9319 let _ = self.consume_token(&Token::Comma);
9322 }
9323
9324 Ok(options)
9325 }
9326
9327 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9329 let comment = if self.parse_keyword(Keyword::COMMENT) {
9330 let has_eq = self.consume_token(&Token::Eq);
9331 let comment = self.parse_comment_value()?;
9332 Some(if has_eq {
9333 CommentDef::WithEq(comment)
9334 } else {
9335 CommentDef::WithoutEq(comment)
9336 })
9337 } else {
9338 None
9339 };
9340 Ok(comment)
9341 }
9342
9343 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9345 let next_token = self.next_token();
9346 let value = match next_token.token {
9347 Token::SingleQuotedString(str) => str,
9348 Token::DollarQuotedString(str) => str.value,
9349 _ => self.expected("string literal", next_token)?,
9350 };
9351 Ok(value)
9352 }
9353
9354 pub fn parse_optional_procedure_parameters(
9356 &mut self,
9357 ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
9358 let mut params = vec![];
9359 if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
9360 return Ok(Some(params));
9361 }
9362 loop {
9363 if let Token::Word(_) = &self.peek_token_ref().token {
9364 params.push(self.parse_procedure_param()?)
9365 }
9366 let comma = self.consume_token(&Token::Comma);
9367 if self.consume_token(&Token::RParen) {
9368 break;
9370 } else if !comma {
9371 return self.expected_ref(
9372 "',' or ')' after parameter definition",
9373 self.peek_token_ref(),
9374 );
9375 }
9376 }
9377 Ok(Some(params))
9378 }
9379
9380 pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
9382 let mut columns = vec![];
9383 let mut constraints = vec![];
9384 if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
9385 return Ok((columns, constraints));
9386 }
9387
9388 loop {
9389 if let Some(constraint) = self.parse_optional_table_constraint()? {
9390 constraints.push(constraint);
9391 } else if let Token::Word(_) = &self.peek_token_ref().token {
9392 columns.push(self.parse_column_def()?);
9393 } else {
9394 return self.expected_ref(
9395 "column name or constraint definition",
9396 self.peek_token_ref(),
9397 );
9398 }
9399
9400 let comma = self.consume_token(&Token::Comma);
9401 let rparen = self.peek_token_ref().token == Token::RParen;
9402
9403 if !comma && !rparen {
9404 return self
9405 .expected_ref("',' or ')' after column definition", self.peek_token_ref());
9406 };
9407
9408 if rparen
9409 && (!comma
9410 || self.dialect.supports_column_definition_trailing_commas()
9411 || self.options.trailing_commas)
9412 {
9413 let _ = self.consume_token(&Token::RParen);
9414 break;
9415 }
9416 }
9417
9418 Ok((columns, constraints))
9419 }
9420
9421 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9423 let mode = if self.parse_keyword(Keyword::IN) {
9424 Some(ArgMode::In)
9425 } else if self.parse_keyword(Keyword::OUT) {
9426 Some(ArgMode::Out)
9427 } else if self.parse_keyword(Keyword::INOUT) {
9428 Some(ArgMode::InOut)
9429 } else {
9430 None
9431 };
9432 let name = self.parse_identifier()?;
9433 let data_type = self.parse_data_type()?;
9434 let default = if self.consume_token(&Token::Eq) {
9435 Some(self.parse_expr()?)
9436 } else {
9437 None
9438 };
9439
9440 Ok(ProcedureParam {
9441 name,
9442 data_type,
9443 mode,
9444 default,
9445 })
9446 }
9447
9448 pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
9450 self.parse_column_def_inner(false)
9451 }
9452
9453 fn parse_column_def_inner(
9454 &mut self,
9455 optional_data_type: bool,
9456 ) -> Result<ColumnDef, ParserError> {
9457 let col_name = self.parse_identifier()?;
9458 let data_type = if self.is_column_type_sqlite_unspecified() {
9459 DataType::Unspecified
9460 } else if optional_data_type {
9461 self.maybe_parse(|parser| parser.parse_data_type())?
9462 .unwrap_or(DataType::Unspecified)
9463 } else {
9464 self.parse_data_type()?
9465 };
9466 let mut options = vec![];
9467 loop {
9468 if self.parse_keyword(Keyword::CONSTRAINT) {
9469 let name = Some(self.parse_identifier()?);
9470 if let Some(option) = self.parse_optional_column_option()? {
9471 options.push(ColumnOptionDef { name, option });
9472 } else {
9473 return self.expected_ref(
9474 "constraint details after CONSTRAINT <name>",
9475 self.peek_token_ref(),
9476 );
9477 }
9478 } else if let Some(option) = self.parse_optional_column_option()? {
9479 options.push(ColumnOptionDef { name: None, option });
9480 } else {
9481 break;
9482 };
9483 }
9484 Ok(ColumnDef {
9485 name: col_name,
9486 data_type,
9487 options,
9488 })
9489 }
9490
9491 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9492 if dialect_of!(self is SQLiteDialect) {
9493 match &self.peek_token_ref().token {
9494 Token::Word(word) => matches!(
9495 word.keyword,
9496 Keyword::CONSTRAINT
9497 | Keyword::PRIMARY
9498 | Keyword::NOT
9499 | Keyword::UNIQUE
9500 | Keyword::CHECK
9501 | Keyword::DEFAULT
9502 | Keyword::COLLATE
9503 | Keyword::REFERENCES
9504 | Keyword::GENERATED
9505 | Keyword::AS
9506 ),
9507 _ => true, }
9509 } else {
9510 false
9511 }
9512 }
9513
9514 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9516 if let Some(option) = self.dialect.parse_column_option(self)? {
9517 return option;
9518 }
9519
9520 self.with_state(
9521 ColumnDefinition,
9522 |parser| -> Result<Option<ColumnOption>, ParserError> {
9523 parser.parse_optional_column_option_inner()
9524 },
9525 )
9526 }
9527
9528 fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9529 if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
9530 Ok(Some(ColumnOption::CharacterSet(
9531 self.parse_object_name(false)?,
9532 )))
9533 } else if self.parse_keywords(&[Keyword::COLLATE]) {
9534 Ok(Some(ColumnOption::Collation(
9535 self.parse_object_name(false)?,
9536 )))
9537 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
9538 Ok(Some(ColumnOption::NotNull))
9539 } else if self.parse_keywords(&[Keyword::COMMENT]) {
9540 Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
9541 } else if self.parse_keyword(Keyword::NULL) {
9542 Ok(Some(ColumnOption::Null))
9543 } else if self.parse_keyword(Keyword::DEFAULT) {
9544 Ok(Some(ColumnOption::Default(self.parse_expr()?)))
9545 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9546 && self.parse_keyword(Keyword::MATERIALIZED)
9547 {
9548 Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
9549 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9550 && self.parse_keyword(Keyword::ALIAS)
9551 {
9552 Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
9553 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9554 && self.parse_keyword(Keyword::EPHEMERAL)
9555 {
9556 if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
9559 Ok(Some(ColumnOption::Ephemeral(None)))
9560 } else {
9561 Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
9562 }
9563 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9564 let characteristics = self.parse_constraint_characteristics()?;
9565 Ok(Some(
9566 PrimaryKeyConstraint {
9567 name: None,
9568 index_name: None,
9569 index_type: None,
9570 columns: vec![],
9571 index_options: vec![],
9572 characteristics,
9573 }
9574 .into(),
9575 ))
9576 } else if self.parse_keyword(Keyword::UNIQUE) {
9577 let index_type_display =
9578 if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
9579 KeyOrIndexDisplay::Key
9580 } else {
9581 KeyOrIndexDisplay::None
9582 };
9583 let characteristics = self.parse_constraint_characteristics()?;
9584 Ok(Some(
9585 UniqueConstraint {
9586 name: None,
9587 index_name: None,
9588 index_type_display,
9589 index_type: None,
9590 columns: vec![],
9591 index_options: vec![],
9592 characteristics,
9593 nulls_distinct: NullsDistinctOption::None,
9594 }
9595 .into(),
9596 ))
9597 } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
9598 let characteristics = self.parse_constraint_characteristics()?;
9601 Ok(Some(
9602 PrimaryKeyConstraint {
9603 name: None,
9604 index_name: None,
9605 index_type: None,
9606 columns: vec![],
9607 index_options: vec![],
9608 characteristics,
9609 }
9610 .into(),
9611 ))
9612 } else if self.parse_keyword(Keyword::REFERENCES) {
9613 let foreign_table = self.parse_object_name(false)?;
9614 let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9617 let mut match_kind = None;
9618 let mut on_delete = None;
9619 let mut on_update = None;
9620 loop {
9621 if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9622 match_kind = Some(self.parse_match_kind()?);
9623 } else if on_delete.is_none()
9624 && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9625 {
9626 on_delete = Some(self.parse_referential_action()?);
9627 } else if on_update.is_none()
9628 && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9629 {
9630 on_update = Some(self.parse_referential_action()?);
9631 } else {
9632 break;
9633 }
9634 }
9635 let characteristics = self.parse_constraint_characteristics()?;
9636
9637 Ok(Some(
9638 ForeignKeyConstraint {
9639 name: None, index_name: None, columns: vec![], foreign_table,
9643 referred_columns,
9644 on_delete,
9645 on_update,
9646 match_kind,
9647 characteristics,
9648 }
9649 .into(),
9650 ))
9651 } else if self.parse_keyword(Keyword::CHECK) {
9652 self.expect_token(&Token::LParen)?;
9653 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9655 self.expect_token(&Token::RParen)?;
9656
9657 let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9658 Some(true)
9659 } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9660 Some(false)
9661 } else {
9662 None
9663 };
9664
9665 Ok(Some(
9666 CheckConstraint {
9667 name: None, expr: Box::new(expr),
9669 enforced,
9670 }
9671 .into(),
9672 ))
9673 } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
9674 && dialect_of!(self is MySqlDialect | GenericDialect)
9675 {
9676 Ok(Some(ColumnOption::DialectSpecific(vec![
9678 Token::make_keyword("AUTO_INCREMENT"),
9679 ])))
9680 } else if self.parse_keyword(Keyword::AUTOINCREMENT)
9681 && dialect_of!(self is SQLiteDialect | GenericDialect)
9682 {
9683 Ok(Some(ColumnOption::DialectSpecific(vec![
9685 Token::make_keyword("AUTOINCREMENT"),
9686 ])))
9687 } else if self.parse_keyword(Keyword::ASC)
9688 && self.dialect.supports_asc_desc_in_column_definition()
9689 {
9690 Ok(Some(ColumnOption::DialectSpecific(vec![
9692 Token::make_keyword("ASC"),
9693 ])))
9694 } else if self.parse_keyword(Keyword::DESC)
9695 && self.dialect.supports_asc_desc_in_column_definition()
9696 {
9697 Ok(Some(ColumnOption::DialectSpecific(vec![
9699 Token::make_keyword("DESC"),
9700 ])))
9701 } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9702 && dialect_of!(self is MySqlDialect | GenericDialect)
9703 {
9704 let expr = self.parse_expr()?;
9705 Ok(Some(ColumnOption::OnUpdate(expr)))
9706 } else if self.parse_keyword(Keyword::GENERATED) {
9707 self.parse_optional_column_option_generated()
9708 } else if dialect_of!(self is BigQueryDialect | GenericDialect)
9709 && self.parse_keyword(Keyword::OPTIONS)
9710 {
9711 self.prev_token();
9712 Ok(Some(ColumnOption::Options(
9713 self.parse_options(Keyword::OPTIONS)?,
9714 )))
9715 } else if self.parse_keyword(Keyword::AS)
9716 && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
9717 {
9718 self.parse_optional_column_option_as()
9719 } else if self.parse_keyword(Keyword::SRID)
9720 && dialect_of!(self is MySqlDialect | GenericDialect)
9721 {
9722 Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
9723 } else if self.parse_keyword(Keyword::IDENTITY)
9724 && dialect_of!(self is MsSqlDialect | GenericDialect)
9725 {
9726 let parameters = if self.consume_token(&Token::LParen) {
9727 let seed = self.parse_number()?;
9728 self.expect_token(&Token::Comma)?;
9729 let increment = self.parse_number()?;
9730 self.expect_token(&Token::RParen)?;
9731
9732 Some(IdentityPropertyFormatKind::FunctionCall(
9733 IdentityParameters { seed, increment },
9734 ))
9735 } else {
9736 None
9737 };
9738 Ok(Some(ColumnOption::Identity(
9739 IdentityPropertyKind::Identity(IdentityProperty {
9740 parameters,
9741 order: None,
9742 }),
9743 )))
9744 } else if dialect_of!(self is SQLiteDialect | GenericDialect)
9745 && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
9746 {
9747 Ok(Some(ColumnOption::OnConflict(
9749 self.expect_one_of_keywords(&[
9750 Keyword::ROLLBACK,
9751 Keyword::ABORT,
9752 Keyword::FAIL,
9753 Keyword::IGNORE,
9754 Keyword::REPLACE,
9755 ])?,
9756 )))
9757 } else if self.parse_keyword(Keyword::INVISIBLE) {
9758 Ok(Some(ColumnOption::Invisible))
9759 } else {
9760 Ok(None)
9761 }
9762 }
9763
9764 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9765 let name = self.parse_object_name(false)?;
9766 self.expect_token(&Token::Eq)?;
9767 let value = self.parse_literal_string()?;
9768
9769 Ok(Tag::new(name, value))
9770 }
9771
9772 fn parse_optional_column_option_generated(
9773 &mut self,
9774 ) -> Result<Option<ColumnOption>, ParserError> {
9775 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
9776 let mut sequence_options = vec![];
9777 if self.expect_token(&Token::LParen).is_ok() {
9778 sequence_options = self.parse_create_sequence_options()?;
9779 self.expect_token(&Token::RParen)?;
9780 }
9781 Ok(Some(ColumnOption::Generated {
9782 generated_as: GeneratedAs::Always,
9783 sequence_options: Some(sequence_options),
9784 generation_expr: None,
9785 generation_expr_mode: None,
9786 generated_keyword: true,
9787 }))
9788 } else if self.parse_keywords(&[
9789 Keyword::BY,
9790 Keyword::DEFAULT,
9791 Keyword::AS,
9792 Keyword::IDENTITY,
9793 ]) {
9794 let mut sequence_options = vec![];
9795 if self.expect_token(&Token::LParen).is_ok() {
9796 sequence_options = self.parse_create_sequence_options()?;
9797 self.expect_token(&Token::RParen)?;
9798 }
9799 Ok(Some(ColumnOption::Generated {
9800 generated_as: GeneratedAs::ByDefault,
9801 sequence_options: Some(sequence_options),
9802 generation_expr: None,
9803 generation_expr_mode: None,
9804 generated_keyword: true,
9805 }))
9806 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
9807 if self.expect_token(&Token::LParen).is_ok() {
9808 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9809 self.expect_token(&Token::RParen)?;
9810 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9811 Ok((
9812 GeneratedAs::ExpStored,
9813 Some(GeneratedExpressionMode::Stored),
9814 ))
9815 } else if dialect_of!(self is PostgreSqlDialect) {
9816 self.expected_ref("STORED", self.peek_token_ref())
9818 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9819 Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
9820 } else {
9821 Ok((GeneratedAs::Always, None))
9822 }?;
9823
9824 Ok(Some(ColumnOption::Generated {
9825 generated_as: gen_as,
9826 sequence_options: None,
9827 generation_expr: Some(expr),
9828 generation_expr_mode: expr_mode,
9829 generated_keyword: true,
9830 }))
9831 } else {
9832 Ok(None)
9833 }
9834 } else {
9835 Ok(None)
9836 }
9837 }
9838
9839 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9840 self.expect_token(&Token::LParen)?;
9842 let expr = self.parse_expr()?;
9843 self.expect_token(&Token::RParen)?;
9844
9845 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9846 (
9847 GeneratedAs::ExpStored,
9848 Some(GeneratedExpressionMode::Stored),
9849 )
9850 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9851 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9852 } else {
9853 (GeneratedAs::Always, None)
9854 };
9855
9856 Ok(Some(ColumnOption::Generated {
9857 generated_as: gen_as,
9858 sequence_options: None,
9859 generation_expr: Some(expr),
9860 generation_expr_mode: expr_mode,
9861 generated_keyword: false,
9862 }))
9863 }
9864
9865 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9867 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9868 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9869 {
9870 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9871
9872 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9873 self.expect_token(&Token::LParen)?;
9874 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9875 self.expect_token(&Token::RParen)?;
9876 Some(sorted_by_columns)
9877 } else {
9878 None
9879 };
9880
9881 self.expect_keyword_is(Keyword::INTO)?;
9882 let num_buckets = self.parse_number_value()?.value;
9883 self.expect_keyword_is(Keyword::BUCKETS)?;
9884 Some(ClusteredBy {
9885 columns,
9886 sorted_by,
9887 num_buckets,
9888 })
9889 } else {
9890 None
9891 };
9892 Ok(clustered_by)
9893 }
9894
9895 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9899 if self.parse_keyword(Keyword::RESTRICT) {
9900 Ok(ReferentialAction::Restrict)
9901 } else if self.parse_keyword(Keyword::CASCADE) {
9902 Ok(ReferentialAction::Cascade)
9903 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9904 Ok(ReferentialAction::SetNull)
9905 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9906 Ok(ReferentialAction::NoAction)
9907 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9908 Ok(ReferentialAction::SetDefault)
9909 } else {
9910 self.expected_ref(
9911 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9912 self.peek_token_ref(),
9913 )
9914 }
9915 }
9916
9917 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9919 if self.parse_keyword(Keyword::FULL) {
9920 Ok(ConstraintReferenceMatchKind::Full)
9921 } else if self.parse_keyword(Keyword::PARTIAL) {
9922 Ok(ConstraintReferenceMatchKind::Partial)
9923 } else if self.parse_keyword(Keyword::SIMPLE) {
9924 Ok(ConstraintReferenceMatchKind::Simple)
9925 } else {
9926 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9927 }
9928 }
9929
9930 fn parse_constraint_using_index(
9933 &mut self,
9934 name: Option<Ident>,
9935 ) -> Result<ConstraintUsingIndex, ParserError> {
9936 let index_name = self.parse_identifier()?;
9937 let characteristics = self.parse_constraint_characteristics()?;
9938 Ok(ConstraintUsingIndex {
9939 name,
9940 index_name,
9941 characteristics,
9942 })
9943 }
9944
9945 pub fn parse_constraint_characteristics(
9947 &mut self,
9948 ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9949 let mut cc = ConstraintCharacteristics::default();
9950
9951 loop {
9952 if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9953 {
9954 cc.deferrable = Some(false);
9955 } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9956 cc.deferrable = Some(true);
9957 } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9958 if self.parse_keyword(Keyword::DEFERRED) {
9959 cc.initially = Some(DeferrableInitial::Deferred);
9960 } else if self.parse_keyword(Keyword::IMMEDIATE) {
9961 cc.initially = Some(DeferrableInitial::Immediate);
9962 } else {
9963 self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
9964 }
9965 } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
9966 cc.enforced = Some(true);
9967 } else if cc.enforced.is_none()
9968 && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
9969 {
9970 cc.enforced = Some(false);
9971 } else {
9972 break;
9973 }
9974 }
9975
9976 if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
9977 Ok(Some(cc))
9978 } else {
9979 Ok(None)
9980 }
9981 }
9982
    /// Parses an optional table-level constraint:
    /// `[CONSTRAINT [<name>]]` followed by `UNIQUE`, `PRIMARY KEY`,
    /// `FOREIGN KEY`, `CHECK`, `INDEX`/`KEY` (MySQL-style),
    /// `FULLTEXT`/`SPATIAL` (MySQL-style) or `EXCLUDE`.
    ///
    /// Returns `Ok(None)` — with no tokens consumed — when the next token
    /// does not start a constraint and no `CONSTRAINT` keyword was seen.
    /// Errors when `CONSTRAINT <name>` was consumed but no constraint body
    /// follows.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT [<name>]` prefix. Some dialects allow the
        // CONSTRAINT keyword without a name when a constraint body follows
        // immediately.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX <index_name>` references an existing
                // index instead of defining a column list.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // A trailing KEY/INDEX display word is only valid on
                // MySQL-like dialects.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                // Optional `NULLS [NOT] DISTINCT`.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                // Optional index name and `USING <index type>`.
                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX <index_name>` references an
                // existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // Optional index name and `USING <index type>`.
                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                // Referenced columns are optional (defaults to the foreign
                // table's primary key).
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order,
                // each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // Optional `[NOT] ENFORCED` suffix.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            // MySQL-style plain `INDEX`/`KEY` definition inside the column
            // list; only valid without a preceding constraint name.
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // A following USING keyword starts the index type, not the
                // index name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            // MySQL-style `FULLTEXT` / `SPATIAL` index definitions; these
            // cannot carry a constraint name.
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            // Postgres-style EXCLUDE constraint:
            // EXCLUDE [USING <method>] (<element> WITH <op>, ...)
            //   [INCLUDE (<cols>)] [WHERE (<predicate>)] [<characteristics>]
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements =
                    self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                // A named CONSTRAINT must be followed by a constraint body;
                // otherwise back up and report "no constraint here".
                if name.is_some() {
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10259
10260 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10261 let expr = self.parse_expr()?;
10262 self.expect_keyword_is(Keyword::WITH)?;
10263 let operator_token = self.next_token();
10264 let operator = operator_token.token.to_string();
10265 Ok(ExclusionElement { expr, operator })
10266 }
10267
10268 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10269 Ok(if self.parse_keyword(Keyword::NULLS) {
10270 let not = self.parse_keyword(Keyword::NOT);
10271 self.expect_keyword_is(Keyword::DISTINCT)?;
10272 if not {
10273 NullsDistinctOption::NotDistinct
10274 } else {
10275 NullsDistinctOption::Distinct
10276 }
10277 } else {
10278 NullsDistinctOption::None
10279 })
10280 }
10281
10282 pub fn maybe_parse_options(
10284 &mut self,
10285 keyword: Keyword,
10286 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10287 if let Token::Word(word) = &self.peek_token_ref().token {
10288 if word.keyword == keyword {
10289 return Ok(Some(self.parse_options(keyword)?));
10290 }
10291 };
10292 Ok(None)
10293 }
10294
10295 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10297 if self.parse_keyword(keyword) {
10298 self.expect_token(&Token::LParen)?;
10299 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10300 self.expect_token(&Token::RParen)?;
10301 Ok(options)
10302 } else {
10303 Ok(vec![])
10304 }
10305 }
10306
10307 pub fn parse_options_with_keywords(
10309 &mut self,
10310 keywords: &[Keyword],
10311 ) -> Result<Vec<SqlOption>, ParserError> {
10312 if self.parse_keywords(keywords) {
10313 self.expect_token(&Token::LParen)?;
10314 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10315 self.expect_token(&Token::RParen)?;
10316 Ok(options)
10317 } else {
10318 Ok(vec![])
10319 }
10320 }
10321
10322 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10324 Ok(if self.parse_keyword(Keyword::BTREE) {
10325 IndexType::BTree
10326 } else if self.parse_keyword(Keyword::HASH) {
10327 IndexType::Hash
10328 } else if self.parse_keyword(Keyword::GIN) {
10329 IndexType::GIN
10330 } else if self.parse_keyword(Keyword::GIST) {
10331 IndexType::GiST
10332 } else if self.parse_keyword(Keyword::SPGIST) {
10333 IndexType::SPGiST
10334 } else if self.parse_keyword(Keyword::BRIN) {
10335 IndexType::BRIN
10336 } else if self.parse_keyword(Keyword::BLOOM) {
10337 IndexType::Bloom
10338 } else {
10339 IndexType::Custom(self.parse_identifier()?)
10340 })
10341 }
10342
10343 pub fn parse_optional_using_then_index_type(
10350 &mut self,
10351 ) -> Result<Option<IndexType>, ParserError> {
10352 if self.parse_keyword(Keyword::USING) {
10353 Ok(Some(self.parse_index_type()?))
10354 } else {
10355 Ok(None)
10356 }
10357 }
10358
10359 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10363 self.maybe_parse(|parser| parser.parse_identifier())
10364 }
10365
10366 #[must_use]
10367 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10369 if self.parse_keyword(Keyword::KEY) {
10370 KeyOrIndexDisplay::Key
10371 } else if self.parse_keyword(Keyword::INDEX) {
10372 KeyOrIndexDisplay::Index
10373 } else {
10374 KeyOrIndexDisplay::None
10375 }
10376 }
10377
10378 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10380 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10381 Ok(Some(IndexOption::Using(index_type)))
10382 } else if self.parse_keyword(Keyword::COMMENT) {
10383 let s = self.parse_literal_string()?;
10384 Ok(Some(IndexOption::Comment(s)))
10385 } else {
10386 Ok(None)
10387 }
10388 }
10389
10390 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10392 let mut options = Vec::new();
10393
10394 loop {
10395 match self.parse_optional_index_option()? {
10396 Some(index_option) => options.push(index_option),
10397 None => return Ok(options),
10398 }
10399 }
10400 }
10401
10402 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10404 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10405
10406 match &self.peek_token_ref().token {
10407 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10408 Ok(SqlOption::Ident(self.parse_identifier()?))
10409 }
10410 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10411 self.parse_option_partition()
10412 }
10413 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10414 self.parse_option_clustered()
10415 }
10416 _ => {
10417 let name = self.parse_identifier()?;
10418 self.expect_token(&Token::Eq)?;
10419 let value = self.parse_expr()?;
10420
10421 Ok(SqlOption::KeyValue { key: name, value })
10422 }
10423 }
10424 }
10425
10426 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
10428 if self.parse_keywords(&[
10429 Keyword::CLUSTERED,
10430 Keyword::COLUMNSTORE,
10431 Keyword::INDEX,
10432 Keyword::ORDER,
10433 ]) {
10434 Ok(SqlOption::Clustered(
10435 TableOptionsClustered::ColumnstoreIndexOrder(
10436 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10437 ),
10438 ))
10439 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
10440 Ok(SqlOption::Clustered(
10441 TableOptionsClustered::ColumnstoreIndex,
10442 ))
10443 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
10444 self.expect_token(&Token::LParen)?;
10445
10446 let columns = self.parse_comma_separated(|p| {
10447 let name = p.parse_identifier()?;
10448 let asc = p.parse_asc_desc();
10449
10450 Ok(ClusteredIndex { name, asc })
10451 })?;
10452
10453 self.expect_token(&Token::RParen)?;
10454
10455 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
10456 } else {
10457 Err(ParserError::ParserError(
10458 "invalid CLUSTERED sequence".to_string(),
10459 ))
10460 }
10461 }
10462
10463 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10465 self.expect_keyword_is(Keyword::PARTITION)?;
10466 self.expect_token(&Token::LParen)?;
10467 let column_name = self.parse_identifier()?;
10468
10469 self.expect_keyword_is(Keyword::RANGE)?;
10470 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10471 Some(PartitionRangeDirection::Left)
10472 } else if self.parse_keyword(Keyword::RIGHT) {
10473 Some(PartitionRangeDirection::Right)
10474 } else {
10475 None
10476 };
10477
10478 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10479 self.expect_token(&Token::LParen)?;
10480
10481 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10482
10483 self.expect_token(&Token::RParen)?;
10484 self.expect_token(&Token::RParen)?;
10485
10486 Ok(SqlOption::Partition {
10487 column_name,
10488 range_direction,
10489 for_values,
10490 })
10491 }
10492
10493 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10495 self.expect_token(&Token::LParen)?;
10496 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10497 self.expect_token(&Token::RParen)?;
10498 Ok(Partition::Partitions(partitions))
10499 }
10500
10501 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10503 self.expect_token(&Token::LParen)?;
10504 self.expect_keyword_is(Keyword::SELECT)?;
10505 let projection = self.parse_projection()?;
10506 let group_by = self.parse_optional_group_by()?;
10507 let order_by = self.parse_optional_order_by()?;
10508 self.expect_token(&Token::RParen)?;
10509 Ok(ProjectionSelect {
10510 projection,
10511 group_by,
10512 order_by,
10513 })
10514 }
10515 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10517 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10518 let name = self.parse_identifier()?;
10519 let query = self.parse_projection_select()?;
10520 Ok(AlterTableOperation::AddProjection {
10521 if_not_exists,
10522 name,
10523 select: query,
10524 })
10525 }
10526
10527 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10531 self.expect_keyword_is(Keyword::ALTER)?;
10532 self.expect_keyword_is(Keyword::SORTKEY)?;
10533 self.expect_token(&Token::LParen)?;
10534 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10535 self.expect_token(&Token::RParen)?;
10536 Ok(AlterTableOperation::AlterSortKey { columns })
10537 }
10538
10539 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10541 let operation = if self.parse_keyword(Keyword::ADD) {
10542 if let Some(constraint) = self.parse_optional_table_constraint()? {
10543 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10544 AlterTableOperation::AddConstraint {
10545 constraint,
10546 not_valid,
10547 }
10548 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10549 && self.parse_keyword(Keyword::PROJECTION)
10550 {
10551 return self.parse_alter_table_add_projection();
10552 } else {
10553 let if_not_exists =
10554 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10555 let mut new_partitions = vec![];
10556 loop {
10557 if self.parse_keyword(Keyword::PARTITION) {
10558 new_partitions.push(self.parse_partition()?);
10559 } else {
10560 break;
10561 }
10562 }
10563 if !new_partitions.is_empty() {
10564 AlterTableOperation::AddPartitions {
10565 if_not_exists,
10566 new_partitions,
10567 }
10568 } else {
10569 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10570
10571 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10572 {
10573 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10574 || if_not_exists
10575 } else {
10576 false
10577 };
10578
10579 let column_def = self.parse_column_def()?;
10580
10581 let column_position = self.parse_column_position()?;
10582
10583 AlterTableOperation::AddColumn {
10584 column_keyword,
10585 if_not_exists,
10586 column_def,
10587 column_position,
10588 }
10589 }
10590 }
10591 } else if self.parse_keyword(Keyword::RENAME) {
10592 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10593 let old_name = self.parse_identifier()?;
10594 self.expect_keyword_is(Keyword::TO)?;
10595 let new_name = self.parse_identifier()?;
10596 AlterTableOperation::RenameConstraint { old_name, new_name }
10597 } else if self.parse_keyword(Keyword::TO) {
10598 let table_name = self.parse_object_name(false)?;
10599 AlterTableOperation::RenameTable {
10600 table_name: RenameTableNameKind::To(table_name),
10601 }
10602 } else if self.parse_keyword(Keyword::AS) {
10603 let table_name = self.parse_object_name(false)?;
10604 AlterTableOperation::RenameTable {
10605 table_name: RenameTableNameKind::As(table_name),
10606 }
10607 } else {
10608 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10610 self.expect_keyword_is(Keyword::TO)?;
10611 let new_column_name = self.parse_identifier()?;
10612 AlterTableOperation::RenameColumn {
10613 old_column_name,
10614 new_column_name,
10615 }
10616 }
10617 } else if self.parse_keyword(Keyword::DISABLE) {
10618 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10619 AlterTableOperation::DisableRowLevelSecurity {}
10620 } else if self.parse_keyword(Keyword::RULE) {
10621 let name = self.parse_identifier()?;
10622 AlterTableOperation::DisableRule { name }
10623 } else if self.parse_keyword(Keyword::TRIGGER) {
10624 let name = self.parse_identifier()?;
10625 AlterTableOperation::DisableTrigger { name }
10626 } else {
10627 return self.expected_ref(
10628 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10629 self.peek_token_ref(),
10630 );
10631 }
10632 } else if self.parse_keyword(Keyword::ENABLE) {
10633 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10634 let name = self.parse_identifier()?;
10635 AlterTableOperation::EnableAlwaysRule { name }
10636 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10637 let name = self.parse_identifier()?;
10638 AlterTableOperation::EnableAlwaysTrigger { name }
10639 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10640 AlterTableOperation::EnableRowLevelSecurity {}
10641 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10642 let name = self.parse_identifier()?;
10643 AlterTableOperation::EnableReplicaRule { name }
10644 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10645 let name = self.parse_identifier()?;
10646 AlterTableOperation::EnableReplicaTrigger { name }
10647 } else if self.parse_keyword(Keyword::RULE) {
10648 let name = self.parse_identifier()?;
10649 AlterTableOperation::EnableRule { name }
10650 } else if self.parse_keyword(Keyword::TRIGGER) {
10651 let name = self.parse_identifier()?;
10652 AlterTableOperation::EnableTrigger { name }
10653 } else {
10654 return self.expected_ref(
10655 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10656 self.peek_token_ref(),
10657 );
10658 }
10659 } else if self.parse_keywords(&[
10660 Keyword::FORCE,
10661 Keyword::ROW,
10662 Keyword::LEVEL,
10663 Keyword::SECURITY,
10664 ]) {
10665 AlterTableOperation::ForceRowLevelSecurity
10666 } else if self.parse_keywords(&[
10667 Keyword::NO,
10668 Keyword::FORCE,
10669 Keyword::ROW,
10670 Keyword::LEVEL,
10671 Keyword::SECURITY,
10672 ]) {
10673 AlterTableOperation::NoForceRowLevelSecurity
10674 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10675 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10676 {
10677 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10678 let name = self.parse_identifier()?;
10679 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10680 Some(self.parse_identifier()?)
10681 } else {
10682 None
10683 };
10684 AlterTableOperation::ClearProjection {
10685 if_exists,
10686 name,
10687 partition,
10688 }
10689 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10690 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10691 {
10692 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10693 let name = self.parse_identifier()?;
10694 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10695 Some(self.parse_identifier()?)
10696 } else {
10697 None
10698 };
10699 AlterTableOperation::MaterializeProjection {
10700 if_exists,
10701 name,
10702 partition,
10703 }
10704 } else if self.parse_keyword(Keyword::DROP) {
10705 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10706 self.expect_token(&Token::LParen)?;
10707 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10708 self.expect_token(&Token::RParen)?;
10709 AlterTableOperation::DropPartitions {
10710 partitions,
10711 if_exists: true,
10712 }
10713 } else if self.parse_keyword(Keyword::PARTITION) {
10714 self.expect_token(&Token::LParen)?;
10715 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10716 self.expect_token(&Token::RParen)?;
10717 AlterTableOperation::DropPartitions {
10718 partitions,
10719 if_exists: false,
10720 }
10721 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10722 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10723 let name = self.parse_identifier()?;
10724 let drop_behavior = self.parse_optional_drop_behavior();
10725 AlterTableOperation::DropConstraint {
10726 if_exists,
10727 name,
10728 drop_behavior,
10729 }
10730 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10731 let drop_behavior = self.parse_optional_drop_behavior();
10732 AlterTableOperation::DropPrimaryKey { drop_behavior }
10733 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10734 let name = self.parse_identifier()?;
10735 let drop_behavior = self.parse_optional_drop_behavior();
10736 AlterTableOperation::DropForeignKey {
10737 name,
10738 drop_behavior,
10739 }
10740 } else if self.parse_keyword(Keyword::INDEX) {
10741 let name = self.parse_identifier()?;
10742 AlterTableOperation::DropIndex { name }
10743 } else if self.parse_keyword(Keyword::PROJECTION)
10744 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10745 {
10746 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10747 let name = self.parse_identifier()?;
10748 AlterTableOperation::DropProjection { if_exists, name }
10749 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10750 AlterTableOperation::DropClusteringKey
10751 } else {
10752 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10754 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10755 self.parse_comma_separated(Parser::parse_identifier)?
10756 } else {
10757 vec![self.parse_identifier()?]
10758 };
10759 let drop_behavior = self.parse_optional_drop_behavior();
10760 AlterTableOperation::DropColumn {
10761 has_column_keyword,
10762 column_names,
10763 if_exists,
10764 drop_behavior,
10765 }
10766 }
10767 } else if self.parse_keyword(Keyword::PARTITION) {
10768 self.expect_token(&Token::LParen)?;
10769 let before = self.parse_comma_separated(Parser::parse_expr)?;
10770 self.expect_token(&Token::RParen)?;
10771 self.expect_keyword_is(Keyword::RENAME)?;
10772 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10773 self.expect_token(&Token::LParen)?;
10774 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10775 self.expect_token(&Token::RParen)?;
10776 AlterTableOperation::RenamePartitions {
10777 old_partitions: before,
10778 new_partitions: renames,
10779 }
10780 } else if self.parse_keyword(Keyword::CHANGE) {
10781 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10783 let new_name = self.parse_identifier()?;
10784 let data_type = self.parse_data_type()?;
10785 let mut options = vec![];
10786 while let Some(option) = self.parse_optional_column_option()? {
10787 options.push(option);
10788 }
10789
10790 let column_position = self.parse_column_position()?;
10791
10792 AlterTableOperation::ChangeColumn {
10793 old_name,
10794 new_name,
10795 data_type,
10796 options,
10797 column_position,
10798 }
10799 } else if self.parse_keyword(Keyword::MODIFY) {
10800 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10802 let data_type = self.parse_data_type()?;
10803 let mut options = vec![];
10804 while let Some(option) = self.parse_optional_column_option()? {
10805 options.push(option);
10806 }
10807
10808 let column_position = self.parse_column_position()?;
10809
10810 AlterTableOperation::ModifyColumn {
10811 col_name,
10812 data_type,
10813 options,
10814 column_position,
10815 }
10816 } else if self.parse_keyword(Keyword::ALTER) {
10817 if self.peek_keyword(Keyword::SORTKEY) {
10818 self.prev_token();
10819 return self.parse_alter_sort_key();
10820 }
10821
10822 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10824 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10825
10826 let op: AlterColumnOperation = if self.parse_keywords(&[
10827 Keyword::SET,
10828 Keyword::NOT,
10829 Keyword::NULL,
10830 ]) {
10831 AlterColumnOperation::SetNotNull {}
10832 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10833 AlterColumnOperation::DropNotNull {}
10834 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10835 AlterColumnOperation::SetDefault {
10836 value: self.parse_expr()?,
10837 }
10838 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10839 AlterColumnOperation::DropDefault {}
10840 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10841 self.parse_set_data_type(true)?
10842 } else if self.parse_keyword(Keyword::TYPE) {
10843 self.parse_set_data_type(false)?
10844 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10845 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10846 Some(GeneratedAs::Always)
10847 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10848 Some(GeneratedAs::ByDefault)
10849 } else {
10850 None
10851 };
10852
10853 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10854
10855 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10856
10857 if self.peek_token_ref().token == Token::LParen {
10858 self.expect_token(&Token::LParen)?;
10859 sequence_options = Some(self.parse_create_sequence_options()?);
10860 self.expect_token(&Token::RParen)?;
10861 }
10862
10863 AlterColumnOperation::AddGenerated {
10864 generated_as,
10865 sequence_options,
10866 }
10867 } else {
10868 let message = if is_postgresql {
10869 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10870 } else {
10871 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10872 };
10873
10874 return self.expected_ref(message, self.peek_token_ref());
10875 };
10876 AlterTableOperation::AlterColumn { column_name, op }
10877 } else if self.parse_keyword(Keyword::SWAP) {
10878 self.expect_keyword_is(Keyword::WITH)?;
10879 let table_name = self.parse_object_name(false)?;
10880 AlterTableOperation::SwapWith { table_name }
10881 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10882 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10883 {
10884 let new_owner = self.parse_owner()?;
10885 AlterTableOperation::OwnerTo { new_owner }
10886 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10887 && self.parse_keyword(Keyword::ATTACH)
10888 {
10889 AlterTableOperation::AttachPartition {
10890 partition: self.parse_part_or_partition()?,
10891 }
10892 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10893 && self.parse_keyword(Keyword::DETACH)
10894 {
10895 AlterTableOperation::DetachPartition {
10896 partition: self.parse_part_or_partition()?,
10897 }
10898 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10899 && self.parse_keyword(Keyword::FREEZE)
10900 {
10901 let partition = self.parse_part_or_partition()?;
10902 let with_name = if self.parse_keyword(Keyword::WITH) {
10903 self.expect_keyword_is(Keyword::NAME)?;
10904 Some(self.parse_identifier()?)
10905 } else {
10906 None
10907 };
10908 AlterTableOperation::FreezePartition {
10909 partition,
10910 with_name,
10911 }
10912 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10913 && self.parse_keyword(Keyword::UNFREEZE)
10914 {
10915 let partition = self.parse_part_or_partition()?;
10916 let with_name = if self.parse_keyword(Keyword::WITH) {
10917 self.expect_keyword_is(Keyword::NAME)?;
10918 Some(self.parse_identifier()?)
10919 } else {
10920 None
10921 };
10922 AlterTableOperation::UnfreezePartition {
10923 partition,
10924 with_name,
10925 }
10926 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10927 self.expect_token(&Token::LParen)?;
10928 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
10929 self.expect_token(&Token::RParen)?;
10930 AlterTableOperation::ClusterBy { exprs }
10931 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
10932 AlterTableOperation::SuspendRecluster
10933 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
10934 AlterTableOperation::ResumeRecluster
10935 } else if self.parse_keyword(Keyword::LOCK) {
10936 let equals = self.consume_token(&Token::Eq);
10937 let lock = match self.parse_one_of_keywords(&[
10938 Keyword::DEFAULT,
10939 Keyword::EXCLUSIVE,
10940 Keyword::NONE,
10941 Keyword::SHARED,
10942 ]) {
10943 Some(Keyword::DEFAULT) => AlterTableLock::Default,
10944 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
10945 Some(Keyword::NONE) => AlterTableLock::None,
10946 Some(Keyword::SHARED) => AlterTableLock::Shared,
10947 _ => self.expected_ref(
10948 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
10949 self.peek_token_ref(),
10950 )?,
10951 };
10952 AlterTableOperation::Lock { equals, lock }
10953 } else if self.parse_keyword(Keyword::ALGORITHM) {
10954 let equals = self.consume_token(&Token::Eq);
10955 let algorithm = match self.parse_one_of_keywords(&[
10956 Keyword::DEFAULT,
10957 Keyword::INSTANT,
10958 Keyword::INPLACE,
10959 Keyword::COPY,
10960 ]) {
10961 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
10962 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
10963 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
10964 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
10965 _ => self.expected_ref(
10966 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
10967 self.peek_token_ref(),
10968 )?,
10969 };
10970 AlterTableOperation::Algorithm { equals, algorithm }
10971 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
10972 let equals = self.consume_token(&Token::Eq);
10973 let value = self.parse_number_value()?;
10974 AlterTableOperation::AutoIncrement { equals, value }
10975 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
10976 let identity = if self.parse_keyword(Keyword::NOTHING) {
10977 ReplicaIdentity::Nothing
10978 } else if self.parse_keyword(Keyword::FULL) {
10979 ReplicaIdentity::Full
10980 } else if self.parse_keyword(Keyword::DEFAULT) {
10981 ReplicaIdentity::Default
10982 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
10983 ReplicaIdentity::Index(self.parse_identifier()?)
10984 } else {
10985 return self.expected_ref(
10986 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
10987 self.peek_token_ref(),
10988 );
10989 };
10990
10991 AlterTableOperation::ReplicaIdentity { identity }
10992 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
10993 let name = self.parse_identifier()?;
10994 AlterTableOperation::ValidateConstraint { name }
10995 } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
10996 let tablespace_name = self.parse_identifier()?;
10997 AlterTableOperation::SetTablespace { tablespace_name }
10998 } else {
10999 let mut options =
11000 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
11001 if !options.is_empty() {
11002 AlterTableOperation::SetTblProperties {
11003 table_properties: options,
11004 }
11005 } else {
11006 options = self.parse_options(Keyword::SET)?;
11007 if !options.is_empty() {
11008 AlterTableOperation::SetOptionsParens { options }
11009 } else {
11010 return self.expected_ref(
11011 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
11012 self.peek_token_ref(),
11013 );
11014 }
11015 }
11016 };
11017 Ok(operation)
11018 }
11019
11020 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
11021 let data_type = self.parse_data_type()?;
11022 let using = if self.dialect.supports_alter_column_type_using()
11023 && self.parse_keyword(Keyword::USING)
11024 {
11025 Some(self.parse_expr()?)
11026 } else {
11027 None
11028 };
11029 Ok(AlterColumnOperation::SetDataType {
11030 data_type,
11031 using,
11032 had_set,
11033 })
11034 }
11035
11036 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
11037 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
11038 match keyword {
11039 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
11040 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
11041 unexpected_keyword => Err(ParserError::ParserError(
11043 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
11044 )),
11045 }
11046 }
11047
11048 pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
11050 let object_type = self.expect_one_of_keywords(&[
11051 Keyword::VIEW,
11052 Keyword::TYPE,
11053 Keyword::COLLATION,
11054 Keyword::TABLE,
11055 Keyword::INDEX,
11056 Keyword::FUNCTION,
11057 Keyword::AGGREGATE,
11058 Keyword::ROLE,
11059 Keyword::POLICY,
11060 Keyword::CONNECTOR,
11061 Keyword::ICEBERG,
11062 Keyword::SCHEMA,
11063 Keyword::USER,
11064 Keyword::OPERATOR,
11065 Keyword::DOMAIN,
11066 Keyword::TRIGGER,
11067 Keyword::EXTENSION,
11068 Keyword::PROCEDURE,
11069 ])?;
11070 match object_type {
11071 Keyword::SCHEMA => {
11072 self.prev_token();
11073 self.prev_token();
11074 self.parse_alter_schema()
11075 }
11076 Keyword::VIEW => self.parse_alter_view(),
11077 Keyword::TYPE => self.parse_alter_type(),
11078 Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
11079 Keyword::TABLE => self.parse_alter_table(false),
11080 Keyword::ICEBERG => {
11081 self.expect_keyword(Keyword::TABLE)?;
11082 self.parse_alter_table(true)
11083 }
11084 Keyword::INDEX => {
11085 let index_name = self.parse_object_name(false)?;
11086 let operation = if self.parse_keyword(Keyword::RENAME) {
11087 if self.parse_keyword(Keyword::TO) {
11088 let index_name = self.parse_object_name(false)?;
11089 AlterIndexOperation::RenameIndex { index_name }
11090 } else {
11091 return self.expected_ref("TO after RENAME", self.peek_token_ref());
11092 }
11093 } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
11094 let tablespace_name = self.parse_identifier()?;
11095 AlterIndexOperation::SetTablespace { tablespace_name }
11096 } else {
11097 return self.expected_ref(
11098 "RENAME or SET TABLESPACE after ALTER INDEX",
11099 self.peek_token_ref(),
11100 );
11101 };
11102
11103 Ok(Statement::AlterIndex {
11104 name: index_name,
11105 operation,
11106 })
11107 }
11108 Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
11109 Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
11110 Keyword::PROCEDURE => self.parse_alter_function(AlterFunctionKind::Procedure),
11111 Keyword::OPERATOR => {
11112 if self.parse_keyword(Keyword::FAMILY) {
11113 self.parse_alter_operator_family().map(Into::into)
11114 } else if self.parse_keyword(Keyword::CLASS) {
11115 self.parse_alter_operator_class().map(Into::into)
11116 } else {
11117 self.parse_alter_operator().map(Into::into)
11118 }
11119 }
11120 Keyword::ROLE => self.parse_alter_role(),
11121 Keyword::POLICY => self.parse_alter_policy().map(Into::into),
11122 Keyword::CONNECTOR => self.parse_alter_connector(),
11123 Keyword::USER => self.parse_alter_user().map(Into::into),
11124 Keyword::DOMAIN => self.parse_alter_domain(),
11125 Keyword::TRIGGER => self.parse_alter_trigger(),
11126 Keyword::EXTENSION => self.parse_alter_extension(),
11127 unexpected_keyword => Err(ParserError::ParserError(
11129 format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR, DOMAIN, TRIGGER, EXTENSION, PROCEDURE}}, got {unexpected_keyword:?}"),
11130 )),
11131 }
11132 }
11133
11134 fn parse_alter_aggregate_signature(
11135 &mut self,
11136 ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
11137 let name = self.parse_object_name(false)?;
11138 self.expect_token(&Token::LParen)?;
11139
11140 if self.consume_token(&Token::Mul) {
11141 self.expect_token(&Token::RParen)?;
11142 return Ok((
11143 FunctionDesc {
11144 name,
11145 args: Some(vec![]),
11146 },
11147 true,
11148 None,
11149 ));
11150 }
11151
11152 let args =
11153 if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
11154 vec![]
11155 } else {
11156 self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
11157 };
11158
11159 let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11160 Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
11161 } else {
11162 None
11163 };
11164
11165 self.expect_token(&Token::RParen)?;
11166 Ok((
11167 FunctionDesc {
11168 name,
11169 args: Some(args),
11170 },
11171 false,
11172 aggregate_order_by,
11173 ))
11174 }
11175
11176 fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
11177 let action = if self.parse_keywords(&[
11178 Keyword::CALLED,
11179 Keyword::ON,
11180 Keyword::NULL,
11181 Keyword::INPUT,
11182 ]) {
11183 Some(AlterFunctionAction::CalledOnNull(
11184 FunctionCalledOnNull::CalledOnNullInput,
11185 ))
11186 } else if self.parse_keywords(&[
11187 Keyword::RETURNS,
11188 Keyword::NULL,
11189 Keyword::ON,
11190 Keyword::NULL,
11191 Keyword::INPUT,
11192 ]) {
11193 Some(AlterFunctionAction::CalledOnNull(
11194 FunctionCalledOnNull::ReturnsNullOnNullInput,
11195 ))
11196 } else if self.parse_keyword(Keyword::STRICT) {
11197 Some(AlterFunctionAction::CalledOnNull(
11198 FunctionCalledOnNull::Strict,
11199 ))
11200 } else if self.parse_keyword(Keyword::IMMUTABLE) {
11201 Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
11202 } else if self.parse_keyword(Keyword::STABLE) {
11203 Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
11204 } else if self.parse_keyword(Keyword::VOLATILE) {
11205 Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
11206 } else if self.parse_keyword(Keyword::NOT) {
11207 self.expect_keyword(Keyword::LEAKPROOF)?;
11208 Some(AlterFunctionAction::Leakproof(false))
11209 } else if self.parse_keyword(Keyword::LEAKPROOF) {
11210 Some(AlterFunctionAction::Leakproof(true))
11211 } else if self.parse_keyword(Keyword::EXTERNAL) {
11212 self.expect_keyword(Keyword::SECURITY)?;
11213 let security = if self.parse_keyword(Keyword::DEFINER) {
11214 FunctionSecurity::Definer
11215 } else if self.parse_keyword(Keyword::INVOKER) {
11216 FunctionSecurity::Invoker
11217 } else {
11218 return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
11219 };
11220 Some(AlterFunctionAction::Security {
11221 external: true,
11222 security,
11223 })
11224 } else if self.parse_keyword(Keyword::SECURITY) {
11225 let security = if self.parse_keyword(Keyword::DEFINER) {
11226 FunctionSecurity::Definer
11227 } else if self.parse_keyword(Keyword::INVOKER) {
11228 FunctionSecurity::Invoker
11229 } else {
11230 return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
11231 };
11232 Some(AlterFunctionAction::Security {
11233 external: false,
11234 security,
11235 })
11236 } else if self.parse_keyword(Keyword::PARALLEL) {
11237 let parallel = if self.parse_keyword(Keyword::UNSAFE) {
11238 FunctionParallel::Unsafe
11239 } else if self.parse_keyword(Keyword::RESTRICTED) {
11240 FunctionParallel::Restricted
11241 } else if self.parse_keyword(Keyword::SAFE) {
11242 FunctionParallel::Safe
11243 } else {
11244 return self
11245 .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
11246 };
11247 Some(AlterFunctionAction::Parallel(parallel))
11248 } else if self.parse_keyword(Keyword::COST) {
11249 Some(AlterFunctionAction::Cost(self.parse_number()?))
11250 } else if self.parse_keyword(Keyword::ROWS) {
11251 Some(AlterFunctionAction::Rows(self.parse_number()?))
11252 } else if self.parse_keyword(Keyword::SUPPORT) {
11253 Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
11254 } else if self.parse_keyword(Keyword::SET) {
11255 let name = self.parse_object_name(false)?;
11256 let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
11257 FunctionSetValue::FromCurrent
11258 } else {
11259 if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
11260 return self.expected_ref("= or TO", self.peek_token_ref());
11261 }
11262 if self.parse_keyword(Keyword::DEFAULT) {
11263 FunctionSetValue::Default
11264 } else {
11265 FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
11266 }
11267 };
11268 Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
11269 name,
11270 value,
11271 }))
11272 } else if self.parse_keyword(Keyword::RESET) {
11273 let reset_config = if self.parse_keyword(Keyword::ALL) {
11274 ResetConfig::ALL
11275 } else {
11276 ResetConfig::ConfigName(self.parse_object_name(false)?)
11277 };
11278 Some(AlterFunctionAction::Reset(reset_config))
11279 } else {
11280 None
11281 };
11282
11283 Ok(action)
11284 }
11285
11286 fn parse_alter_function_actions(
11287 &mut self,
11288 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11289 let mut actions = vec![];
11290 while let Some(action) = self.parse_alter_function_action()? {
11291 actions.push(action);
11292 }
11293 if actions.is_empty() {
11294 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11295 }
11296 let restrict = self.parse_keyword(Keyword::RESTRICT);
11297 Ok((actions, restrict))
11298 }
11299
11300 pub fn parse_alter_function(
11302 &mut self,
11303 kind: AlterFunctionKind,
11304 ) -> Result<Statement, ParserError> {
11305 let (function, aggregate_star, aggregate_order_by) = match kind {
11306 AlterFunctionKind::Function | AlterFunctionKind::Procedure => {
11307 (self.parse_function_desc()?, false, None)
11308 }
11309 AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
11310 };
11311
11312 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11313 let new_name = self.parse_identifier()?;
11314 AlterFunctionOperation::RenameTo { new_name }
11315 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11316 AlterFunctionOperation::OwnerTo(self.parse_owner()?)
11317 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11318 AlterFunctionOperation::SetSchema {
11319 schema_name: self.parse_object_name(false)?,
11320 }
11321 } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure)
11322 && self.parse_keyword(Keyword::NO)
11323 {
11324 if !self.parse_keyword(Keyword::DEPENDS) {
11325 return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
11326 }
11327 self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
11328 AlterFunctionOperation::DependsOnExtension {
11329 no: true,
11330 extension_name: self.parse_object_name(false)?,
11331 }
11332 } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure)
11333 && self.parse_keyword(Keyword::DEPENDS)
11334 {
11335 self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
11336 AlterFunctionOperation::DependsOnExtension {
11337 no: false,
11338 extension_name: self.parse_object_name(false)?,
11339 }
11340 } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure) {
11341 let (actions, restrict) = self.parse_alter_function_actions()?;
11342 AlterFunctionOperation::Actions { actions, restrict }
11343 } else {
11344 return self.expected_ref(
11345 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
11346 self.peek_token_ref(),
11347 );
11348 };
11349
11350 Ok(Statement::AlterFunction(AlterFunction {
11351 kind,
11352 function,
11353 aggregate_order_by,
11354 aggregate_star,
11355 operation,
11356 }))
11357 }
11358
11359 pub fn parse_alter_domain(&mut self) -> Result<Statement, ParserError> {
11361 let name = self.parse_object_name(false)?;
11362
11363 let operation = if self.parse_keyword(Keyword::ADD) {
11364 if let Some(constraint) = self.parse_optional_table_constraint()? {
11365 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
11366 AlterDomainOperation::AddConstraint {
11367 constraint,
11368 not_valid,
11369 }
11370 } else {
11371 return self.expected_ref("constraint after ADD", self.peek_token_ref());
11372 }
11373 } else if self.parse_keywords(&[Keyword::DROP, Keyword::CONSTRAINT]) {
11374 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11375 let name = self.parse_identifier()?;
11376 let drop_behavior = self.parse_optional_drop_behavior();
11377 AlterDomainOperation::DropConstraint {
11378 if_exists,
11379 name,
11380 drop_behavior,
11381 }
11382 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
11383 AlterDomainOperation::DropDefault
11384 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::CONSTRAINT]) {
11385 let old_name = self.parse_identifier()?;
11386 self.expect_keyword_is(Keyword::TO)?;
11387 let new_name = self.parse_identifier()?;
11388 AlterDomainOperation::RenameConstraint { old_name, new_name }
11389 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11390 let new_name = self.parse_identifier()?;
11391 AlterDomainOperation::RenameTo { new_name }
11392 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11393 AlterDomainOperation::OwnerTo(self.parse_owner()?)
11394 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11395 AlterDomainOperation::SetSchema {
11396 schema_name: self.parse_object_name(false)?,
11397 }
11398 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
11399 AlterDomainOperation::SetDefault {
11400 default: self.parse_expr()?,
11401 }
11402 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
11403 let name = self.parse_identifier()?;
11404 AlterDomainOperation::ValidateConstraint { name }
11405 } else {
11406 return self.expected_ref(
11407 "ADD, DROP, RENAME, OWNER TO, SET, VALIDATE after ALTER DOMAIN",
11408 self.peek_token_ref(),
11409 );
11410 };
11411
11412 Ok(AlterDomain { name, operation }.into())
11413 }
11414
11415 pub fn parse_alter_trigger(&mut self) -> Result<Statement, ParserError> {
11417 let name = self.parse_identifier()?;
11418 self.expect_keyword_is(Keyword::ON)?;
11419 let table_name = self.parse_object_name(false)?;
11420
11421 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11422 let new_name = self.parse_identifier()?;
11423 AlterTriggerOperation::RenameTo { new_name }
11424 } else {
11425 return self.expected_ref("RENAME TO after ALTER TRIGGER ... ON ...", self.peek_token_ref());
11426 };
11427
11428 Ok(AlterTrigger {
11429 name,
11430 table_name,
11431 operation,
11432 }
11433 .into())
11434 }
11435
11436 pub fn parse_alter_extension(&mut self) -> Result<Statement, ParserError> {
11438 let name = self.parse_identifier()?;
11439
11440 let operation = if self.parse_keyword(Keyword::UPDATE) {
11441 let version = if self.parse_keyword(Keyword::TO) {
11442 Some(self.parse_identifier()?)
11443 } else {
11444 None
11445 };
11446 AlterExtensionOperation::UpdateTo { version }
11447 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11448 AlterExtensionOperation::SetSchema {
11449 schema_name: self.parse_object_name(false)?,
11450 }
11451 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11452 AlterExtensionOperation::OwnerTo(self.parse_owner()?)
11453 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11454 let new_name = self.parse_identifier()?;
11455 AlterExtensionOperation::RenameTo { new_name }
11456 } else {
11457 return self.expected_ref(
11458 "UPDATE, SET SCHEMA, OWNER TO, or RENAME TO after ALTER EXTENSION",
11459 self.peek_token_ref(),
11460 );
11461 };
11462
11463 Ok(AlterExtension { name, operation }.into())
11464 }
11465
11466 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11468 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11469 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11471 let on_cluster = self.parse_optional_on_cluster()?;
11472 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11473
11474 let mut location = None;
11475 if self.parse_keyword(Keyword::LOCATION) {
11476 location = Some(HiveSetLocation {
11477 has_set: false,
11478 location: self.parse_identifier()?,
11479 });
11480 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11481 location = Some(HiveSetLocation {
11482 has_set: true,
11483 location: self.parse_identifier()?,
11484 });
11485 }
11486
11487 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11488 self.peek_token_ref().clone()
11489 } else {
11490 self.get_current_token().clone()
11491 };
11492
11493 Ok(AlterTable {
11494 name: table_name,
11495 if_exists,
11496 only,
11497 operations,
11498 location,
11499 on_cluster,
11500 table_type: if iceberg {
11501 Some(AlterTableType::Iceberg)
11502 } else {
11503 None
11504 },
11505 end_token: AttachedToken(end_token),
11506 }
11507 .into())
11508 }
11509
11510 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11512 let name = self.parse_object_name(false)?;
11513 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11514
11515 let with_options = self.parse_options(Keyword::WITH)?;
11516
11517 self.expect_keyword_is(Keyword::AS)?;
11518 let query = self.parse_query()?;
11519
11520 Ok(Statement::AlterView {
11521 name,
11522 columns,
11523 query,
11524 with_options,
11525 })
11526 }
11527
11528 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
11530 let name = self.parse_object_name(false)?;
11531
11532 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11533 let new_name = self.parse_identifier()?;
11534 Ok(Statement::AlterType(AlterType {
11535 name,
11536 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
11537 }))
11538 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
11539 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
11540 let new_enum_value = self.parse_identifier()?;
11541 let position = if self.parse_keyword(Keyword::BEFORE) {
11542 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
11543 } else if self.parse_keyword(Keyword::AFTER) {
11544 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
11545 } else {
11546 None
11547 };
11548
11549 Ok(Statement::AlterType(AlterType {
11550 name,
11551 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
11552 if_not_exists,
11553 value: new_enum_value,
11554 position,
11555 }),
11556 }))
11557 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
11558 let existing_enum_value = self.parse_identifier()?;
11559 self.expect_keyword(Keyword::TO)?;
11560 let new_enum_value = self.parse_identifier()?;
11561
11562 Ok(Statement::AlterType(AlterType {
11563 name,
11564 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
11565 from: existing_enum_value,
11566 to: new_enum_value,
11567 }),
11568 }))
11569 } else {
11570 self.expected_ref(
11571 "{RENAME TO | { RENAME | ADD } VALUE}",
11572 self.peek_token_ref(),
11573 )
11574 }
11575 }
11576
11577 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11581 let name = self.parse_object_name(false)?;
11582 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11583 AlterCollationOperation::RenameTo {
11584 new_name: self.parse_identifier()?,
11585 }
11586 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11587 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11588 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11589 AlterCollationOperation::SetSchema {
11590 schema_name: self.parse_object_name(false)?,
11591 }
11592 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11593 AlterCollationOperation::RefreshVersion
11594 } else {
11595 return self.expected_ref(
11596 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11597 self.peek_token_ref(),
11598 );
11599 };
11600
11601 Ok(AlterCollation { name, operation })
11602 }
11603
    /// Parses the remainder of an `ALTER OPERATOR` statement (PostgreSQL):
    /// `<name> ( <left_type>, <right_type> )` followed by one of
    /// `OWNER TO`, `SET SCHEMA`, or `SET ( option [, ...] )`.
    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
        let name = self.parse_operator_name()?;

        // Operand signature: `(NONE, right)` for a prefix operator,
        // otherwise `(left, right)`.
        self.expect_token(&Token::LParen)?;

        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // New owner is either one of the special role keywords or a
            // plain identifier.
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            // `SET ( option [= value] [, ...] )` — at least one option.
            self.expect_token(&Token::LParen)?;

            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    Keyword::RESTRICT => {
                        // `RESTRICT = NONE` clears the restriction estimator.
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        // `JOIN = NONE` clears the join estimator.
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    Keyword::COMMUTATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    // HASHES and MERGES are bare flags with no `= value`.
                    Keyword::HASHES => {
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // Unreachable in practice: `expect_one_of_keywords` can
                    // only return a keyword from the list above.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        })
    }
11713
11714 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11716 let strategy_number = self.parse_literal_uint()?;
11717 let operator_name = self.parse_operator_name()?;
11718
11719 self.expect_token(&Token::LParen)?;
11721 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11722 self.expect_token(&Token::RParen)?;
11723
11724 let purpose = if self.parse_keyword(Keyword::FOR) {
11726 if self.parse_keyword(Keyword::SEARCH) {
11727 Some(OperatorPurpose::ForSearch)
11728 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11729 let sort_family = self.parse_object_name(false)?;
11730 Some(OperatorPurpose::ForOrderBy { sort_family })
11731 } else {
11732 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11733 }
11734 } else {
11735 None
11736 };
11737
11738 Ok(OperatorFamilyItem::Operator {
11739 strategy_number,
11740 operator_name,
11741 op_types,
11742 purpose,
11743 })
11744 }
11745
11746 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11748 let support_number = self.parse_literal_uint()?;
11749
11750 let op_types =
11752 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
11753 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11754 self.expect_token(&Token::RParen)?;
11755 Some(types)
11756 } else if self.consume_token(&Token::LParen) {
11757 self.expect_token(&Token::RParen)?;
11758 Some(vec![])
11759 } else {
11760 None
11761 };
11762
11763 let function_name = self.parse_object_name(false)?;
11764
11765 let argument_types = if self.consume_token(&Token::LParen) {
11767 if self.peek_token_ref().token == Token::RParen {
11768 self.expect_token(&Token::RParen)?;
11769 vec![]
11770 } else {
11771 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11772 self.expect_token(&Token::RParen)?;
11773 types
11774 }
11775 } else {
11776 vec![]
11777 };
11778
11779 Ok(OperatorFamilyItem::Function {
11780 support_number,
11781 op_types,
11782 function_name,
11783 argument_types,
11784 })
11785 }
11786
11787 fn parse_operator_family_drop_operator(
11789 &mut self,
11790 ) -> Result<OperatorFamilyDropItem, ParserError> {
11791 let strategy_number = self.parse_literal_uint()?;
11792
11793 self.expect_token(&Token::LParen)?;
11795 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11796 self.expect_token(&Token::RParen)?;
11797
11798 Ok(OperatorFamilyDropItem::Operator {
11799 strategy_number,
11800 op_types,
11801 })
11802 }
11803
11804 fn parse_operator_family_drop_function(
11806 &mut self,
11807 ) -> Result<OperatorFamilyDropItem, ParserError> {
11808 let support_number = self.parse_literal_uint()?;
11809
11810 self.expect_token(&Token::LParen)?;
11812 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11813 self.expect_token(&Token::RParen)?;
11814
11815 Ok(OperatorFamilyDropItem::Function {
11816 support_number,
11817 op_types,
11818 })
11819 }
11820
11821 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11823 if self.parse_keyword(Keyword::OPERATOR) {
11824 self.parse_operator_family_add_operator()
11825 } else if self.parse_keyword(Keyword::FUNCTION) {
11826 self.parse_operator_family_add_function()
11827 } else {
11828 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11829 }
11830 }
11831
11832 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
11834 if self.parse_keyword(Keyword::OPERATOR) {
11835 self.parse_operator_family_drop_operator()
11836 } else if self.parse_keyword(Keyword::FUNCTION) {
11837 self.parse_operator_family_drop_function()
11838 } else {
11839 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11840 }
11841 }
11842
11843 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
11846 let name = self.parse_object_name(false)?;
11847 self.expect_keyword(Keyword::USING)?;
11848 let using = self.parse_identifier()?;
11849
11850 let operation = if self.parse_keyword(Keyword::ADD) {
11851 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
11852 AlterOperatorFamilyOperation::Add { items }
11853 } else if self.parse_keyword(Keyword::DROP) {
11854 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
11855 AlterOperatorFamilyOperation::Drop { items }
11856 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11857 let new_name = self.parse_object_name(false)?;
11858 AlterOperatorFamilyOperation::RenameTo { new_name }
11859 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11860 let owner = self.parse_owner()?;
11861 AlterOperatorFamilyOperation::OwnerTo(owner)
11862 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11863 let schema_name = self.parse_object_name(false)?;
11864 AlterOperatorFamilyOperation::SetSchema { schema_name }
11865 } else {
11866 return self.expected_ref(
11867 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
11868 self.peek_token_ref(),
11869 );
11870 };
11871
11872 Ok(AlterOperatorFamily {
11873 name,
11874 using,
11875 operation,
11876 })
11877 }
11878
11879 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
11883 let name = self.parse_object_name(false)?;
11884 self.expect_keyword(Keyword::USING)?;
11885 let using = self.parse_identifier()?;
11886
11887 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11888 let new_name = self.parse_object_name(false)?;
11889 AlterOperatorClassOperation::RenameTo { new_name }
11890 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11891 let owner = self.parse_owner()?;
11892 AlterOperatorClassOperation::OwnerTo(owner)
11893 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11894 let schema_name = self.parse_object_name(false)?;
11895 AlterOperatorClassOperation::SetSchema { schema_name }
11896 } else {
11897 return self.expected_ref(
11898 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
11899 self.peek_token_ref(),
11900 );
11901 };
11902
11903 Ok(AlterOperatorClass {
11904 name,
11905 using,
11906 operation,
11907 })
11908 }
11909
11910 pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
11914 self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
11915 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11916 let name = self.parse_object_name(false)?;
11917 let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
11918 self.prev_token();
11919 let options = self.parse_options(Keyword::OPTIONS)?;
11920 AlterSchemaOperation::SetOptionsParens { options }
11921 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
11922 let collate = self.parse_expr()?;
11923 AlterSchemaOperation::SetDefaultCollate { collate }
11924 } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
11925 let replica = self.parse_identifier()?;
11926 let options = if self.peek_keyword(Keyword::OPTIONS) {
11927 Some(self.parse_options(Keyword::OPTIONS)?)
11928 } else {
11929 None
11930 };
11931 AlterSchemaOperation::AddReplica { replica, options }
11932 } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
11933 let replica = self.parse_identifier()?;
11934 AlterSchemaOperation::DropReplica { replica }
11935 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11936 let new_name = self.parse_object_name(false)?;
11937 AlterSchemaOperation::Rename { name: new_name }
11938 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11939 let owner = self.parse_owner()?;
11940 AlterSchemaOperation::OwnerTo { owner }
11941 } else {
11942 return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
11943 };
11944 Ok(Statement::AlterSchema(AlterSchema {
11945 name,
11946 if_exists,
11947 operations: vec![operation],
11948 }))
11949 }
11950
11951 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
11954 let object_name = self.parse_object_name(false)?;
11955 if self.peek_token_ref().token == Token::LParen {
11956 match self.parse_function(object_name)? {
11957 Expr::Function(f) => Ok(Statement::Call(f)),
11958 other => parser_err!(
11959 format!("Expected a simple procedure call but found: {other}"),
11960 self.peek_token_ref().span.start
11961 ),
11962 }
11963 } else {
11964 Ok(Statement::Call(Function {
11965 name: object_name,
11966 uses_odbc_syntax: false,
11967 parameters: FunctionArguments::None,
11968 args: FunctionArguments::None,
11969 over: None,
11970 filter: None,
11971 null_treatment: None,
11972 within_group: vec![],
11973 }))
11974 }
11975 }
11976
    /// Parses the remainder of a `COPY` statement (PostgreSQL syntax),
    /// covering `COPY <table> FROM|TO <target>` and `COPY (<query>) TO`,
    /// including modern parenthesized options, legacy space-separated
    /// options, and inline TSV data for `COPY ... FROM STDIN;`.
    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
        let source;
        if self.consume_token(&Token::LParen) {
            // `COPY ( query ) ...`
            source = CopySource::Query(self.parse_query()?);
            self.expect_token(&Token::RParen)?;
        } else {
            // `COPY table [ ( columns ) ] ...`
            let table_name = self.parse_object_name(false)?;
            let columns = self.parse_parenthesized_column_list(Optional, false)?;
            source = CopySource::Table {
                table_name,
                columns,
            };
        }
        // Direction flag: FROM loads into the source, TO copies out of it.
        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
            Some(Keyword::FROM) => false,
            Some(Keyword::TO) => true,
            _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
        };
        if !to {
            // A query source can only be copied out, never loaded into.
            if let CopySource::Query(_) = source {
                return Err(ParserError::ParserError(
                    "COPY ... FROM does not support query as a source".to_string(),
                ));
            }
        }
        let target = if self.parse_keyword(Keyword::STDIN) {
            CopyTarget::Stdin
        } else if self.parse_keyword(Keyword::STDOUT) {
            CopyTarget::Stdout
        } else if self.parse_keyword(Keyword::PROGRAM) {
            CopyTarget::Program {
                command: self.parse_literal_string()?,
            }
        } else {
            CopyTarget::File {
                filename: self.parse_literal_string()?,
            }
        };
        // WITH is an optional noise word before the option list.
        let _ = self.parse_keyword(Keyword::WITH);
        let mut options = vec![];
        if self.consume_token(&Token::LParen) {
            // Modern parenthesized options: `( FORMAT csv, HEADER, ... )`.
            options = self.parse_comma_separated(Parser::parse_copy_option)?;
            self.expect_token(&Token::RParen)?;
        }
        // Legacy unparenthesized options may follow; keep consuming while
        // they parse — `maybe_parse` backtracks on the first failure.
        let mut legacy_options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            legacy_options.push(opt);
        }
        // For `COPY ... FROM STDIN;` the inline data follows the semicolon.
        let values =
            if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
                self.expect_token(&Token::SemiColon)?;
                self.parse_tsv()
            } else {
                vec![]
            };
        Ok(Statement::Copy {
            source,
            to,
            target,
            options,
            legacy_options,
            values,
        })
    }
12044
12045 fn parse_open(&mut self) -> Result<Statement, ParserError> {
12047 self.expect_keyword(Keyword::OPEN)?;
12048 Ok(Statement::Open(OpenStatement {
12049 cursor_name: self.parse_identifier()?,
12050 }))
12051 }
12052
12053 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
12055 let cursor = if self.parse_keyword(Keyword::ALL) {
12056 CloseCursor::All
12057 } else {
12058 let name = self.parse_identifier()?;
12059
12060 CloseCursor::Specific { name }
12061 };
12062
12063 Ok(Statement::Close { cursor })
12064 }
12065
    /// Parses one modern (parenthesized) `COPY` option, e.g. `FORMAT csv`,
    /// `HEADER [TRUE|FALSE]`, `DELIMITER 'c'`, `FORCE_QUOTE (cols)`.
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
            // FREEZE/HEADER default to true when the boolean is omitted;
            // only an explicit FALSE disables them.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            // The FORCE_* options take a mandatory parenthesized column list.
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected_ref("option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12107
12108 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
12109 if self.parse_keyword(Keyword::FORMAT) {
12111 let _ = self.parse_keyword(Keyword::AS);
12112 }
12113
12114 let ret = match self.parse_one_of_keywords(&[
12115 Keyword::ACCEPTANYDATE,
12116 Keyword::ACCEPTINVCHARS,
12117 Keyword::ADDQUOTES,
12118 Keyword::ALLOWOVERWRITE,
12119 Keyword::BINARY,
12120 Keyword::BLANKSASNULL,
12121 Keyword::BZIP2,
12122 Keyword::CLEANPATH,
12123 Keyword::COMPUPDATE,
12124 Keyword::CREDENTIALS,
12125 Keyword::CSV,
12126 Keyword::DATEFORMAT,
12127 Keyword::DELIMITER,
12128 Keyword::EMPTYASNULL,
12129 Keyword::ENCRYPTED,
12130 Keyword::ESCAPE,
12131 Keyword::EXTENSION,
12132 Keyword::FIXEDWIDTH,
12133 Keyword::GZIP,
12134 Keyword::HEADER,
12135 Keyword::IAM_ROLE,
12136 Keyword::IGNOREHEADER,
12137 Keyword::JSON,
12138 Keyword::MANIFEST,
12139 Keyword::MAXFILESIZE,
12140 Keyword::NULL,
12141 Keyword::PARALLEL,
12142 Keyword::PARQUET,
12143 Keyword::PARTITION,
12144 Keyword::REGION,
12145 Keyword::REMOVEQUOTES,
12146 Keyword::ROWGROUPSIZE,
12147 Keyword::STATUPDATE,
12148 Keyword::TIMEFORMAT,
12149 Keyword::TRUNCATECOLUMNS,
12150 Keyword::ZSTD,
12151 ]) {
12152 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
12153 Some(Keyword::ACCEPTINVCHARS) => {
12154 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12156 Some(self.parse_literal_string()?)
12157 } else {
12158 None
12159 };
12160 CopyLegacyOption::AcceptInvChars(ch)
12161 }
12162 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
12163 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
12164 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
12165 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
12166 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
12167 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
12168 Some(Keyword::COMPUPDATE) => {
12169 let preset = self.parse_keyword(Keyword::PRESET);
12170 let enabled = match self.parse_one_of_keywords(&[
12171 Keyword::TRUE,
12172 Keyword::FALSE,
12173 Keyword::ON,
12174 Keyword::OFF,
12175 ]) {
12176 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12177 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12178 _ => None,
12179 };
12180 CopyLegacyOption::CompUpdate { preset, enabled }
12181 }
12182 Some(Keyword::CREDENTIALS) => {
12183 CopyLegacyOption::Credentials(self.parse_literal_string()?)
12184 }
12185 Some(Keyword::CSV) => CopyLegacyOption::Csv({
12186 let mut opts = vec![];
12187 while let Some(opt) =
12188 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
12189 {
12190 opts.push(opt);
12191 }
12192 opts
12193 }),
12194 Some(Keyword::DATEFORMAT) => {
12195 let _ = self.parse_keyword(Keyword::AS);
12196 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12197 Some(self.parse_literal_string()?)
12198 } else {
12199 None
12200 };
12201 CopyLegacyOption::DateFormat(fmt)
12202 }
12203 Some(Keyword::DELIMITER) => {
12204 let _ = self.parse_keyword(Keyword::AS);
12205 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
12206 }
12207 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
12208 Some(Keyword::ENCRYPTED) => {
12209 let auto = self.parse_keyword(Keyword::AUTO);
12210 CopyLegacyOption::Encrypted { auto }
12211 }
12212 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
12213 Some(Keyword::EXTENSION) => {
12214 let ext = self.parse_literal_string()?;
12215 CopyLegacyOption::Extension(ext)
12216 }
12217 Some(Keyword::FIXEDWIDTH) => {
12218 let spec = self.parse_literal_string()?;
12219 CopyLegacyOption::FixedWidth(spec)
12220 }
12221 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
12222 Some(Keyword::HEADER) => CopyLegacyOption::Header,
12223 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
12224 Some(Keyword::IGNOREHEADER) => {
12225 let _ = self.parse_keyword(Keyword::AS);
12226 let num_rows = self.parse_literal_uint()?;
12227 CopyLegacyOption::IgnoreHeader(num_rows)
12228 }
12229 Some(Keyword::JSON) => {
12230 let _ = self.parse_keyword(Keyword::AS);
12231 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12232 Some(self.parse_literal_string()?)
12233 } else {
12234 None
12235 };
12236 CopyLegacyOption::Json(fmt)
12237 }
12238 Some(Keyword::MANIFEST) => {
12239 let verbose = self.parse_keyword(Keyword::VERBOSE);
12240 CopyLegacyOption::Manifest { verbose }
12241 }
12242 Some(Keyword::MAXFILESIZE) => {
12243 let _ = self.parse_keyword(Keyword::AS);
12244 let size = self.parse_number_value()?;
12245 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12246 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12247 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12248 _ => None,
12249 };
12250 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
12251 }
12252 Some(Keyword::NULL) => {
12253 let _ = self.parse_keyword(Keyword::AS);
12254 CopyLegacyOption::Null(self.parse_literal_string()?)
12255 }
12256 Some(Keyword::PARALLEL) => {
12257 let enabled = match self.parse_one_of_keywords(&[
12258 Keyword::TRUE,
12259 Keyword::FALSE,
12260 Keyword::ON,
12261 Keyword::OFF,
12262 ]) {
12263 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12264 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12265 _ => None,
12266 };
12267 CopyLegacyOption::Parallel(enabled)
12268 }
12269 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
12270 Some(Keyword::PARTITION) => {
12271 self.expect_keyword(Keyword::BY)?;
12272 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
12273 let include = self.parse_keyword(Keyword::INCLUDE);
12274 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
12275 }
12276 Some(Keyword::REGION) => {
12277 let _ = self.parse_keyword(Keyword::AS);
12278 let region = self.parse_literal_string()?;
12279 CopyLegacyOption::Region(region)
12280 }
12281 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
12282 Some(Keyword::ROWGROUPSIZE) => {
12283 let _ = self.parse_keyword(Keyword::AS);
12284 let file_size = self.parse_file_size()?;
12285 CopyLegacyOption::RowGroupSize(file_size)
12286 }
12287 Some(Keyword::STATUPDATE) => {
12288 let enabled = match self.parse_one_of_keywords(&[
12289 Keyword::TRUE,
12290 Keyword::FALSE,
12291 Keyword::ON,
12292 Keyword::OFF,
12293 ]) {
12294 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12295 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12296 _ => None,
12297 };
12298 CopyLegacyOption::StatUpdate(enabled)
12299 }
12300 Some(Keyword::TIMEFORMAT) => {
12301 let _ = self.parse_keyword(Keyword::AS);
12302 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12303 Some(self.parse_literal_string()?)
12304 } else {
12305 None
12306 };
12307 CopyLegacyOption::TimeFormat(fmt)
12308 }
12309 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
12310 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
12311 _ => self.expected_ref("option", self.peek_token_ref())?,
12312 };
12313 Ok(ret)
12314 }
12315
12316 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12317 let size = self.parse_number_value()?;
12318 let unit = self.maybe_parse_file_size_unit();
12319 Ok(FileSize { size, unit })
12320 }
12321
12322 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12323 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12324 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12325 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12326 _ => None,
12327 }
12328 }
12329
12330 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12331 if self.parse_keyword(Keyword::DEFAULT) {
12332 Ok(IamRoleKind::Default)
12333 } else {
12334 let arn = self.parse_literal_string()?;
12335 Ok(IamRoleKind::Arn(arn))
12336 }
12337 }
12338
    /// Parses one sub-option following the legacy `CSV` COPY option.
    ///
    /// NOTE: the two FORCE arms rely on side-effectful match guards —
    /// `parse_keywords` consumes `NOT NULL` (or `QUOTE`) only when the guard
    /// matches — so the arm order is significant.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                // AS is an optional noise word before the quote character.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                // AS is an optional noise word before the escape character.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            _ => self.expected_ref("csv option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12369
12370 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12371 let s = self.parse_literal_string()?;
12372 if s.len() != 1 {
12373 let loc = self
12374 .tokens
12375 .get(self.index - 1)
12376 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12377 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12378 }
12379 Ok(s.chars().next().unwrap())
12380 }
12381
    /// Parses tab-separated values, e.g. the inline data following
    /// `COPY ... FROM STDIN;`. Thin alias for [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
12387
12388 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
12390 let mut values = vec![];
12391 let mut content = String::new();
12392 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
12393 match t {
12394 Token::Whitespace(Whitespace::Tab) => {
12395 values.push(Some(core::mem::take(&mut content)));
12396 }
12397 Token::Whitespace(Whitespace::Newline) => {
12398 values.push(Some(core::mem::take(&mut content)));
12399 }
12400 Token::Backslash => {
12401 if self.consume_token(&Token::Period) {
12402 return values;
12403 }
12404 if let Token::Word(w) = self.next_token().token {
12405 if w.value == "N" {
12406 values.push(None);
12407 }
12408 }
12409 }
12410 _ => {
12411 content.push_str(&t.to_string());
12412 }
12413 }
12414 }
12415 values
12416 }
12417
    /// Parse a literal value — number, string (in its many quoting forms),
    /// boolean, `NULL`, or a placeholder — returning it together with its
    /// source span as a [`ValueWithSpan`].
    ///
    /// NOTE(review): arm order matters here; keyword handling must run before
    /// the generic word fallback, and the `Colon`/`AtSign` arm consumes a
    /// second token to build a named placeholder.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Small helper: wrap a parsed `Value` with the token's span.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are values only in dialects with boolean literals;
                // otherwise they fall through to "a concrete value" below.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a quoted string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    // Any other quote style is rejected; `expected` always
                    // errors, the trailing `?` just propagates it.
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Plain quoted strings may be implicitly concatenated with
            // following literals, depending on the dialect.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            // Byte-string and raw-string literal variants map 1:1 to Value.
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: consume the immediately following
            // word or integer (no whitespace skipping) as the placeholder name.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // Span covers the sigil through the end of the name.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12535
12536 fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
12537 if self.dialect.supports_string_literal_concatenation() {
12538 while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
12539 self.peek_token_ref().token
12540 {
12541 str.push_str(s);
12542 self.advance_token();
12543 }
12544 } else if self
12545 .dialect
12546 .supports_string_literal_concatenation_with_newline()
12547 {
12548 let mut after_newline = false;
12551 loop {
12552 match self.peek_token_no_skip().token {
12553 Token::Whitespace(Whitespace::Newline) => {
12554 after_newline = true;
12555 self.next_token_no_skip();
12556 }
12557 Token::Whitespace(_) => {
12558 self.next_token_no_skip();
12559 }
12560 Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
12561 if after_newline =>
12562 {
12563 str.push_str(s.clone().as_str());
12564 self.next_token_no_skip();
12565 after_newline = false;
12566 }
12567 _ => break,
12568 }
12569 }
12570 }
12571
12572 str
12573 }
12574
12575 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12577 let value_wrapper = self.parse_value()?;
12578 match &value_wrapper.value {
12579 Value::Number(_, _) => Ok(value_wrapper),
12580 Value::Placeholder(_) => Ok(value_wrapper),
12581 _ => {
12582 self.prev_token();
12583 self.expected_ref("literal number", self.peek_token_ref())
12584 }
12585 }
12586 }
12587
12588 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12591 let next_token = self.next_token();
12592 match next_token.token {
12593 Token::Plus => Ok(Expr::UnaryOp {
12594 op: UnaryOperator::Plus,
12595 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12596 }),
12597 Token::Minus => Ok(Expr::UnaryOp {
12598 op: UnaryOperator::Minus,
12599 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12600 }),
12601 _ => {
12602 self.prev_token();
12603 Ok(Expr::Value(self.parse_number_value()?))
12604 }
12605 }
12606 }
12607
12608 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12609 let next_token = self.next_token();
12610 let span = next_token.span;
12611 match next_token.token {
12612 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12613 Value::SingleQuotedString(s.to_string()).with_span(span),
12614 )),
12615 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12616 Value::DoubleQuotedString(s.to_string()).with_span(span),
12617 )),
12618 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12619 Value::HexStringLiteral(s.to_string()).with_span(span),
12620 )),
12621 unexpected => self.expected(
12622 "a string value",
12623 TokenWithSpan {
12624 token: unexpected,
12625 span,
12626 },
12627 ),
12628 }
12629 }
12630
12631 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12633 let next_token = self.next_token();
12634 match next_token.token {
12635 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12636 _ => self.expected("literal int", next_token),
12637 }
12638 }
12639
12640 fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
12643 let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
12644 let peek_token = parser.peek_token();
12645 let span = peek_token.span;
12646 match peek_token.token {
12647 Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
12648 {
12649 parser.next_token();
12650 Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
12651 }
12652 _ => Ok(Expr::Value(
12653 Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
12654 )),
12655 }
12656 };
12657
12658 Ok(CreateFunctionBody::AsBeforeOptions {
12659 body: parse_string_expr(self)?,
12660 link_symbol: if self.consume_token(&Token::Comma) {
12661 Some(parse_string_expr(self)?)
12662 } else {
12663 None
12664 },
12665 })
12666 }
12667
12668 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
12670 let next_token = self.next_token();
12671 match next_token.token {
12672 Token::Word(Word {
12673 value,
12674 keyword: Keyword::NoKeyword,
12675 ..
12676 }) => Ok(value),
12677 Token::SingleQuotedString(s) => Ok(s),
12678 Token::DoubleQuotedString(s) => Ok(s),
12679 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
12680 Ok(s)
12681 }
12682 Token::UnicodeStringLiteral(s) => Ok(s),
12683 _ => self.expected("literal string", next_token),
12684 }
12685 }
12686
12687 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12689 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12690 Some(Keyword::TRUE) => Ok(true),
12691 Some(Keyword::FALSE) => Ok(false),
12692 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12693 }
12694 }
12695
12696 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
12698 let neg = self.parse_keyword(Keyword::NOT);
12699 let normalized_form = self.maybe_parse(|parser| {
12700 match parser.parse_one_of_keywords(&[
12701 Keyword::NFC,
12702 Keyword::NFD,
12703 Keyword::NFKC,
12704 Keyword::NFKD,
12705 ]) {
12706 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
12707 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
12708 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
12709 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
12710 _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
12711 }
12712 })?;
12713 if self.parse_keyword(Keyword::NORMALIZED) {
12714 return Ok(Expr::IsNormalized {
12715 expr: Box::new(expr),
12716 form: normalized_form,
12717 negated: neg,
12718 });
12719 }
12720 self.expected_ref("unicode normalization form", self.peek_token_ref())
12721 }
12722
12723 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12725 self.expect_token(&Token::LParen)?;
12726 let values = self.parse_comma_separated(|parser| {
12727 let name = parser.parse_literal_string()?;
12728 let e = if parser.consume_token(&Token::Eq) {
12729 let value = parser.parse_number()?;
12730 EnumMember::NamedValue(name, value)
12731 } else {
12732 EnumMember::Name(name)
12733 };
12734 Ok(e)
12735 })?;
12736 self.expect_token(&Token::RParen)?;
12737
12738 Ok(values)
12739 }
12740
12741 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
12743 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
12744 if trailing_bracket.0 {
12745 return parser_err!(
12746 format!("unmatched > after parsing data type {ty}"),
12747 self.peek_token_ref()
12748 );
12749 }
12750
12751 Ok(ty)
12752 }
12753
    /// Parse a data type name (with any dialect-specific parameters/suffixes),
    /// additionally reporting whether a trailing `>` was consumed while
    /// parsing a nested angle-bracketed type (so `ARRAY<INT>>` style input can
    /// be diagnosed by the caller, see `parse_data_type`).
    ///
    /// NOTE(review): several arms call `self.prev_token()` to hand the whole
    /// keyword back to a specialized sub-parser; the cursor position is
    /// load-bearing throughout.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        // Remembered so the error below can point at the type-name token.
        let next_token_index = self.get_current_index();

        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                // --- boolean types ---
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                // --- floating point types ---
                Keyword::FLOAT => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::FloatUnsigned(precision))
                    } else {
                        Ok(DataType::Float(precision))
                    }
                }
                Keyword::REAL => {
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::RealUnsigned)
                    } else {
                        Ok(DataType::Real)
                    }
                }
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    // DOUBLE PRECISION is a distinct standard spelling.
                    if self.parse_keyword(Keyword::PRECISION) {
                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoublePrecisionUnsigned)
                        } else {
                            Ok(DataType::DoublePrecision)
                        }
                    } else {
                        let precision = self.parse_exact_number_optional_precision_scale()?;

                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoubleUnsigned(precision))
                        } else {
                            Ok(DataType::Double(precision))
                        }
                    }
                }
                // --- integer types: optional display width, MySQL-style
                // UNSIGNED suffix, and (where the dialect allows) a silently
                // consumed SIGNED suffix ---
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                // --- DuckDb/ClickHouse-style sized integer aliases ---
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // --- character string types ---
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                // --- binary string types ---
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                // --- date/time types ---
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // Hand the whole keyword back to the DateTime64 sub-parser.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => {
                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
                }
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => {
                    // Field/precision options (e.g. INTERVAL DAY TO SECOND(3))
                    // are only consumed when the dialect supports them.
                    if self.dialect.supports_interval_options() {
                        let fields = self.maybe_parse_optional_interval_fields()?;
                        let precision = self.parse_optional_precision()?;
                        Ok(DataType::Interval { fields, precision })
                    } else {
                        Ok(DataType::Interval {
                            fields: None,
                            precision: None,
                        })
                    }
                }
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    // ClickHouse FixedString(N): the length is mandatory.
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // --- exact numeric types ---
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecimalUnsigned(precision))
                    } else {
                        Ok(DataType::Decimal(precision))
                    }
                }
                Keyword::DEC => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecUnsigned(precision))
                    } else {
                        Ok(DataType::Dec(precision))
                    }
                }
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                // --- enum/set types ---
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                // --- composite / container types ---
                Keyword::ARRAY => {
                    if self.dialect.supports_array_typedef_without_element_type() {
                        // e.g. Snowflake: bare ARRAY with no element type.
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        // ClickHouse: Array(T)
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        // BigQuery-style ARRAY<T>; `>>` from a nested generic
                        // may satisfy the closing bracket here.
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
                {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                Keyword::SETOF => {
                    // Postgres SETOF <type>.
                    let inner = self.parse_data_type()?;
                    Ok(DataType::SetOf(Box::new(inner)))
                }
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                Keyword::TABLE => {
                    // RETURNS TABLE: with or without an explicit column list.
                    if self.peek_token_ref().token == Token::LParen {
                        let columns = self.parse_returns_table_columns()?;
                        Ok(DataType::Table(Some(columns)))
                    } else {
                        Ok(DataType::Table(None))
                    }
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsVector)
                }
                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsQuery)
                }
                _ => {
                    // Fallback: treat it as a custom (possibly qualified) type
                    // name with optional modifiers, e.g. `my_schema.geometry(4326)`.
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Postgres-style `T[]` / `T[N]` suffixes; each pair of brackets wraps
        // the type in another array level.
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
13182
    /// Parse a single column definition inside a `RETURNS TABLE (...)` clause.
    /// Currently identical to an ordinary column definition; kept as a
    /// separate entry point so the two syntaxes can diverge later.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
13186
13187 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13188 self.expect_token(&Token::LParen)?;
13189 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13190 self.expect_token(&Token::RParen)?;
13191 Ok(columns)
13192 }
13193
13194 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13196 self.expect_token(&Token::LParen)?;
13197 let mut values = Vec::new();
13198 loop {
13199 let next_token = self.next_token();
13200 match next_token.token {
13201 Token::SingleQuotedString(value) => values.push(value),
13202 _ => self.expected("a string", next_token)?,
13203 }
13204 let next_token = self.next_token();
13205 match next_token.token {
13206 Token::Comma => (),
13207 Token::RParen => break,
13208 _ => self.expected(", or }", next_token)?,
13209 }
13210 }
13211 Ok(values)
13212 }
13213
13214 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13216 let ident = self.parse_identifier()?;
13217 self.expect_keyword_is(Keyword::AS)?;
13218 let alias = self.parse_identifier()?;
13219 Ok(IdentWithAlias { ident, alias })
13220 }
13221
13222 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13224 let ident = self.parse_identifier()?;
13225 let _after_as = self.parse_keyword(Keyword::AS);
13226 let alias = self.parse_identifier()?;
13227 Ok(IdentWithAlias { ident, alias })
13228 }
13229
13230 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13232 self.parse_comma_separated(|parser| {
13233 parser.expect_token(&Token::LParen)?;
13234 let query = parser.parse_query()?;
13235 parser.expect_token(&Token::RParen)?;
13236 Ok(*query)
13237 })
13238 }
13239
13240 fn parse_distinct_required_set_quantifier(
13242 &mut self,
13243 operator_name: &str,
13244 ) -> Result<SetQuantifier, ParserError> {
13245 let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
13246 match quantifier {
13247 SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
13248 _ => Err(ParserError::ParserError(format!(
13249 "{operator_name} pipe operator requires DISTINCT modifier",
13250 ))),
13251 }
13252 }
13253
13254 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13256 if self.parse_keyword(Keyword::AS) {
13257 Ok(Some(self.parse_identifier()?))
13258 } else {
13259 self.maybe_parse(|parser| parser.parse_identifier())
13261 }
13262 }
13263
13264 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13266 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13267 parser.dialect.is_select_item_alias(explicit, kw, parser)
13268 }
13269 self.parse_optional_alias_inner(None, validator)
13270 }
13271
13272 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
13276 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13277 parser.dialect.is_table_factor_alias(explicit, kw, parser)
13278 }
13279 let explicit = self.peek_keyword(Keyword::AS);
13280 match self.parse_optional_alias_inner(None, validator)? {
13281 Some(name) => {
13282 let columns = self.parse_table_alias_column_defs()?;
13283 Ok(Some(TableAlias {
13284 explicit,
13285 name,
13286 columns,
13287 }))
13288 }
13289 None => Ok(None),
13290 }
13291 }
13292
13293 fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
13294 let mut hints = vec![];
13295 while let Some(hint_type) =
13296 self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
13297 {
13298 let hint_type = match hint_type {
13299 Keyword::USE => TableIndexHintType::Use,
13300 Keyword::IGNORE => TableIndexHintType::Ignore,
13301 Keyword::FORCE => TableIndexHintType::Force,
13302 _ => {
13303 return self.expected_ref(
13304 "expected to match USE/IGNORE/FORCE keyword",
13305 self.peek_token_ref(),
13306 )
13307 }
13308 };
13309 let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
13310 Some(Keyword::INDEX) => TableIndexType::Index,
13311 Some(Keyword::KEY) => TableIndexType::Key,
13312 _ => {
13313 return self
13314 .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
13315 }
13316 };
13317 let for_clause = if self.parse_keyword(Keyword::FOR) {
13318 let clause = if self.parse_keyword(Keyword::JOIN) {
13319 TableIndexHintForClause::Join
13320 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13321 TableIndexHintForClause::OrderBy
13322 } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
13323 TableIndexHintForClause::GroupBy
13324 } else {
13325 return self.expected_ref(
13326 "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
13327 self.peek_token_ref(),
13328 );
13329 };
13330 Some(clause)
13331 } else {
13332 None
13333 };
13334
13335 self.expect_token(&Token::LParen)?;
13336 let index_names = if self.peek_token_ref().token != Token::RParen {
13337 self.parse_comma_separated(Parser::parse_identifier)?
13338 } else {
13339 vec![]
13340 };
13341 self.expect_token(&Token::RParen)?;
13342 hints.push(TableIndexHints {
13343 hint_type,
13344 index_type,
13345 for_clause,
13346 index_names,
13347 });
13348 }
13349 Ok(hints)
13350 }
13351
13352 pub fn parse_optional_alias(
13356 &mut self,
13357 reserved_kwds: &[Keyword],
13358 ) -> Result<Option<Ident>, ParserError> {
13359 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13360 false
13361 }
13362 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13363 }
13364
    /// Shared worker for the optional-alias parsers.
    ///
    /// Consumes an optional `AS`, then accepts as an alias:
    /// - a bare word, when `reserved_kwds` was supplied and either `AS` was
    ///   explicit or the word is not in the reserved list;
    /// - a bare word the `validator` callback admits (dialect-specific rules);
    /// - a single- or double-quoted string.
    ///
    /// Returns `Ok(None)` (rewinding one token) when no alias is present, and
    /// an error when `AS` was seen but no identifier follows it.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Reserved-word based acceptance: only active when a reserved list
            // was provided; an explicit AS overrides the reserved check.
            Token::Word(w)
                if reserved_kwds.is_some()
                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Dialect-specific acceptance via the validator callback.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Quoted strings are always usable as aliases.
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                // An explicit AS must be followed by an identifier.
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                // No alias: put the token back.
                self.prev_token();
                Ok(None)
            }
        }
    }
13409
    /// Parse an optional `GROUP BY` clause.
    ///
    /// Supports `GROUP BY ALL` (expressions = `None`), an expression list,
    /// dialect-specific `WITH ROLLUP/CUBE/TOTALS` modifiers (repeatable), and
    /// a trailing `GROUPING SETS (...)` modifier. Returns `Ok(None)` when the
    /// `GROUP BY` keywords are absent.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` carries no expression list.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Each iteration consumes one `WITH <modifier>` pair.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords only returns one
                        // of the three keywords above.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            // `GROUP BY expr, ... GROUPING SETS ((...), ...)` — each set is
            // either a parenthesized tuple or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13466
13467 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
13469 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13470 let order_by =
13471 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
13472 let order_by_options = self.parse_order_by_options()?;
13473 OrderBy {
13474 kind: OrderByKind::All(order_by_options),
13475 interpolate: None,
13476 }
13477 } else {
13478 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13479 let interpolate = if self.dialect.supports_interpolate() {
13480 self.parse_interpolations()?
13481 } else {
13482 None
13483 };
13484 OrderBy {
13485 kind: OrderByKind::Expressions(exprs),
13486 interpolate,
13487 }
13488 };
13489 Ok(Some(order_by))
13490 } else {
13491 Ok(None)
13492 }
13493 }
13494
13495 fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
13496 let mut offset = if self.parse_keyword(Keyword::OFFSET) {
13497 Some(self.parse_offset()?)
13498 } else {
13499 None
13500 };
13501
13502 let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
13503 let expr = self.parse_limit()?;
13504
13505 if self.dialect.supports_limit_comma()
13506 && offset.is_none()
13507 && expr.is_some() && self.consume_token(&Token::Comma)
13509 {
13510 let offset = expr.ok_or_else(|| {
13511 ParserError::ParserError(
13512 "Missing offset for LIMIT <offset>, <limit>".to_string(),
13513 )
13514 })?;
13515 return Ok(Some(LimitClause::OffsetCommaLimit {
13516 offset,
13517 limit: self.parse_expr()?,
13518 }));
13519 }
13520
13521 let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
13522 Some(self.parse_comma_separated(Parser::parse_expr)?)
13523 } else {
13524 None
13525 };
13526
13527 (Some(expr), limit_by)
13528 } else {
13529 (None, None)
13530 };
13531
13532 if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
13533 offset = Some(self.parse_offset()?);
13534 }
13535
13536 if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
13537 Ok(Some(LimitClause::LimitOffset {
13538 limit: limit.unwrap_or_default(),
13539 offset,
13540 limit_by: limit_by.unwrap_or_default(),
13541 }))
13542 } else {
13543 Ok(None)
13544 }
13545 }
13546
13547 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13550 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13551 let fn_name = self.parse_object_name(false)?;
13552 self.parse_function_call(fn_name)
13553 .map(TableObject::TableFunction)
13554 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13555 self.parse_parenthesized(|p| p.parse_query())
13556 .map(TableObject::TableQuery)
13557 } else {
13558 self.parse_object_name(false).map(TableObject::TableName)
13559 }
13560 }
13561
    /// Parses a possibly-qualified object name such as `foo` or
    /// `db.schema.table`.
    ///
    /// `in_table_clause` indicates the name appears in a table position,
    /// which enables BigQuery's unquoted hyphenated identifiers. Wildcard
    /// parts (`db.*`) are not permitted through this entry point.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13571
    /// Core implementation of [`Self::parse_object_name`].
    ///
    /// * `in_table_clause` — in BigQuery table position, unquoted hyphenated
    ///   identifiers (`my-project.dataset.table`) are accepted;
    /// * `allow_wildcards` — whether `*` may appear as a name part.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                // `end_with_period` means the separating period was already
                // consumed as part of a trailing number token.
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // `*` as a name part, e.g. `GRANT ... ON db.*`.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    // NOTE(review): this arm looks unreachable — the same
                    // condition is already handled by the outer branch above.
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // `db..table`: insert an empty part for the omitted
                    // schema; the period itself is consumed below.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a function call as a name part;
                    // the dialect hook decides based on the identifier seen.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery accepts a quoted identifier that itself contains periods;
        // split such parts into separate identifiers, preserving the quote
        // style and span of the original part.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13668
13669 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
13671 let mut idents = vec![];
13672 loop {
13673 let token = self.peek_token_ref();
13674 match &token.token {
13675 Token::Word(w) => {
13676 idents.push(w.to_ident(token.span));
13677 }
13678 Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
13679 break
13680 }
13681 _ => {}
13682 }
13683 self.advance_token();
13684 }
13685 Ok(idents)
13686 }
13687
13688 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
13728 let mut idents = vec![];
13729
13730 let next_token = self.next_token();
13732 match next_token.token {
13733 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13734 Token::EOF => {
13735 return Err(ParserError::ParserError(
13736 "Empty input when parsing identifier".to_string(),
13737 ))?
13738 }
13739 token => {
13740 return Err(ParserError::ParserError(format!(
13741 "Unexpected token in identifier: {token}"
13742 )))?
13743 }
13744 };
13745
13746 loop {
13748 match self.next_token().token {
13749 Token::Period => {
13751 let next_token = self.next_token();
13752 match next_token.token {
13753 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13754 Token::EOF => {
13755 return Err(ParserError::ParserError(
13756 "Trailing period in identifier".to_string(),
13757 ))?
13758 }
13759 token => {
13760 return Err(ParserError::ParserError(format!(
13761 "Unexpected token following period in identifier: {token}"
13762 )))?
13763 }
13764 }
13765 }
13766 Token::EOF => break,
13767 token => {
13768 return Err(ParserError::ParserError(format!(
13769 "Unexpected token in identifier: {token}"
13770 )))?;
13771 }
13772 }
13773 }
13774
13775 Ok(idents)
13776 }
13777
13778 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
13780 let next_token = self.next_token();
13781 match next_token.token {
13782 Token::Word(w) => Ok(w.into_ident(next_token.span)),
13783 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
13784 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
13785 _ => self.expected("identifier", next_token),
13786 }
13787 }
13788
    /// Parses a BigQuery-style unquoted hyphenated identifier such as
    /// `my-project`.
    ///
    /// Returns the identifier plus a flag that is `true` when the identifier
    /// ended with a period that the tokenizer had already consumed as part
    /// of a trailing number token (e.g. `foo-123.bar`), so the caller knows
    /// another name part follows.
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted words may be continued with hyphens.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        // The token immediately after the `-` (no whitespace
                        // skipping) must continue the identifier.
                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // The tokenizer may have greedily consumed a
                                // trailing period into the number (`123.`);
                                // keep only the digits and report the period
                                // to the caller via the `true` flag.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A number that is not followed by a period
                                // must be followed by whitespace (or EOF),
                                // enforced after the loop.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement recorded above.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to ordinary identifier parsing.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
13866
13867 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
13869 if self.consume_token(&Token::LParen) {
13870 if self.peek_token_ref().token == Token::RParen {
13871 self.next_token();
13872 Ok(vec![])
13873 } else {
13874 let cols = self.parse_comma_separated_with_trailing_commas(
13875 Parser::parse_view_column,
13876 self.dialect.supports_column_definition_trailing_commas(),
13877 Self::is_reserved_for_column_alias,
13878 )?;
13879 self.expect_token(&Token::RParen)?;
13880 Ok(cols)
13881 }
13882 } else {
13883 Ok(vec![])
13884 }
13885 }
13886
13887 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
13889 let name = self.parse_identifier()?;
13890 let options = self.parse_view_column_options()?;
13891 let data_type = if dialect_of!(self is ClickHouseDialect) {
13892 Some(self.parse_data_type()?)
13893 } else {
13894 None
13895 };
13896 Ok(ViewColumnDef {
13897 name,
13898 data_type,
13899 options,
13900 })
13901 }
13902
13903 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
13904 let mut options = Vec::new();
13905 loop {
13906 let option = self.parse_optional_column_option()?;
13907 if let Some(option) = option {
13908 options.push(option);
13909 } else {
13910 break;
13911 }
13912 }
13913 if options.is_empty() {
13914 Ok(None)
13915 } else if self.dialect.supports_space_separated_column_options() {
13916 Ok(Some(ColumnOptions::SpaceSeparated(options)))
13917 } else {
13918 Ok(Some(ColumnOptions::CommaSeparated(options)))
13919 }
13920 }
13921
13922 pub fn parse_parenthesized_column_list(
13925 &mut self,
13926 optional: IsOptional,
13927 allow_empty: bool,
13928 ) -> Result<Vec<Ident>, ParserError> {
13929 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
13930 }
13931
13932 pub fn parse_parenthesized_compound_identifier_list(
13934 &mut self,
13935 optional: IsOptional,
13936 allow_empty: bool,
13937 ) -> Result<Vec<Expr>, ParserError> {
13938 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13939 Ok(Expr::CompoundIdentifier(
13940 p.parse_period_separated(|p| p.parse_identifier())?,
13941 ))
13942 })
13943 }
13944
13945 fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
13948 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
13949 p.parse_create_index_expr()
13950 })
13951 }
13952
13953 pub fn parse_parenthesized_qualified_column_list(
13956 &mut self,
13957 optional: IsOptional,
13958 allow_empty: bool,
13959 ) -> Result<Vec<ObjectName>, ParserError> {
13960 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13961 p.parse_object_name(true)
13962 })
13963 }
13964
13965 fn parse_parenthesized_column_list_inner<F, T>(
13968 &mut self,
13969 optional: IsOptional,
13970 allow_empty: bool,
13971 mut f: F,
13972 ) -> Result<Vec<T>, ParserError>
13973 where
13974 F: FnMut(&mut Parser) -> Result<T, ParserError>,
13975 {
13976 if self.consume_token(&Token::LParen) {
13977 if allow_empty && self.peek_token_ref().token == Token::RParen {
13978 self.next_token();
13979 Ok(vec![])
13980 } else {
13981 let cols = self.parse_comma_separated(|p| f(p))?;
13982 self.expect_token(&Token::RParen)?;
13983 Ok(cols)
13984 }
13985 } else if optional == Optional {
13986 Ok(vec![])
13987 } else {
13988 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
13989 }
13990 }
13991
13992 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
13994 if self.consume_token(&Token::LParen) {
13995 let cols = self.parse_comma_separated(|p| {
13996 let name = p.parse_identifier()?;
13997 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
13998 Ok(TableAliasColumnDef { name, data_type })
13999 })?;
14000 self.expect_token(&Token::RParen)?;
14001 Ok(cols)
14002 } else {
14003 Ok(vec![])
14004 }
14005 }
14006
14007 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
14009 self.expect_token(&Token::LParen)?;
14010 let n = self.parse_literal_uint()?;
14011 self.expect_token(&Token::RParen)?;
14012 Ok(n)
14013 }
14014
14015 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
14017 if self.consume_token(&Token::LParen) {
14018 let n = self.parse_literal_uint()?;
14019 self.expect_token(&Token::RParen)?;
14020 Ok(Some(n))
14021 } else {
14022 Ok(None)
14023 }
14024 }
14025
    /// Attempts to parse the optional field qualifier of an `INTERVAL` type,
    /// e.g. `YEAR`, `DAY TO SECOND`, `MINUTE TO SECOND`.
    ///
    /// Returns `Ok(None)` when no interval-field keyword follows.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            // YEAR or YEAR TO MONTH.
            Some(Keyword::YEAR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            // DAY, or DAY TO HOUR/MINUTE/SECOND.
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        _ => {
                            // Unreachable: `expect_one_of_keywords` only
                            // returns a keyword from its list.
                            self.prev_token();
                            self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            // HOUR, or HOUR TO MINUTE/SECOND.
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        _ => {
                            // Unreachable, as above.
                            self.prev_token();
                            self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            // MINUTE or MINUTE TO SECOND.
            Some(Keyword::MINUTE) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            // MONTH and SECOND take no `TO` range qualifier here.
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            Some(_) => {
                // Unreachable: `parse_one_of_keywords` only returns a
                // keyword from the list above.
                self.prev_token();
                self.expected_ref(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token_ref(),
                )
            }
            None => Ok(None),
        }
    }
14104
14105 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
14113 self.expect_keyword_is(Keyword::DATETIME64)?;
14114 self.expect_token(&Token::LParen)?;
14115 let precision = self.parse_literal_uint()?;
14116 let time_zone = if self.consume_token(&Token::Comma) {
14117 Some(self.parse_literal_string()?)
14118 } else {
14119 None
14120 };
14121 self.expect_token(&Token::RParen)?;
14122 Ok((precision, time_zone))
14123 }
14124
14125 pub fn parse_optional_character_length(
14127 &mut self,
14128 ) -> Result<Option<CharacterLength>, ParserError> {
14129 if self.consume_token(&Token::LParen) {
14130 let character_length = self.parse_character_length()?;
14131 self.expect_token(&Token::RParen)?;
14132 Ok(Some(character_length))
14133 } else {
14134 Ok(None)
14135 }
14136 }
14137
14138 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
14140 if self.consume_token(&Token::LParen) {
14141 let binary_length = self.parse_binary_length()?;
14142 self.expect_token(&Token::RParen)?;
14143 Ok(Some(binary_length))
14144 } else {
14145 Ok(None)
14146 }
14147 }
14148
14149 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
14151 if self.parse_keyword(Keyword::MAX) {
14152 return Ok(CharacterLength::Max);
14153 }
14154 let length = self.parse_literal_uint()?;
14155 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
14156 Some(CharLengthUnits::Characters)
14157 } else if self.parse_keyword(Keyword::OCTETS) {
14158 Some(CharLengthUnits::Octets)
14159 } else {
14160 None
14161 };
14162 Ok(CharacterLength::IntegerLength { length, unit })
14163 }
14164
14165 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
14167 if self.parse_keyword(Keyword::MAX) {
14168 return Ok(BinaryLength::Max);
14169 }
14170 let length = self.parse_literal_uint()?;
14171 Ok(BinaryLength::IntegerLength { length })
14172 }
14173
14174 pub fn parse_optional_precision_scale(
14176 &mut self,
14177 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14178 if self.consume_token(&Token::LParen) {
14179 let n = self.parse_literal_uint()?;
14180 let scale = if self.consume_token(&Token::Comma) {
14181 Some(self.parse_literal_uint()?)
14182 } else {
14183 None
14184 };
14185 self.expect_token(&Token::RParen)?;
14186 Ok((Some(n), scale))
14187 } else {
14188 Ok((None, None))
14189 }
14190 }
14191
14192 pub fn parse_exact_number_optional_precision_scale(
14194 &mut self,
14195 ) -> Result<ExactNumberInfo, ParserError> {
14196 if self.consume_token(&Token::LParen) {
14197 let precision = self.parse_literal_uint()?;
14198 let scale = if self.consume_token(&Token::Comma) {
14199 Some(self.parse_signed_integer()?)
14200 } else {
14201 None
14202 };
14203
14204 self.expect_token(&Token::RParen)?;
14205
14206 match scale {
14207 None => Ok(ExactNumberInfo::Precision(precision)),
14208 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14209 }
14210 } else {
14211 Ok(ExactNumberInfo::None)
14212 }
14213 }
14214
14215 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14217 let is_negative = self.consume_token(&Token::Minus);
14218
14219 if !is_negative {
14220 let _ = self.consume_token(&Token::Plus);
14221 }
14222
14223 let current_token = self.peek_token_ref();
14224 match ¤t_token.token {
14225 Token::Number(s, _) => {
14226 let s = s.clone();
14227 let span_start = current_token.span.start;
14228 self.advance_token();
14229 let value = Self::parse::<i64>(s, span_start)?;
14230 Ok(if is_negative { -value } else { value })
14231 }
14232 _ => self.expected_ref("number", current_token),
14233 }
14234 }
14235
14236 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
14238 if self.consume_token(&Token::LParen) {
14239 let mut modifiers = Vec::new();
14240 loop {
14241 let next_token = self.next_token();
14242 match next_token.token {
14243 Token::Word(w) => modifiers.push(w.to_string()),
14244 Token::Number(n, _) => modifiers.push(n),
14245 Token::SingleQuotedString(s) => modifiers.push(s),
14246
14247 Token::Comma => {
14248 continue;
14249 }
14250 Token::RParen => {
14251 break;
14252 }
14253 _ => self.expected("type modifiers", next_token)?,
14254 }
14255 }
14256
14257 Ok(Some(modifiers))
14258 } else {
14259 Ok(None)
14260 }
14261 }
14262
14263 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14265 where
14266 F: FnOnce(Box<DataType>) -> DataType,
14267 {
14268 self.expect_token(&Token::LParen)?;
14269 let inside_type = self.parse_data_type()?;
14270 self.expect_token(&Token::RParen)?;
14271 Ok(parent_type(inside_type.into()))
14272 }
14273
14274 fn parse_delete_setexpr_boxed(
14278 &mut self,
14279 delete_token: TokenWithSpan,
14280 ) -> Result<Box<SetExpr>, ParserError> {
14281 Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
14282 }
14283
    /// Parses the body of a `DELETE` statement (the `DELETE` keyword itself
    /// has already been consumed; its token is passed in for span tracking).
    ///
    /// Supports multi-table deletes (`DELETE t1, t2 FROM ...`), `FROM`-less
    /// forms (BigQuery/Oracle/generic), plus optional `USING`, `OUTPUT`,
    /// `WHERE`, `RETURNING`, `ORDER BY`, and `LIMIT` clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Determine the pre-FROM table list and whether FROM was written.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                // These dialects allow `DELETE <table>` with no FROM keyword.
                (vec![], false)
            } else {
                // `DELETE t1, t2 FROM ...`: tables listed before FROM.
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        // Optional OUTPUT clause (consumed only if present).
        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // Trailing ORDER BY / LIMIT, as allowed by some dialects on DELETE.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            // Record whether FROM was explicit so the AST round-trips.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
14348
14349 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
14352 let modifier_keyword =
14353 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
14354
14355 let id = self.parse_literal_uint()?;
14356
14357 let modifier = match modifier_keyword {
14358 Some(Keyword::CONNECTION) => Some(KillType::Connection),
14359 Some(Keyword::QUERY) => Some(KillType::Query),
14360 Some(Keyword::MUTATION) => {
14361 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
14362 Some(KillType::Mutation)
14363 } else {
14364 self.expected_ref(
14365 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
14366 self.peek_token_ref(),
14367 )?
14368 }
14369 }
14370 _ => None,
14371 };
14372
14373 Ok(Statement::Kill { modifier, id })
14374 }
14375
    /// Parses the remainder of an `EXPLAIN`/`DESCRIBE`/`DESC` statement;
    /// `describe_alias` records which spelling introduced it.
    ///
    /// If a full statement follows, produces [`Statement::Explain`];
    /// otherwise falls back to describing a table
    /// ([`Statement::ExplainTable`]).
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options (`EXPLAIN (opt, ...)`): only for
        // the EXPLAIN spelling and only when the dialect supports them.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            // Classic `EXPLAIN [ANALYZE] [VERBOSE] [FORMAT ...]` prefix.
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // A nested EXPLAIN is rejected: it must be the root statement.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // No parseable statement: treat as a table description.
                // Hive allows EXTENDED/FORMATTED before the table name.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    // Remember whether TABLE was written so it round-trips.
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14446
    /// Parses a query expression: an optional `WITH` clause followed by
    /// either a DML body (`INSERT`/`UPDATE`/`DELETE`/`MERGE`) or a regular
    /// query body plus its trailing clauses (`ORDER BY`, limit clause,
    /// `SETTINGS`, `FETCH`, `FOR`, `FORMAT`, pipe operators).
    ///
    /// Queries nest (subqueries, CTEs), so the recursion counter is
    /// decremented for the duration of this call; `_guard` restores the
    /// remaining depth when dropped.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // A DML statement may form the entire body (e.g. `WITH cte AS (...)
        // INSERT ...`); no trailing query clauses apply in that case.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // Each `FOR` is either a for-clause (at most one; terminates
            // the loop) or a lock specifier (any number may appear).
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // Trailing `FORMAT <ident>` / `FORMAT NULL`, where supported.
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            // BigQuery-style `|>` pipe operators, where supported.
            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14578
14579 fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
14580 let mut pipe_operators = Vec::new();
14581
14582 while self.consume_token(&Token::VerticalBarRightAngleBracket) {
14583 let kw = self.expect_one_of_keywords(&[
14584 Keyword::SELECT,
14585 Keyword::EXTEND,
14586 Keyword::SET,
14587 Keyword::DROP,
14588 Keyword::AS,
14589 Keyword::WHERE,
14590 Keyword::LIMIT,
14591 Keyword::AGGREGATE,
14592 Keyword::ORDER,
14593 Keyword::TABLESAMPLE,
14594 Keyword::RENAME,
14595 Keyword::UNION,
14596 Keyword::INTERSECT,
14597 Keyword::EXCEPT,
14598 Keyword::CALL,
14599 Keyword::PIVOT,
14600 Keyword::UNPIVOT,
14601 Keyword::JOIN,
14602 Keyword::INNER,
14603 Keyword::LEFT,
14604 Keyword::RIGHT,
14605 Keyword::FULL,
14606 Keyword::CROSS,
14607 ])?;
14608 match kw {
14609 Keyword::SELECT => {
14610 let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
14611 pipe_operators.push(PipeOperator::Select { exprs })
14612 }
14613 Keyword::EXTEND => {
14614 let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
14615 pipe_operators.push(PipeOperator::Extend { exprs })
14616 }
14617 Keyword::SET => {
14618 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
14619 pipe_operators.push(PipeOperator::Set { assignments })
14620 }
14621 Keyword::DROP => {
14622 let columns = self.parse_identifiers()?;
14623 pipe_operators.push(PipeOperator::Drop { columns })
14624 }
14625 Keyword::AS => {
14626 let alias = self.parse_identifier()?;
14627 pipe_operators.push(PipeOperator::As { alias })
14628 }
14629 Keyword::WHERE => {
14630 let expr = self.parse_expr()?;
14631 pipe_operators.push(PipeOperator::Where { expr })
14632 }
14633 Keyword::LIMIT => {
14634 let expr = self.parse_expr()?;
14635 let offset = if self.parse_keyword(Keyword::OFFSET) {
14636 Some(self.parse_expr()?)
14637 } else {
14638 None
14639 };
14640 pipe_operators.push(PipeOperator::Limit { expr, offset })
14641 }
14642 Keyword::AGGREGATE => {
14643 let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
14644 vec![]
14645 } else {
14646 self.parse_comma_separated(|parser| {
14647 parser.parse_expr_with_alias_and_order_by()
14648 })?
14649 };
14650
14651 let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
14652 self.parse_comma_separated(|parser| {
14653 parser.parse_expr_with_alias_and_order_by()
14654 })?
14655 } else {
14656 vec![]
14657 };
14658
14659 pipe_operators.push(PipeOperator::Aggregate {
14660 full_table_exprs,
14661 group_by_expr,
14662 })
14663 }
14664 Keyword::ORDER => {
14665 self.expect_one_of_keywords(&[Keyword::BY])?;
14666 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
14667 pipe_operators.push(PipeOperator::OrderBy { exprs })
14668 }
14669 Keyword::TABLESAMPLE => {
14670 let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
14671 pipe_operators.push(PipeOperator::TableSample { sample });
14672 }
14673 Keyword::RENAME => {
14674 let mappings =
14675 self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
14676 pipe_operators.push(PipeOperator::Rename { mappings });
14677 }
14678 Keyword::UNION => {
14679 let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
14680 let queries = self.parse_pipe_operator_queries()?;
14681 pipe_operators.push(PipeOperator::Union {
14682 set_quantifier,
14683 queries,
14684 });
14685 }
14686 Keyword::INTERSECT => {
14687 let set_quantifier =
14688 self.parse_distinct_required_set_quantifier("INTERSECT")?;
14689 let queries = self.parse_pipe_operator_queries()?;
14690 pipe_operators.push(PipeOperator::Intersect {
14691 set_quantifier,
14692 queries,
14693 });
14694 }
14695 Keyword::EXCEPT => {
14696 let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
14697 let queries = self.parse_pipe_operator_queries()?;
14698 pipe_operators.push(PipeOperator::Except {
14699 set_quantifier,
14700 queries,
14701 });
14702 }
14703 Keyword::CALL => {
14704 let function_name = self.parse_object_name(false)?;
14705 let function_expr = self.parse_function(function_name)?;
14706 if let Expr::Function(function) = function_expr {
14707 let alias = self.parse_identifier_optional_alias()?;
14708 pipe_operators.push(PipeOperator::Call { function, alias });
14709 } else {
14710 return Err(ParserError::ParserError(
14711 "Expected function call after CALL".to_string(),
14712 ));
14713 }
14714 }
14715 Keyword::PIVOT => {
14716 self.expect_token(&Token::LParen)?;
14717 let aggregate_functions =
14718 self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
14719 self.expect_keyword_is(Keyword::FOR)?;
14720 let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
14721 self.expect_keyword_is(Keyword::IN)?;
14722
14723 self.expect_token(&Token::LParen)?;
14724 let value_source = if self.parse_keyword(Keyword::ANY) {
14725 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14726 self.parse_comma_separated(Parser::parse_order_by_expr)?
14727 } else {
14728 vec![]
14729 };
14730 PivotValueSource::Any(order_by)
14731 } else if self.peek_sub_query() {
14732 PivotValueSource::Subquery(self.parse_query()?)
14733 } else {
14734 PivotValueSource::List(
14735 self.parse_comma_separated(Self::parse_expr_with_alias)?,
14736 )
14737 };
14738 self.expect_token(&Token::RParen)?;
14739 self.expect_token(&Token::RParen)?;
14740
14741 let alias = self.parse_identifier_optional_alias()?;
14742
14743 pipe_operators.push(PipeOperator::Pivot {
14744 aggregate_functions,
14745 value_column,
14746 value_source,
14747 alias,
14748 });
14749 }
14750 Keyword::UNPIVOT => {
14751 self.expect_token(&Token::LParen)?;
14752 let value_column = self.parse_identifier()?;
14753 self.expect_keyword(Keyword::FOR)?;
14754 let name_column = self.parse_identifier()?;
14755 self.expect_keyword(Keyword::IN)?;
14756
14757 self.expect_token(&Token::LParen)?;
14758 let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
14759 self.expect_token(&Token::RParen)?;
14760
14761 self.expect_token(&Token::RParen)?;
14762
14763 let alias = self.parse_identifier_optional_alias()?;
14764
14765 pipe_operators.push(PipeOperator::Unpivot {
14766 value_column,
14767 name_column,
14768 unpivot_columns,
14769 alias,
14770 });
14771 }
14772 Keyword::JOIN
14773 | Keyword::INNER
14774 | Keyword::LEFT
14775 | Keyword::RIGHT
14776 | Keyword::FULL
14777 | Keyword::CROSS => {
14778 self.prev_token();
14779 let mut joins = self.parse_joins()?;
14780 if joins.len() != 1 {
14781 return Err(ParserError::ParserError(
14782 "Join pipe operator must have a single join".to_string(),
14783 ));
14784 }
14785 let join = joins.swap_remove(0);
14786 pipe_operators.push(PipeOperator::Join(join))
14787 }
14788 unhandled => {
14789 return Err(ParserError::ParserError(format!(
14790 "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
14791 )))
14792 }
14793 }
14794 }
14795 Ok(pipe_operators)
14796 }
14797
14798 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
14799 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
14800 {
14801 let key_values = self.parse_comma_separated(|p| {
14802 let key = p.parse_identifier()?;
14803 p.expect_token(&Token::Eq)?;
14804 let value = p.parse_expr()?;
14805 Ok(Setting { key, value })
14806 })?;
14807 Some(key_values)
14808 } else {
14809 None
14810 };
14811 Ok(settings)
14812 }
14813
14814 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
14816 if self.parse_keyword(Keyword::XML) {
14817 Ok(Some(self.parse_for_xml()?))
14818 } else if self.parse_keyword(Keyword::JSON) {
14819 Ok(Some(self.parse_for_json()?))
14820 } else if self.parse_keyword(Keyword::BROWSE) {
14821 Ok(Some(ForClause::Browse))
14822 } else {
14823 Ok(None)
14824 }
14825 }
14826
14827 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
14829 let for_xml = if self.parse_keyword(Keyword::RAW) {
14830 let mut element_name = None;
14831 if self.peek_token_ref().token == Token::LParen {
14832 self.expect_token(&Token::LParen)?;
14833 element_name = Some(self.parse_literal_string()?);
14834 self.expect_token(&Token::RParen)?;
14835 }
14836 ForXml::Raw(element_name)
14837 } else if self.parse_keyword(Keyword::AUTO) {
14838 ForXml::Auto
14839 } else if self.parse_keyword(Keyword::EXPLICIT) {
14840 ForXml::Explicit
14841 } else if self.parse_keyword(Keyword::PATH) {
14842 let mut element_name = None;
14843 if self.peek_token_ref().token == Token::LParen {
14844 self.expect_token(&Token::LParen)?;
14845 element_name = Some(self.parse_literal_string()?);
14846 self.expect_token(&Token::RParen)?;
14847 }
14848 ForXml::Path(element_name)
14849 } else {
14850 return Err(ParserError::ParserError(
14851 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
14852 ));
14853 };
14854 let mut elements = false;
14855 let mut binary_base64 = false;
14856 let mut root = None;
14857 let mut r#type = false;
14858 while self.peek_token_ref().token == Token::Comma {
14859 self.next_token();
14860 if self.parse_keyword(Keyword::ELEMENTS) {
14861 elements = true;
14862 } else if self.parse_keyword(Keyword::BINARY) {
14863 self.expect_keyword_is(Keyword::BASE64)?;
14864 binary_base64 = true;
14865 } else if self.parse_keyword(Keyword::ROOT) {
14866 self.expect_token(&Token::LParen)?;
14867 root = Some(self.parse_literal_string()?);
14868 self.expect_token(&Token::RParen)?;
14869 } else if self.parse_keyword(Keyword::TYPE) {
14870 r#type = true;
14871 }
14872 }
14873 Ok(ForClause::Xml {
14874 for_xml,
14875 elements,
14876 binary_base64,
14877 root,
14878 r#type,
14879 })
14880 }
14881
14882 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
14884 let for_json = if self.parse_keyword(Keyword::AUTO) {
14885 ForJson::Auto
14886 } else if self.parse_keyword(Keyword::PATH) {
14887 ForJson::Path
14888 } else {
14889 return Err(ParserError::ParserError(
14890 "Expected FOR JSON [AUTO | PATH ]".to_string(),
14891 ));
14892 };
14893 let mut root = None;
14894 let mut include_null_values = false;
14895 let mut without_array_wrapper = false;
14896 while self.peek_token_ref().token == Token::Comma {
14897 self.next_token();
14898 if self.parse_keyword(Keyword::ROOT) {
14899 self.expect_token(&Token::LParen)?;
14900 root = Some(self.parse_literal_string()?);
14901 self.expect_token(&Token::RParen)?;
14902 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
14903 include_null_values = true;
14904 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
14905 without_array_wrapper = true;
14906 }
14907 }
14908 Ok(ForClause::Json {
14909 for_json,
14910 root,
14911 include_null_values,
14912 without_array_wrapper,
14913 })
14914 }
14915
    /// Parses a single CTE: `name [(columns)] AS [MATERIALIZED] (subquery)`,
    /// or — for dialects that allow omitting `AS` — the form `name (subquery)`.
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // Speculatively try the `AS`-less form `name (subquery)` via
        // `maybe_parse`, which backtracks on failure so the column-list
        // form below can still be attempted.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                // Optional trailing `FROM ident`.
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Standard form: an optional column list, then `AS` (mandatory
        // unless the dialect allows omitting it).
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // `[NOT] MATERIALIZED` is only recognized for PostgreSQL.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        // NOTE(review): unlike the `AS`-less path above, the trailing
        // `FROM ident` here is gated on `supports_from_first_insert` —
        // confirm the asymmetry is intentional.
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
14990
    /// Parses a query body: a `SELECT`, a parenthesized subquery, a
    /// `VALUES`/`VALUE` list, or `TABLE ...`, then hands off to
    /// `parse_remaining_set_exprs` to absorb any trailing set operations
    /// whose binding power exceeds `precedence`.
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // A parenthesized subquery, e.g. `(SELECT ...) UNION ...`.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, false)?)
        } else if self.parse_keyword(Keyword::VALUE) {
            // The second flag distinguishes the `VALUE` spelling from
            // `VALUES` — presumably for round-tripping; confirm against
            // `parse_values`.
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, true)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected_ref(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token_ref(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
15028
15029 fn parse_remaining_set_exprs(
15033 &mut self,
15034 mut expr: SetExpr,
15035 precedence: u8,
15036 ) -> Result<Box<SetExpr>, ParserError> {
15037 loop {
15038 let op = self.parse_set_operator(&self.peek_token().token);
15040 let next_precedence = match op {
15041 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
15043 10
15044 }
15045 Some(SetOperator::Intersect) => 20,
15047 None => break,
15049 };
15050 if precedence >= next_precedence {
15051 break;
15052 }
15053 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
15055 expr = SetExpr::SetOperation {
15056 left: Box::new(expr),
15057 op: op.unwrap(),
15058 set_quantifier,
15059 right: self.parse_query_body(next_precedence)?,
15060 };
15061 }
15062
15063 Ok(expr.into())
15064 }
15065
15066 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
15068 match token {
15069 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
15070 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
15071 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
15072 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
15073 _ => None,
15074 }
15075 }
15076
15077 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
15079 match op {
15080 Some(
15081 SetOperator::Except
15082 | SetOperator::Intersect
15083 | SetOperator::Union
15084 | SetOperator::Minus,
15085 ) => {
15086 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
15087 SetQuantifier::DistinctByName
15088 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15089 SetQuantifier::ByName
15090 } else if self.parse_keyword(Keyword::ALL) {
15091 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15092 SetQuantifier::AllByName
15093 } else {
15094 SetQuantifier::All
15095 }
15096 } else if self.parse_keyword(Keyword::DISTINCT) {
15097 SetQuantifier::Distinct
15098 } else {
15099 SetQuantifier::None
15100 }
15101 }
15102 _ => SetQuantifier::None,
15103 }
15104 }
15105
    /// Parses a restricted `SELECT` statement — one arm of a query body,
    /// without CTEs, set operations, or trailing `ORDER BY`/`LIMIT`.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // Dialects with "from-first" syntax allow `FROM t [SELECT ...]`.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                // Bare `FROM t` with no SELECT keyword at all: a
                // projection-less select anchored on the FROM token.
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        // Optimizer hints appear as comments immediately after SELECT.
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // BigQuery `[ALL | DISTINCT] AS {VALUE | STRUCT}`.
        let value_table_mode = self.parse_value_table_mode()?;

        // MySQL select modifiers (HIGH_PRIORITY, SQL_NO_CACHE, ...); these
        // may also establish ALL/DISTINCT.
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // `TOP` position relative to ALL/DISTINCT is dialect-dependent.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // Prefer a distinct-ness already parsed among the modifiers.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM t` with no projection.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Reuse the FROM clause already parsed by the from-first path, if
        // any; otherwise parse it here.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // `LATERAL VIEW [OUTER] expr name [aliases]`, repeatable.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                // The keyword list tells parse_optional_alias where the
                // alias list must stop.
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // Hierarchical-query clauses (`START WITH` / `CONNECT BY`), if any.
        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; record which came
        // first so the statement round-trips faithfully.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15333
    /// Collects optimizer hints written as comments directly after `SELECT`
    /// (e.g. `SELECT /*+ ... */`), for dialects that support them. Consumes
    /// the whitespace/comment tokens it inspects; stops at the first
    /// non-whitespace token.
    fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
        let supports_hints = self.dialect.supports_comment_optimizer_hint();
        if !supports_hints {
            return Ok(vec![]);
        }
        let mut hints = vec![];
        loop {
            // Peek the raw token stream (whitespace and comments included).
            let t = self.peek_nth_token_no_skip_ref(0);
            let Token::Whitespace(ws) = &t.token else {
                break;
            };
            match ws {
                Whitespace::SingleLineComment { comment, prefix } => {
                    // Only comments shaped like `<alnum-prefix>+<text>` are
                    // hints; other comments are skipped silently.
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::SingleLine {
                                prefix: prefix.clone(),
                            },
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::MultiLineComment(comment) => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::MultiLine,
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
                    // Plain whitespace between comments is consumed and ignored.
                    self.next_token_no_skip();
                }
            }
        }
        Ok(hints)
    }
15383
15384 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15387 let (before_plus, text) = comment.split_once('+')?;
15388 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15389 Some((before_plus.to_string(), text.to_string()))
15390 } else {
15391 None
15392 }
15393 }
15394
    /// Parses MySQL select modifiers (`HIGH_PRIORITY`, `STRAIGHT_JOIN`,
    /// `SQL_*`, ...) which may be freely interleaved with a single
    /// `ALL`/`DISTINCT`/`DISTINCTROW`.
    ///
    /// Returns the collected modifiers (`None` when none were set) and the
    /// distinct-ness if one was encountered here.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    // Rewind so parse_all_or_distinct sees the keyword again.
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                _ => {
                    // Only reached by a second ALL/DISTINCT/DISTINCTROW (the
                    // `if` guards above failed): rewind and report it.
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Collapse an all-defaults modifier struct to None.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15456
    /// Parses BigQuery's value-table mode: `[ALL | DISTINCT] AS {VALUE | STRUCT}`.
    /// Returns `Ok(None)` for other dialects or when no mode is present.
    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
        if !dialect_of!(self is BigQueryDialect) {
            return Ok(None);
        }

        // Longer keyword sequences are tried first so e.g. `DISTINCT AS
        // VALUE` is not consumed as a plain `DISTINCT`.
        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
            Some(ValueTableMode::DistinctAsValue)
        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
            Some(ValueTableMode::DistinctAsStruct)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
        {
            Some(ValueTableMode::AsValue)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
        {
            Some(ValueTableMode::AsStruct)
        } else if self.parse_keyword(Keyword::AS) {
            // Bare `AS` not followed by VALUE/STRUCT: always an error — the
            // `?` propagates it, so this branch never yields a value.
            self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
        } else {
            None
        };

        Ok(mode)
    }
15482
15483 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15487 where
15488 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15489 {
15490 let current_state = self.state;
15491 self.state = state;
15492 let res = f(self);
15493 self.state = current_state;
15494 res
15495 }
15496
    /// Parses any number of hierarchical-query clauses — `START WITH <expr>`
    /// and `CONNECT BY [NOCYCLE] <exprs>` — in either order. Returns an
    /// empty vector when neither keyword pair is present.
    pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
        let mut clauses = Vec::with_capacity(2);
        loop {
            if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
                clauses.push(ConnectByKind::StartWith {
                    // `idx` points at the START token, kept for spans.
                    start_token: self.token_at(idx).clone().into(),
                    condition: self.parse_expr()?.into(),
                });
            } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
            {
                clauses.push(ConnectByKind::ConnectBy {
                    connect_token: self.token_at(idx).clone().into(),
                    nocycle: self.parse_keyword(Keyword::NOCYCLE),
                    // The relationship expressions are parsed in the
                    // ConnectBy parser state — presumably to enable
                    // CONNECT BY-specific expression handling; confirm
                    // against the expression parser.
                    relationships: self.with_state(ParserState::ConnectBy, |parser| {
                        parser.parse_comma_separated(Parser::parse_expr)
                    })?,
                });
            } else {
                break;
            }
        }
        Ok(clauses)
    }
15521
    /// Parses the operand of `TABLE ...` in a query body: either
    /// `schema.table` or a bare `table` name.
    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
        // NOTE(review): three tokens are consumed unconditionally, so in the
        // bare-name case the two tokens after the name are eaten as well —
        // confirm callers only use this where the statement ends here.
        let token1 = self.next_token();
        let token2 = self.next_token();
        let token3 = self.next_token();

        let table_name;
        let schema_name;
        if token2 == Token::Period {
            // `schema . table`
            match token1.token {
                Token::Word(w) => {
                    schema_name = w.value;
                }
                _ => {
                    return self.expected("Schema name", token1);
                }
            }
            match token3.token {
                Token::Word(w) => {
                    table_name = w.value;
                }
                _ => {
                    return self.expected("Table name", token3);
                }
            }
            Ok(Table {
                table_name: Some(table_name),
                schema_name: Some(schema_name),
            })
        } else {
            // Bare `table`.
            match token1.token {
                Token::Word(w) => {
                    table_name = w.value;
                }
                _ => {
                    return self.expected("Table name", token1);
                }
            }
            Ok(Table {
                table_name: Some(table_name),
                schema_name: None,
            })
        }
    }
15566
15567 fn parse_set_role(
15569 &mut self,
15570 modifier: Option<ContextModifier>,
15571 ) -> Result<Statement, ParserError> {
15572 self.expect_keyword_is(Keyword::ROLE)?;
15573
15574 let role_name = if self.parse_keyword(Keyword::NONE) {
15575 None
15576 } else {
15577 Some(self.parse_identifier()?)
15578 };
15579 Ok(Statement::Set(Set::SetRole {
15580 context_modifier: modifier,
15581 role_name,
15582 }))
15583 }
15584
15585 fn parse_set_values(
15586 &mut self,
15587 parenthesized_assignment: bool,
15588 ) -> Result<Vec<Expr>, ParserError> {
15589 let mut values = vec![];
15590
15591 if parenthesized_assignment {
15592 self.expect_token(&Token::LParen)?;
15593 }
15594
15595 loop {
15596 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15597 expr
15598 } else if let Ok(expr) = self.parse_expr() {
15599 expr
15600 } else {
15601 self.expected_ref("variable value", self.peek_token_ref())?
15602 };
15603
15604 values.push(value);
15605 if self.consume_token(&Token::Comma) {
15606 continue;
15607 }
15608
15609 if parenthesized_assignment {
15610 self.expect_token(&Token::RParen)?;
15611 }
15612 return Ok(values);
15613 }
15614 }
15615
15616 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15617 let modifier =
15618 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15619
15620 Self::keyword_to_modifier(modifier)
15621 }
15622
15623 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15625 let scope = self.parse_context_modifier();
15626
15627 let name = if self.dialect.supports_parenthesized_set_variables()
15628 && self.consume_token(&Token::LParen)
15629 {
15630 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15634 } else {
15635 self.parse_object_name(false)?
15636 };
15637
15638 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15639 return self.expected_ref("assignment operator", self.peek_token_ref());
15640 }
15641
15642 let value = self.parse_expr()?;
15643
15644 Ok(SetAssignment { scope, name, value })
15645 }
15646
    /// Parse the many forms of `SET`: single/multiple variable assignments,
    /// `SET TIME ZONE`, `SET NAMES`, transaction characteristics,
    /// `SET [SESSION] AUTHORIZATION`, `SET ROLE`, and dialect-specific
    /// session parameters. The leading `SET` keyword has already been consumed.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive's `SET HIVEVAR:name = value` form.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // Optional scope modifier (e.g. LOCAL/SESSION/GLOBAL); mutually
        // exclusive with the HIVEVAR form.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET ROLE ...` is tried speculatively so a non-match falls through
        // without consuming tokens.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`: with `=`/`TO` it is a
        // plain assignment to the TIMEZONE variable, otherwise the dedicated
        // SetTimeZone statement (Postgres-style `SET TIME ZONE <expr>`).
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // MySQL-style `SET NAMES {DEFAULT | charset [COLLATE collation]}`.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET [SESSION] CHARACTERISTICS AS TRANSACTION <modes>`.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT <id>` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET SESSION AUTHORIZATION {DEFAULT | <user>}` — a preceding
            // scope modifier is mandatory here.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        // Dialects with comma-separated assignments (e.g. `SET a = 1, b = 2`).
        if self.dialect.supports_comma_separated_set_assignments() {
            if scope.is_some() {
                // Put the scope-modifier token back so each assignment can
                // carry its own scope (assumes parse_context_modifier consumed
                // a single token — it is rewound with one prev_token here).
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten into SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Target variable(s): either `(a, b, c)` (when the dialect allows a
        // parenthesized list) or a single object name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        // `= <values>` or `TO <values>`.
        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // MSSQL-style `SET <param> ON/OFF` with no `=`/`TO`: rewind the name
        // token and reparse it as a session parameter.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
15816
15817 pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
15819 if self.parse_keyword(Keyword::STATISTICS) {
15820 let topic = match self.parse_one_of_keywords(&[
15821 Keyword::IO,
15822 Keyword::PROFILE,
15823 Keyword::TIME,
15824 Keyword::XML,
15825 ]) {
15826 Some(Keyword::IO) => SessionParamStatsTopic::IO,
15827 Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
15828 Some(Keyword::TIME) => SessionParamStatsTopic::Time,
15829 Some(Keyword::XML) => SessionParamStatsTopic::Xml,
15830 _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
15831 };
15832 let value = self.parse_session_param_value()?;
15833 Ok(
15834 Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
15835 topic,
15836 value,
15837 }))
15838 .into(),
15839 )
15840 } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
15841 let obj = self.parse_object_name(false)?;
15842 let value = self.parse_session_param_value()?;
15843 Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
15844 SetSessionParamIdentityInsert { obj, value },
15845 ))
15846 .into())
15847 } else if self.parse_keyword(Keyword::OFFSETS) {
15848 let keywords = self.parse_comma_separated(|parser| {
15849 let next_token = parser.next_token();
15850 match &next_token.token {
15851 Token::Word(w) => Ok(w.to_string()),
15852 _ => parser.expected("SQL keyword", next_token),
15853 }
15854 })?;
15855 let value = self.parse_session_param_value()?;
15856 Ok(
15857 Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
15858 keywords,
15859 value,
15860 }))
15861 .into(),
15862 )
15863 } else {
15864 let names = self.parse_comma_separated(|parser| {
15865 let next_token = parser.next_token();
15866 match next_token.token {
15867 Token::Word(w) => Ok(w.to_string()),
15868 _ => parser.expected("Session param name", next_token),
15869 }
15870 })?;
15871 let value = self.parse_expr()?.to_string();
15872 Ok(
15873 Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
15874 names,
15875 value,
15876 }))
15877 .into(),
15878 )
15879 }
15880 }
15881
15882 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
15883 if self.parse_keyword(Keyword::ON) {
15884 Ok(SessionParamValue::On)
15885 } else if self.parse_keyword(Keyword::OFF) {
15886 Ok(SessionParamValue::Off)
15887 } else {
15888 self.expected_ref("ON or OFF", self.peek_token_ref())
15889 }
15890 }
15891
    /// Parse a `SHOW` statement. Dispatches on the keyword(s) following the
    /// optional leading modifiers; the order of checks matters because each
    /// `parse_keyword` consumes tokens on success.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        // Optional leading modifiers, each consumed greedily in this order.
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // EXTENDED/FULL only apply to the SHOW variants handled above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            // `SHOW CHARSET` is the shorthand spelling of `SHOW CHARACTER SET`.
            self.parse_show_charset(true)
        } else {
            // Anything else: treat the remainder as a variable name,
            // e.g. `SHOW autocommit`.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
15955
15956 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
15957 Ok(Statement::ShowCharset(ShowCharset {
15959 is_shorthand,
15960 filter: self.parse_show_statement_filter()?,
15961 }))
15962 }
15963
15964 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
15965 let history = self.parse_keyword(Keyword::HISTORY);
15966 let show_options = self.parse_show_stmt_options()?;
15967 Ok(Statement::ShowCatalogs {
15968 terse,
15969 history,
15970 show_options,
15971 })
15972 }
15973
15974 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
15975 let history = self.parse_keyword(Keyword::HISTORY);
15976 let show_options = self.parse_show_stmt_options()?;
15977 Ok(Statement::ShowDatabases {
15978 terse,
15979 history,
15980 show_options,
15981 })
15982 }
15983
15984 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
15985 let history = self.parse_keyword(Keyword::HISTORY);
15986 let show_options = self.parse_show_stmt_options()?;
15987 Ok(Statement::ShowSchemas {
15988 terse,
15989 history,
15990 show_options,
15991 })
15992 }
15993
15994 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
15996 let obj_type = match self.expect_one_of_keywords(&[
15997 Keyword::TABLE,
15998 Keyword::TRIGGER,
15999 Keyword::FUNCTION,
16000 Keyword::PROCEDURE,
16001 Keyword::EVENT,
16002 Keyword::VIEW,
16003 ])? {
16004 Keyword::TABLE => Ok(ShowCreateObject::Table),
16005 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
16006 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
16007 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
16008 Keyword::EVENT => Ok(ShowCreateObject::Event),
16009 Keyword::VIEW => Ok(ShowCreateObject::View),
16010 keyword => Err(ParserError::ParserError(format!(
16011 "Unable to map keyword to ShowCreateObject: {keyword:?}"
16012 ))),
16013 }?;
16014
16015 let obj_name = self.parse_object_name(false)?;
16016
16017 Ok(Statement::ShowCreate { obj_type, obj_name })
16018 }
16019
16020 pub fn parse_show_columns(
16022 &mut self,
16023 extended: bool,
16024 full: bool,
16025 ) -> Result<Statement, ParserError> {
16026 let show_options = self.parse_show_stmt_options()?;
16027 Ok(Statement::ShowColumns {
16028 extended,
16029 full,
16030 show_options,
16031 })
16032 }
16033
16034 fn parse_show_tables(
16035 &mut self,
16036 terse: bool,
16037 extended: bool,
16038 full: bool,
16039 external: bool,
16040 ) -> Result<Statement, ParserError> {
16041 let history = !external && self.parse_keyword(Keyword::HISTORY);
16042 let show_options = self.parse_show_stmt_options()?;
16043 Ok(Statement::ShowTables {
16044 terse,
16045 history,
16046 extended,
16047 full,
16048 external,
16049 show_options,
16050 })
16051 }
16052
16053 fn parse_show_views(
16054 &mut self,
16055 terse: bool,
16056 materialized: bool,
16057 ) -> Result<Statement, ParserError> {
16058 let show_options = self.parse_show_stmt_options()?;
16059 Ok(Statement::ShowViews {
16060 materialized,
16061 terse,
16062 show_options,
16063 })
16064 }
16065
16066 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
16068 let filter = self.parse_show_statement_filter()?;
16069 Ok(Statement::ShowFunctions { filter })
16070 }
16071
16072 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
16074 let filter = self.parse_show_statement_filter()?;
16075 Ok(Statement::ShowCollation { filter })
16076 }
16077
16078 pub fn parse_show_statement_filter(
16080 &mut self,
16081 ) -> Result<Option<ShowStatementFilter>, ParserError> {
16082 if self.parse_keyword(Keyword::LIKE) {
16083 Ok(Some(ShowStatementFilter::Like(
16084 self.parse_literal_string()?,
16085 )))
16086 } else if self.parse_keyword(Keyword::ILIKE) {
16087 Ok(Some(ShowStatementFilter::ILike(
16088 self.parse_literal_string()?,
16089 )))
16090 } else if self.parse_keyword(Keyword::WHERE) {
16091 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
16092 } else {
16093 self.maybe_parse(|parser| -> Result<String, ParserError> {
16094 parser.parse_literal_string()
16095 })?
16096 .map_or(Ok(None), |filter| {
16097 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
16098 })
16099 }
16100 }
16101
16102 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
16104 let parsed_keyword = if dialect_of!(self is HiveDialect) {
16106 if self.parse_keyword(Keyword::DEFAULT) {
16108 return Ok(Statement::Use(Use::Default));
16109 }
16110 None } else if dialect_of!(self is DatabricksDialect) {
16112 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
16113 } else if dialect_of!(self is SnowflakeDialect) {
16114 self.parse_one_of_keywords(&[
16115 Keyword::DATABASE,
16116 Keyword::SCHEMA,
16117 Keyword::WAREHOUSE,
16118 Keyword::ROLE,
16119 Keyword::SECONDARY,
16120 ])
16121 } else {
16122 None };
16124
16125 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
16126 self.parse_secondary_roles()?
16127 } else {
16128 let obj_name = self.parse_object_name(false)?;
16129 match parsed_keyword {
16130 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
16131 Some(Keyword::DATABASE) => Use::Database(obj_name),
16132 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
16133 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
16134 Some(Keyword::ROLE) => Use::Role(obj_name),
16135 _ => Use::Object(obj_name),
16136 }
16137 };
16138
16139 Ok(Statement::Use(result))
16140 }
16141
16142 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
16143 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
16144 if self.parse_keyword(Keyword::NONE) {
16145 Ok(Use::SecondaryRoles(SecondaryRoles::None))
16146 } else if self.parse_keyword(Keyword::ALL) {
16147 Ok(Use::SecondaryRoles(SecondaryRoles::All))
16148 } else {
16149 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16150 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
16151 }
16152 }
16153
16154 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
16156 let relation = self.parse_table_factor()?;
16157 let joins = self.parse_joins()?;
16161 Ok(TableWithJoins { relation, joins })
16162 }
16163
    /// Parse the sequence of JOIN clauses following a table factor. Returns
    /// when the next token does not begin a join. The branch order is
    /// significant: each successful `parse_keyword` consumes tokens.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // ClickHouse-style GLOBAL prefix, valid before any join kind.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                // CROSS JOIN or CROSS APPLY.
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow an ON/USING constraint on CROSS JOIN.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // A bare leading OUTER means OUTER APPLY (MSSQL).
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // Snowflake-style ASOF JOIN ... MATCH_CONDITION (...).
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                // Regular joins, optionally prefixed with NATURAL. Peek the
                // next keyword to pick the operator without consuming it yet.
                let natural = self.parse_keyword(Keyword::NATURAL);
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a constructor `fn(JoinConstraint) -> JoinOperator`
                // that is applied after the relation and constraint are parsed.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        // Distinguish `INNER JOIN` from a bare `JOIN` so the
                        // AST round-trips the original spelling.
                        let inner = self.parse_keyword(Keyword::INNER); self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        let _ = self.next_token(); let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // OUTER is optional in `FULL [OUTER] JOIN`.
                        let _ = self.next_token(); let _ = self.parse_keyword(Keyword::OUTER); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // OUTER without LEFT/RIGHT/FULL (and not OUTER APPLY,
                        // handled above) is an error here.
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token(); JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        // NATURAL must be followed by a join type.
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: end of the join sequence.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // For dialects without implicit left-associative chaining,
                // a following join keyword means the joins nest to the right:
                // wrap the remainder recursively as a parenthesis-less
                // nested join.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16332
16333 fn peek_parens_less_nested_join(&self) -> bool {
16334 matches!(
16335 self.peek_token_ref().token,
16336 Token::Word(Word {
16337 keyword: Keyword::JOIN
16338 | Keyword::INNER
16339 | Keyword::LEFT
16340 | Keyword::RIGHT
16341 | Keyword::FULL,
16342 ..
16343 })
16344 )
16345 }
16346
    /// Parse a single table factor: a named table, derived table (subquery),
    /// table function, UNNEST, JSON_TABLE/OPENJSON/XMLTABLE, VALUES, a
    /// parenthesized join, etc. Recursion is bounded by the parser's
    /// recursion counter (and optionally the `recursive` crate).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        // Guard against deeply nested factors; restores depth on drop.
        let _guard = self.recursion_counter.try_decrease()?;
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL (subquery) or LATERAL table_function(args).
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // TABLE(<expr>) table function.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // `(` — either a derived table (subquery) or a parenthesized join.
            // Try the subquery first speculatively.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                // A derived table may be wrapped by chained PIVOT/UNPIVOT.
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            // Not a subquery: parse the parenthesized joined tables.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // `(a <join> b ...)` — a genuine nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // `((a <join> b))` — an extra pair of parens around a nested
                // join; keep the NestedJoin wrapper.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if self.dialect.supports_parens_around_table_factor() {
                // `(table_factor)` with no joins: unwrap the parens. An alias
                // written after `)` is moved onto the inner factor, but only
                // if the inner factor does not already carry one.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                self.expected_ref("joined table", self.peek_token_ref())
            }
        } else if self.dialect.supports_values_as_table_factor()
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // `VALUES (...)` used directly as a table factor: wrap it in a
            // minimal derived-table Query.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
                sample: None,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // UNNEST(expr, ...) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // JSON_TABLE(json_expr, json_path COLUMNS (col_defs)) [alias]
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Rewind the LParen so the helper can consume the full form.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            // Rewind the LParen so the helper can consume the full form.
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else if self.peek_token_ref().token == Token::AtSign {
            // `@stage` — Snowflake stage reference.
            self.parse_snowflake_stage_table_factor()
        } else {
            // Plain (possibly qualified) table name with its many optional
            // suffixes; the order below mirrors the grammar and must not
            // be rearranged.
            let name = self.parse_object_name(true)?;

            // PartiQL-style `name[...]` path.
            let json_path = match &self.peek_token_ref().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            // MySQL `PARTITION (p1, p2, ...)` selection.
            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Time-travel clause (e.g. FOR SYSTEM_TIME AS OF ...).
            let version = self.maybe_parse_table_version()?;

            // A trailing `(` makes this a table-valued function call.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Sampling may come before or after the alias depending on dialect.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            // MySQL index hints (USE/FORCE/IGNORE INDEX ...).
            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // MSSQL `WITH (NOLOCK, ...)` table hints; a bare WITH that is not
            // followed by `(` belongs to a later clause, so rewind it.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Chained PIVOT/UNPIVOT wrappers.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            // Snowflake MATCH_RECOGNIZE wrapper.
            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
16683
16684 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16689 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16691
16692 let args = if self.consume_token(&Token::LParen) {
16694 Some(self.parse_table_function_args()?)
16695 } else {
16696 None
16697 };
16698
16699 let alias = self.maybe_parse_table_alias()?;
16700
16701 Ok(TableFactor::Table {
16702 name,
16703 alias,
16704 args,
16705 with_hints: vec![],
16706 version: None,
16707 partitions: vec![],
16708 with_ordinality: false,
16709 json_path: None,
16710 sample: None,
16711 index_hints: vec![],
16712 })
16713 }
16714
16715 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16716 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16717 TableSampleModifier::TableSample
16718 } else if self.parse_keyword(Keyword::SAMPLE) {
16719 TableSampleModifier::Sample
16720 } else {
16721 return Ok(None);
16722 };
16723 self.parse_table_sample(modifier).map(Some)
16724 }
16725
    /// Parse the body of a TABLESAMPLE/SAMPLE clause: optional method name,
    /// either a Hive `BUCKET x OUT OF y [ON expr]` spec or a quantity
    /// (with optional ROWS/PERCENT unit), then optional REPEATABLE/SEED
    /// and OFFSET suffixes.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // The quantity/bucket spec may or may not be parenthesized; remember
        // which so the closing paren is required only when one was opened.
        let parenthesized = self.consume_token(&Token::LParen);

        // Hive bucket sampling is only recognized inside parentheses.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // Quantity: normally an expression; a bare word (e.g. a byte
            // length like `100M`) is kept as a placeholder value.
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic-sampling seed.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional OFFSET expression (ClickHouse-style).
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
16820
16821 fn parse_table_sample_seed(
16822 &mut self,
16823 modifier: TableSampleSeedModifier,
16824 ) -> Result<TableSampleSeed, ParserError> {
16825 self.expect_token(&Token::LParen)?;
16826 let value = self.parse_number_value()?;
16827 self.expect_token(&Token::RParen)?;
16828 Ok(TableSampleSeed { modifier, value })
16829 }
16830
16831 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16834 self.expect_token(&Token::LParen)?;
16835 let json_expr = self.parse_expr()?;
16836 let json_path = if self.consume_token(&Token::Comma) {
16837 Some(self.parse_value()?)
16838 } else {
16839 None
16840 };
16841 self.expect_token(&Token::RParen)?;
16842 let columns = if self.parse_keyword(Keyword::WITH) {
16843 self.expect_token(&Token::LParen)?;
16844 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
16845 self.expect_token(&Token::RParen)?;
16846 columns
16847 } else {
16848 Vec::new()
16849 };
16850 let alias = self.maybe_parse_table_alias()?;
16851 Ok(TableFactor::OpenJsonTable {
16852 json_expr,
16853 json_path,
16854 columns,
16855 alias,
16856 })
16857 }
16858
16859 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16860 self.expect_token(&Token::LParen)?;
16861 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
16862 self.expect_token(&Token::LParen)?;
16863 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
16864 self.expect_token(&Token::RParen)?;
16865 self.expect_token(&Token::Comma)?;
16866 namespaces
16867 } else {
16868 vec![]
16869 };
16870 let row_expression = self.parse_expr()?;
16871 let passing = self.parse_xml_passing_clause()?;
16872 self.expect_keyword_is(Keyword::COLUMNS)?;
16873 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
16874 self.expect_token(&Token::RParen)?;
16875 let alias = self.maybe_parse_table_alias()?;
16876 Ok(TableFactor::XmlTable {
16877 namespaces,
16878 row_expression,
16879 passing,
16880 columns,
16881 alias,
16882 })
16883 }
16884
16885 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
16886 let uri = self.parse_expr()?;
16887 self.expect_keyword_is(Keyword::AS)?;
16888 let name = self.parse_identifier()?;
16889 Ok(XmlNamespaceDefinition { uri, name })
16890 }
16891
16892 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
16893 let name = self.parse_identifier()?;
16894
16895 let option = if self.parse_keyword(Keyword::FOR) {
16896 self.expect_keyword(Keyword::ORDINALITY)?;
16897 XmlTableColumnOption::ForOrdinality
16898 } else {
16899 let r#type = self.parse_data_type()?;
16900 let mut path = None;
16901 let mut default = None;
16902
16903 if self.parse_keyword(Keyword::PATH) {
16904 path = Some(self.parse_expr()?);
16905 }
16906
16907 if self.parse_keyword(Keyword::DEFAULT) {
16908 default = Some(self.parse_expr()?);
16909 }
16910
16911 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
16912 if !not_null {
16913 let _ = self.parse_keyword(Keyword::NULL);
16915 }
16916
16917 XmlTableColumnOption::NamedInfo {
16918 r#type,
16919 path,
16920 default,
16921 nullable: !not_null,
16922 }
16923 };
16924 Ok(XmlTableColumn { name, option })
16925 }
16926
16927 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
16928 let mut arguments = vec![];
16929 if self.parse_keyword(Keyword::PASSING) {
16930 loop {
16931 let by_value =
16932 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
16933 let expr = self.parse_expr()?;
16934 let alias = if self.parse_keyword(Keyword::AS) {
16935 Some(self.parse_identifier()?)
16936 } else {
16937 None
16938 };
16939 arguments.push(XmlPassingArgument {
16940 expr,
16941 alias,
16942 by_value,
16943 });
16944 if !self.consume_token(&Token::Comma) {
16945 break;
16946 }
16947 }
16948 }
16949 Ok(XmlPassingClause { arguments })
16950 }
16951
16952 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16954 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
16955 self.expect_token(&Token::LParen)?;
16956
16957 let name = self.parse_object_name(true)?;
16958
16959 let mut dimensions = Vec::new();
16961 let mut metrics = Vec::new();
16962 let mut facts = Vec::new();
16963 let mut where_clause = None;
16964
16965 while self.peek_token_ref().token != Token::RParen {
16966 if self.parse_keyword(Keyword::DIMENSIONS) {
16967 if !dimensions.is_empty() {
16968 return Err(ParserError::ParserError(
16969 "DIMENSIONS clause can only be specified once".to_string(),
16970 ));
16971 }
16972 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16973 } else if self.parse_keyword(Keyword::METRICS) {
16974 if !metrics.is_empty() {
16975 return Err(ParserError::ParserError(
16976 "METRICS clause can only be specified once".to_string(),
16977 ));
16978 }
16979 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16980 } else if self.parse_keyword(Keyword::FACTS) {
16981 if !facts.is_empty() {
16982 return Err(ParserError::ParserError(
16983 "FACTS clause can only be specified once".to_string(),
16984 ));
16985 }
16986 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16987 } else if self.parse_keyword(Keyword::WHERE) {
16988 if where_clause.is_some() {
16989 return Err(ParserError::ParserError(
16990 "WHERE clause can only be specified once".to_string(),
16991 ));
16992 }
16993 where_clause = Some(self.parse_expr()?);
16994 } else {
16995 let tok = self.peek_token_ref();
16996 return parser_err!(
16997 format!(
16998 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
16999 tok.token
17000 ),
17001 tok.span.start
17002 )?;
17003 }
17004 }
17005
17006 self.expect_token(&Token::RParen)?;
17007
17008 let alias = self.maybe_parse_table_alias()?;
17009
17010 Ok(TableFactor::SemanticView {
17011 name,
17012 dimensions,
17013 metrics,
17014 facts,
17015 where_clause,
17016 alias,
17017 })
17018 }
17019
    /// Parses a `MATCH_RECOGNIZE (...)` operator applied to `table`; the
    /// `MATCH_RECOGNIZE` keyword itself has already been consumed by the
    /// caller.
    ///
    /// Sub-clauses are parsed in a fixed order: optional PARTITION BY,
    /// ORDER BY, MEASURES, rows-per-match and AFTER MATCH SKIP, then the
    /// mandatory PATTERN and DEFINE clauses, followed by an optional
    /// table alias.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                // The AS between measure expression and alias is optional.
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // `ONE ROW PER MATCH` or `ALL ROWS PER MATCH` with an optional
        // empty-match mode (`SHOW/OMIT EMPTY MATCHES`, `WITH UNMATCHED ROWS`).
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    // `AFTER MATCH SKIP` was seen but no valid option followed.
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        self.expect_keyword_is(Keyword::DEFINE)?;

        // `DEFINE <symbol> AS <expr>, ...` — the AS is mandatory here.
        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
17124
    /// Parses a primary (non-quantified) element of a MATCH_RECOGNIZE
    /// pattern: the `^`/`$` anchors, a `{- symbol -}` exclusion, a
    /// `PERMUTE(...)` list, a parenthesized group, or a plain pattern
    /// symbol.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` anchors the pattern at the partition start.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` (tokenized as a placeholder) anchors at the partition end.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}`: exclude the symbol's rows from the output.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // Unquoted `PERMUTE(a, b, ...)`: any permutation of the symbols.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // A parenthesized sub-pattern.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: back up and parse it as a symbol identifier.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
17163
    /// Parses a base pattern followed by any number of repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`.
    /// Each successive quantifier wraps the pattern parsed so far.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` is tokenized as a placeholder.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    // Brace quantifier: distinguish `{,m}`, `{n,m}`, `{n,}`,
                    // and `{n}` by the tokens that follow.
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — lower bound with optional upper.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: push the token back and stop looping.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17218
17219 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17220 let mut patterns = vec![self.parse_repetition_pattern()?];
17221 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17222 patterns.push(self.parse_repetition_pattern()?);
17223 }
17224 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17225 Ok([pattern]) => Ok(pattern),
17226 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17227 }
17228 }
17229
17230 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17231 let pattern = self.parse_concat_pattern()?;
17232 if self.consume_token(&Token::Pipe) {
17233 match self.parse_pattern()? {
17234 MatchRecognizePattern::Alternation(mut patterns) => {
17236 patterns.insert(0, pattern);
17237 Ok(MatchRecognizePattern::Alternation(patterns))
17238 }
17239 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17240 }
17241 } else {
17242 Ok(pattern)
17243 }
17244 }
17245
    /// Attempts to parse a time-travel / versioning clause after a table
    /// name, returning `Ok(None)` when the dialect does not support
    /// versioning or no such clause is present.
    ///
    /// Recognized forms: `FOR SYSTEM_TIME AS OF <expr>`, `CHANGES(...)`,
    /// `AT(...)` / `BEFORE(...)` (parsed as function calls),
    /// `TIMESTAMP AS OF <expr>`, and `VERSION AS OF <number>`.
    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
        if self.dialect.supports_table_versioning() {
            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
            {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
            } else if self.peek_keyword(Keyword::CHANGES) {
                return self.parse_table_version_changes().map(Some);
            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
                // AT/BEFORE are parsed as function calls,
                // e.g. `AT(TIMESTAMP => ...)`.
                let func_name = self.parse_object_name(true)?;
                let func = self.parse_function(func_name)?;
                return Ok(Some(TableVersion::Function(func)));
            } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::TimestampAsOf(expr)));
            } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
                // The version is restricted to a numeric literal.
                let expr = Expr::Value(self.parse_number_value()?);
                return Ok(Some(TableVersion::VersionAsOf(expr)));
            }
        }
        Ok(None)
    }
17269
17270 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
17281 let changes_name = self.parse_object_name(true)?;
17282 let changes = self.parse_function(changes_name)?;
17283 let at_name = self.parse_object_name(true)?;
17284 let at = self.parse_function(at_name)?;
17285 let end = if self.peek_keyword(Keyword::END) {
17286 let end_name = self.parse_object_name(true)?;
17287 Some(self.parse_function(end_name)?)
17288 } else {
17289 None
17290 };
17291 Ok(TableVersion::Changes { changes, at, end })
17292 }
17293
17294 pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
17297 if self.parse_keyword(Keyword::NESTED) {
17298 let _has_path_keyword = self.parse_keyword(Keyword::PATH);
17299 let path = self.parse_value()?;
17300 self.expect_keyword_is(Keyword::COLUMNS)?;
17301 let columns = self.parse_parenthesized(|p| {
17302 p.parse_comma_separated(Self::parse_json_table_column_def)
17303 })?;
17304 return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
17305 path,
17306 columns,
17307 }));
17308 }
17309 let name = self.parse_identifier()?;
17310 if self.parse_keyword(Keyword::FOR) {
17311 self.expect_keyword_is(Keyword::ORDINALITY)?;
17312 return Ok(JsonTableColumn::ForOrdinality(name));
17313 }
17314 let r#type = self.parse_data_type()?;
17315 let exists = self.parse_keyword(Keyword::EXISTS);
17316 self.expect_keyword_is(Keyword::PATH)?;
17317 let path = self.parse_value()?;
17318 let mut on_empty = None;
17319 let mut on_error = None;
17320 while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
17321 if self.parse_keyword(Keyword::EMPTY) {
17322 on_empty = Some(error_handling);
17323 } else {
17324 self.expect_keyword_is(Keyword::ERROR)?;
17325 on_error = Some(error_handling);
17326 }
17327 }
17328 Ok(JsonTableColumn::Named(JsonTableNamedColumn {
17329 name,
17330 r#type,
17331 path,
17332 exists,
17333 on_empty,
17334 on_error,
17335 }))
17336 }
17337
17338 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17346 let name = self.parse_identifier()?;
17347 let r#type = self.parse_data_type()?;
17348 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17349 self.next_token();
17350 Some(path)
17351 } else {
17352 None
17353 };
17354 let as_json = self.parse_keyword(Keyword::AS);
17355 if as_json {
17356 self.expect_keyword_is(Keyword::JSON)?;
17357 }
17358 Ok(OpenJsonTableColumn {
17359 name,
17360 r#type,
17361 path,
17362 as_json,
17363 })
17364 }
17365
17366 fn parse_json_table_column_error_handling(
17367 &mut self,
17368 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17369 let res = if self.parse_keyword(Keyword::NULL) {
17370 JsonTableColumnErrorHandling::Null
17371 } else if self.parse_keyword(Keyword::ERROR) {
17372 JsonTableColumnErrorHandling::Error
17373 } else if self.parse_keyword(Keyword::DEFAULT) {
17374 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17375 } else {
17376 return Ok(None);
17377 };
17378 self.expect_keyword_is(Keyword::ON)?;
17379 Ok(Some(res))
17380 }
17381
17382 pub fn parse_derived_table_factor(
17384 &mut self,
17385 lateral: IsLateral,
17386 ) -> Result<TableFactor, ParserError> {
17387 let subquery = self.parse_query()?;
17388 self.expect_token(&Token::RParen)?;
17389 let alias = self.maybe_parse_table_alias()?;
17390
17391 let sample = self
17393 .maybe_parse_table_sample()?
17394 .map(TableSampleKind::AfterTableAlias);
17395
17396 Ok(TableFactor::Derived {
17397 lateral: match lateral {
17398 Lateral => true,
17399 NotLateral => false,
17400 },
17401 subquery,
17402 alias,
17403 sample,
17404 })
17405 }
17406
17407 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17430 let expr = self.parse_expr()?;
17431 let alias = if self.parse_keyword(Keyword::AS) {
17432 Some(self.parse_identifier()?)
17433 } else {
17434 None
17435 };
17436
17437 Ok(ExprWithAlias { expr, alias })
17438 }
17439
17440 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17444 let expr = self.parse_expr()?;
17445 let alias = self.parse_identifier_optional_alias()?;
17446 Ok(ExprWithAlias { expr, alias })
17447 }
17448
17449 fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
17451 let function_name = match self.next_token().token {
17452 Token::Word(w) => Ok(w.value),
17453 _ => self.expected_ref("a function identifier", self.peek_token_ref()),
17454 }?;
17455 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
17456 let alias = {
17457 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
17458 kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
17460 }
17461 self.parse_optional_alias_inner(None, validator)?
17462 };
17463 Ok(ExprWithAlias { expr, alias })
17464 }
17465
17466 pub fn parse_pivot_table_factor(
17468 &mut self,
17469 table: TableFactor,
17470 ) -> Result<TableFactor, ParserError> {
17471 self.expect_token(&Token::LParen)?;
17472 let aggregate_functions =
17473 self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
17474 self.expect_keyword_is(Keyword::FOR)?;
17475 let value_column = if self.peek_token_ref().token == Token::LParen {
17476 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17477 p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
17478 })?
17479 } else {
17480 vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
17481 };
17482 self.expect_keyword_is(Keyword::IN)?;
17483
17484 self.expect_token(&Token::LParen)?;
17485 let value_source = if self.parse_keyword(Keyword::ANY) {
17486 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17487 self.parse_comma_separated(Parser::parse_order_by_expr)?
17488 } else {
17489 vec![]
17490 };
17491 PivotValueSource::Any(order_by)
17492 } else if self.peek_sub_query() {
17493 PivotValueSource::Subquery(self.parse_query()?)
17494 } else {
17495 PivotValueSource::List(
17496 self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
17497 )
17498 };
17499 self.expect_token(&Token::RParen)?;
17500
17501 let default_on_null =
17502 if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
17503 self.expect_token(&Token::LParen)?;
17504 let expr = self.parse_expr()?;
17505 self.expect_token(&Token::RParen)?;
17506 Some(expr)
17507 } else {
17508 None
17509 };
17510
17511 self.expect_token(&Token::RParen)?;
17512 let alias = self.maybe_parse_table_alias()?;
17513 Ok(TableFactor::Pivot {
17514 table: Box::new(table),
17515 aggregate_functions,
17516 value_column,
17517 value_source,
17518 default_on_null,
17519 alias,
17520 })
17521 }
17522
17523 pub fn parse_unpivot_table_factor(
17525 &mut self,
17526 table: TableFactor,
17527 ) -> Result<TableFactor, ParserError> {
17528 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17529 self.expect_keyword_is(Keyword::NULLS)?;
17530 Some(NullInclusion::IncludeNulls)
17531 } else if self.parse_keyword(Keyword::EXCLUDE) {
17532 self.expect_keyword_is(Keyword::NULLS)?;
17533 Some(NullInclusion::ExcludeNulls)
17534 } else {
17535 None
17536 };
17537 self.expect_token(&Token::LParen)?;
17538 let value = self.parse_expr()?;
17539 self.expect_keyword_is(Keyword::FOR)?;
17540 let name = self.parse_identifier()?;
17541 self.expect_keyword_is(Keyword::IN)?;
17542 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17543 p.parse_expr_with_alias()
17544 })?;
17545 self.expect_token(&Token::RParen)?;
17546 let alias = self.maybe_parse_table_alias()?;
17547 Ok(TableFactor::Unpivot {
17548 table: Box::new(table),
17549 value,
17550 null_inclusion,
17551 name,
17552 columns,
17553 alias,
17554 })
17555 }
17556
17557 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17559 if natural {
17560 Ok(JoinConstraint::Natural)
17561 } else if self.parse_keyword(Keyword::ON) {
17562 let constraint = self.parse_expr()?;
17563 Ok(JoinConstraint::On(constraint))
17564 } else if self.parse_keyword(Keyword::USING) {
17565 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17566 Ok(JoinConstraint::Using(columns))
17567 } else {
17568 Ok(JoinConstraint::None)
17569 }
17571 }
17572
17573 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17575 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17576
17577 self.expect_keyword_is(Keyword::TO)?;
17578 let grantees = self.parse_grantees()?;
17579
17580 let with_grant_option =
17581 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17582
17583 let current_grants =
17584 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17585 Some(CurrentGrantsKind::CopyCurrentGrants)
17586 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17587 Some(CurrentGrantsKind::RevokeCurrentGrants)
17588 } else {
17589 None
17590 };
17591
17592 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17593 Some(self.parse_identifier()?)
17594 } else {
17595 None
17596 };
17597
17598 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17599 Some(self.parse_identifier()?)
17600 } else {
17601 None
17602 };
17603
17604 Ok(Grant {
17605 privileges,
17606 objects,
17607 grantees,
17608 with_grant_option,
17609 as_grantor,
17610 granted_by,
17611 current_grants,
17612 })
17613 }
17614
    /// Parses the comma-separated grantee list of a GRANT/REVOKE/DENY.
    /// Each grantee may carry an explicit type keyword (`ROLE`, `USER`,
    /// `SHARE`, `GROUP`, `PUBLIC`, `DATABASE ROLE`, `APPLICATION [ROLE]`);
    /// when omitted, the type of the previous grantee in the list carries
    /// over.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No explicit keyword: reuse the previous grantee's type.
                grantee_type.clone()
            };

            // NOTE(review): for grantee types the dialect reserves, the
            // consumed keyword is pushed back so it is re-parsed —
            // presumably as part of the grantee name; confirm against the
            // dialect implementations of `get_reserved_grantees_types`.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC has no name component.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // Fold `namespace:ident` into a single-part object name.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17682
17683 pub fn parse_grant_deny_revoke_privileges_objects(
17685 &mut self,
17686 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17687 let privileges = if self.parse_keyword(Keyword::ALL) {
17688 Privileges::All {
17689 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17690 }
17691 } else {
17692 let actions = self.parse_actions_list()?;
17693 Privileges::Actions(actions)
17694 };
17695
17696 let objects = if self.parse_keyword(Keyword::ON) {
17697 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17698 Some(GrantObjects::AllTablesInSchema {
17699 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17700 })
17701 } else if self.parse_keywords(&[
17702 Keyword::ALL,
17703 Keyword::EXTERNAL,
17704 Keyword::TABLES,
17705 Keyword::IN,
17706 Keyword::SCHEMA,
17707 ]) {
17708 Some(GrantObjects::AllExternalTablesInSchema {
17709 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17710 })
17711 } else if self.parse_keywords(&[
17712 Keyword::ALL,
17713 Keyword::VIEWS,
17714 Keyword::IN,
17715 Keyword::SCHEMA,
17716 ]) {
17717 Some(GrantObjects::AllViewsInSchema {
17718 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17719 })
17720 } else if self.parse_keywords(&[
17721 Keyword::ALL,
17722 Keyword::MATERIALIZED,
17723 Keyword::VIEWS,
17724 Keyword::IN,
17725 Keyword::SCHEMA,
17726 ]) {
17727 Some(GrantObjects::AllMaterializedViewsInSchema {
17728 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17729 })
17730 } else if self.parse_keywords(&[
17731 Keyword::ALL,
17732 Keyword::FUNCTIONS,
17733 Keyword::IN,
17734 Keyword::SCHEMA,
17735 ]) {
17736 Some(GrantObjects::AllFunctionsInSchema {
17737 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17738 })
17739 } else if self.parse_keywords(&[
17740 Keyword::FUTURE,
17741 Keyword::SCHEMAS,
17742 Keyword::IN,
17743 Keyword::DATABASE,
17744 ]) {
17745 Some(GrantObjects::FutureSchemasInDatabase {
17746 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17747 })
17748 } else if self.parse_keywords(&[
17749 Keyword::FUTURE,
17750 Keyword::TABLES,
17751 Keyword::IN,
17752 Keyword::SCHEMA,
17753 ]) {
17754 Some(GrantObjects::FutureTablesInSchema {
17755 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17756 })
17757 } else if self.parse_keywords(&[
17758 Keyword::FUTURE,
17759 Keyword::EXTERNAL,
17760 Keyword::TABLES,
17761 Keyword::IN,
17762 Keyword::SCHEMA,
17763 ]) {
17764 Some(GrantObjects::FutureExternalTablesInSchema {
17765 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17766 })
17767 } else if self.parse_keywords(&[
17768 Keyword::FUTURE,
17769 Keyword::VIEWS,
17770 Keyword::IN,
17771 Keyword::SCHEMA,
17772 ]) {
17773 Some(GrantObjects::FutureViewsInSchema {
17774 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17775 })
17776 } else if self.parse_keywords(&[
17777 Keyword::FUTURE,
17778 Keyword::MATERIALIZED,
17779 Keyword::VIEWS,
17780 Keyword::IN,
17781 Keyword::SCHEMA,
17782 ]) {
17783 Some(GrantObjects::FutureMaterializedViewsInSchema {
17784 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17785 })
17786 } else if self.parse_keywords(&[
17787 Keyword::ALL,
17788 Keyword::SEQUENCES,
17789 Keyword::IN,
17790 Keyword::SCHEMA,
17791 ]) {
17792 Some(GrantObjects::AllSequencesInSchema {
17793 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17794 })
17795 } else if self.parse_keywords(&[
17796 Keyword::FUTURE,
17797 Keyword::SEQUENCES,
17798 Keyword::IN,
17799 Keyword::SCHEMA,
17800 ]) {
17801 Some(GrantObjects::FutureSequencesInSchema {
17802 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17803 })
17804 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
17805 Some(GrantObjects::ResourceMonitors(
17806 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17807 ))
17808 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
17809 Some(GrantObjects::ComputePools(
17810 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17811 ))
17812 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
17813 Some(GrantObjects::FailoverGroup(
17814 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17815 ))
17816 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
17817 Some(GrantObjects::ReplicationGroup(
17818 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17819 ))
17820 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
17821 Some(GrantObjects::ExternalVolumes(
17822 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17823 ))
17824 } else {
17825 let object_type = self.parse_one_of_keywords(&[
17826 Keyword::SEQUENCE,
17827 Keyword::DATABASE,
17828 Keyword::SCHEMA,
17829 Keyword::TABLE,
17830 Keyword::VIEW,
17831 Keyword::WAREHOUSE,
17832 Keyword::INTEGRATION,
17833 Keyword::VIEW,
17834 Keyword::WAREHOUSE,
17835 Keyword::INTEGRATION,
17836 Keyword::USER,
17837 Keyword::CONNECTION,
17838 Keyword::PROCEDURE,
17839 Keyword::FUNCTION,
17840 ]);
17841 let objects =
17842 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
17843 match object_type {
17844 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
17845 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
17846 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
17847 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
17848 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
17849 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
17850 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
17851 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
17852 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
17853 if let Some(name) = objects?.first() {
17854 self.parse_grant_procedure_or_function(name, &kw)?
17855 } else {
17856 self.expected_ref("procedure or function name", self.peek_token_ref())?
17857 }
17858 }
17859 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
17860 Some(unexpected_keyword) => return Err(ParserError::ParserError(
17861 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
17862 )),
17863 }
17864 }
17865 } else {
17866 None
17867 };
17868
17869 Ok((privileges, objects))
17870 }
17871
17872 fn parse_grant_procedure_or_function(
17873 &mut self,
17874 name: &ObjectName,
17875 kw: &Option<Keyword>,
17876 ) -> Result<Option<GrantObjects>, ParserError> {
17877 let arg_types = if self.consume_token(&Token::LParen) {
17878 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
17879 self.expect_token(&Token::RParen)?;
17880 list
17881 } else {
17882 vec![]
17883 };
17884 match kw {
17885 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
17886 name: name.clone(),
17887 arg_types,
17888 })),
17889 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
17890 name: name.clone(),
17891 arg_types,
17892 })),
17893 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
17894 }
17895 }
17896
    /// Parse a single privilege (an [`Action`]) within the privilege list of
    /// a `GRANT`/`REVOKE`/`DENY` statement, e.g. `SELECT (a, b)`,
    /// `IMPORTED PRIVILEGES`, or `CREATE SCHEMA`.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Helper: parse an optional parenthesized column list; an absent or
        // empty list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges must be tried before the single-word ones
        // below, so that e.g. `READ SESSION` is not consumed as `READ`, or
        // `MANAGE VERSIONS` as `MANAGE`. Each `parse_keywords` call consumes
        // tokens only on a full match, so the chain's order is significant.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-word privileges; some take a trailing qualifier (object
        // type, role name, or column list).
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
18017
    /// Parse the optional object type following a `CREATE` privilege in
    /// `GRANT CREATE ...`. Returns `None` — consuming nothing — when the next
    /// tokens do not name a recognized object type.
    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
        // Multi-word object types first, so that e.g. `APPLICATION PACKAGE`
        // is not consumed as plain `APPLICATION`.
        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
            Some(ActionCreateObjectType::ApplicationPackage)
        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
            Some(ActionCreateObjectType::ComputePool)
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
            Some(ActionCreateObjectType::DataExchangeListing)
        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
            Some(ActionCreateObjectType::ExternalVolume)
        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
            Some(ActionCreateObjectType::FailoverGroup)
        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
            Some(ActionCreateObjectType::NetworkPolicy)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
            // NOTE: `OrganiationListing` (sic) is the variant's actual name
            // in the AST enum; the misspelling cannot be fixed here.
            Some(ActionCreateObjectType::OrganiationListing)
        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
            Some(ActionCreateObjectType::ReplicationGroup)
        }
        // Single-word object types.
        else if self.parse_keyword(Keyword::ACCOUNT) {
            Some(ActionCreateObjectType::Account)
        } else if self.parse_keyword(Keyword::APPLICATION) {
            Some(ActionCreateObjectType::Application)
        } else if self.parse_keyword(Keyword::DATABASE) {
            Some(ActionCreateObjectType::Database)
        } else if self.parse_keyword(Keyword::INTEGRATION) {
            Some(ActionCreateObjectType::Integration)
        } else if self.parse_keyword(Keyword::ROLE) {
            Some(ActionCreateObjectType::Role)
        } else if self.parse_keyword(Keyword::SCHEMA) {
            Some(ActionCreateObjectType::Schema)
        } else if self.parse_keyword(Keyword::SHARE) {
            Some(ActionCreateObjectType::Share)
        } else if self.parse_keyword(Keyword::USER) {
            Some(ActionCreateObjectType::User)
        } else if self.parse_keyword(Keyword::WAREHOUSE) {
            Some(ActionCreateObjectType::Warehouse)
        } else {
            None
        }
    }
18060
18061 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
18062 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
18063 Ok(ActionApplyType::AggregationPolicy)
18064 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
18065 Ok(ActionApplyType::AuthenticationPolicy)
18066 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
18067 Ok(ActionApplyType::JoinPolicy)
18068 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
18069 Ok(ActionApplyType::MaskingPolicy)
18070 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
18071 Ok(ActionApplyType::PackagesPolicy)
18072 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
18073 Ok(ActionApplyType::PasswordPolicy)
18074 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
18075 Ok(ActionApplyType::ProjectionPolicy)
18076 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
18077 Ok(ActionApplyType::RowAccessPolicy)
18078 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
18079 Ok(ActionApplyType::SessionPolicy)
18080 } else if self.parse_keyword(Keyword::TAG) {
18081 Ok(ActionApplyType::Tag)
18082 } else {
18083 self.expected_ref("GRANT APPLY type", self.peek_token_ref())
18084 }
18085 }
18086
18087 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
18088 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
18089 Some(ActionExecuteObjectType::DataMetricFunction)
18090 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
18091 Some(ActionExecuteObjectType::ManagedAlert)
18092 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
18093 Some(ActionExecuteObjectType::ManagedTask)
18094 } else if self.parse_keyword(Keyword::ALERT) {
18095 Some(ActionExecuteObjectType::Alert)
18096 } else if self.parse_keyword(Keyword::TASK) {
18097 Some(ActionExecuteObjectType::Task)
18098 } else {
18099 None
18100 }
18101 }
18102
18103 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
18104 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
18105 Ok(ActionManageType::AccountSupportCases)
18106 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
18107 Ok(ActionManageType::EventSharing)
18108 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
18109 Ok(ActionManageType::ListingAutoFulfillment)
18110 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
18111 Ok(ActionManageType::OrganizationSupportCases)
18112 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
18113 Ok(ActionManageType::UserSupportCases)
18114 } else if self.parse_keyword(Keyword::GRANTS) {
18115 Ok(ActionManageType::Grants)
18116 } else if self.parse_keyword(Keyword::WAREHOUSES) {
18117 Ok(ActionManageType::Warehouses)
18118 } else {
18119 self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
18120 }
18121 }
18122
18123 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
18124 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
18125 Some(ActionModifyType::LogLevel)
18126 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
18127 Some(ActionModifyType::TraceLevel)
18128 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
18129 Some(ActionModifyType::SessionLogLevel)
18130 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
18131 Some(ActionModifyType::SessionTraceLevel)
18132 } else {
18133 None
18134 }
18135 }
18136
18137 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
18138 if self.parse_keyword(Keyword::EXECUTION) {
18139 Some(ActionMonitorType::Execution)
18140 } else if self.parse_keyword(Keyword::SECURITY) {
18141 Some(ActionMonitorType::Security)
18142 } else if self.parse_keyword(Keyword::USAGE) {
18143 Some(ActionMonitorType::Usage)
18144 } else {
18145 None
18146 }
18147 }
18148
18149 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
18151 let mut name = self.parse_object_name(false)?;
18152 if self.dialect.supports_user_host_grantee()
18153 && name.0.len() == 1
18154 && name.0[0].as_ident().is_some()
18155 && self.consume_token(&Token::AtSign)
18156 {
18157 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
18158 let host = self.parse_identifier()?;
18159 Ok(GranteeName::UserHost { user, host })
18160 } else {
18161 Ok(GranteeName::ObjectName(name))
18162 }
18163 }
18164
18165 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
18167 self.expect_keyword(Keyword::DENY)?;
18168
18169 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18170 let objects = match objects {
18171 Some(o) => o,
18172 None => {
18173 return parser_err!(
18174 "DENY statements must specify an object",
18175 self.peek_token_ref().span.start
18176 )
18177 }
18178 };
18179
18180 self.expect_keyword_is(Keyword::TO)?;
18181 let grantees = self.parse_grantees()?;
18182 let cascade = self.parse_cascade_option();
18183 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18184 Some(self.parse_identifier()?)
18185 } else {
18186 None
18187 };
18188
18189 Ok(Statement::Deny(DenyStatement {
18190 privileges,
18191 objects,
18192 grantees,
18193 cascade,
18194 granted_by,
18195 }))
18196 }
18197
18198 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18200 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18201
18202 self.expect_keyword_is(Keyword::FROM)?;
18203 let grantees = self.parse_grantees()?;
18204
18205 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18206 Some(self.parse_identifier()?)
18207 } else {
18208 None
18209 };
18210
18211 let cascade = self.parse_cascade_option();
18212
18213 Ok(Revoke {
18214 privileges,
18215 objects,
18216 grantees,
18217 granted_by,
18218 cascade,
18219 })
18220 }
18221
18222 pub fn parse_replace(
18224 &mut self,
18225 replace_token: TokenWithSpan,
18226 ) -> Result<Statement, ParserError> {
18227 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18228 return parser_err!(
18229 "Unsupported statement REPLACE",
18230 self.peek_token_ref().span.start
18231 );
18232 }
18233
18234 let mut insert = self.parse_insert(replace_token)?;
18235 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18236 *replace_into = true;
18237 }
18238
18239 Ok(insert)
18240 }
18241
18242 fn parse_insert_setexpr_boxed(
18246 &mut self,
18247 insert_token: TokenWithSpan,
18248 ) -> Result<Box<SetExpr>, ParserError> {
18249 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18250 }
18251
    /// Parse an `INSERT` statement (or Hive `INSERT ... DIRECTORY`);
    /// `insert_token` is the already-consumed `INSERT`/`REPLACE` keyword
    /// token. Clauses are consumed strictly in source order.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite `INSERT OR REPLACE/ABORT/...` conflict clause.
        let or = self.parse_conflict_clause();
        // MySQL insert priority modifier (LOW_PRIORITY | DELAYED | HIGH_PRIORITY).
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Set to true by `parse_replace` after this returns.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // Hive `INSERT OVERWRITE [LOCAL] DIRECTORY 'path' ...` is a distinct
        // statement kind.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Regular INSERT with a target table.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional table alias, but only when the next tokens cannot be
            // the start of the source (a subquery or DEFAULT/VALUES).
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    // An implicit (no `AS`) alias; `maybe_parse` rewinds if
                    // no identifier follows.
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            // `DEFAULT VALUES` has no column list, partition list, or source.
            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                (vec![], None, vec![], None, None, vec![])
            } else {
                // Column list and Hive clauses only apply when the source is
                // not immediately a parenthesized subquery.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                // MSSQL-style OUTPUT clause.
                let output = self.maybe_parse_output_clause()?;

                // Source: absent if a ClickHouse FORMAT/SETTINGS clause
                // follows, an assignment list for `INSERT ... SET`, or a
                // query otherwise.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // ClickHouse trailing SETTINGS and FORMAT clauses.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL row alias: `INSERT ... AS new_row (col_alias, ...)`.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // Conflict handling: Postgres `ON CONFLICT ... DO ...` or
            // MySQL `ON DUPLICATE KEY UPDATE ...`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18478
18479 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18483 let ident = self.parse_identifier()?;
18484 let values = self
18485 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18486 .unwrap_or_default();
18487
18488 Ok(InputFormatClause { ident, values })
18489 }
18490
    /// Returns true when the next two tokens are `(` followed by the
    /// `SELECT` keyword, i.e. the start of a parenthesized subquery.
    /// Purely a lookahead — no tokens are consumed.
    fn peek_subquery_start(&mut self) -> bool {
        matches!(
            self.peek_tokens_ref(),
            [
                TokenWithSpan {
                    token: Token::LParen,
                    ..
                },
                TokenWithSpan {
                    token: Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    ..
                },
            ]
        )
    }
18511
    /// Returns true when the next two tokens are `(` followed by `SELECT`
    /// or `WITH`, i.e. the start of a parenthesized subquery that may begin
    /// with a CTE. Purely a lookahead — no tokens are consumed.
    fn peek_subquery_or_cte_start(&mut self) -> bool {
        matches!(
            self.peek_tokens_ref(),
            [
                TokenWithSpan {
                    token: Token::LParen,
                    ..
                },
                TokenWithSpan {
                    token: Token::Word(Word {
                        keyword: Keyword::SELECT | Keyword::WITH,
                        ..
                    }),
                    ..
                },
            ]
        )
    }
18533
18534 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18535 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18536 Some(SqliteOnConflict::Replace)
18537 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18538 Some(SqliteOnConflict::Rollback)
18539 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18540 Some(SqliteOnConflict::Abort)
18541 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18542 Some(SqliteOnConflict::Fail)
18543 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18544 Some(SqliteOnConflict::Ignore)
18545 } else if self.parse_keyword(Keyword::REPLACE) {
18546 Some(SqliteOnConflict::Replace)
18547 } else {
18548 None
18549 }
18550 }
18551
18552 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18554 if self.parse_keyword(Keyword::PARTITION) {
18555 self.expect_token(&Token::LParen)?;
18556 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18557 self.expect_token(&Token::RParen)?;
18558 Ok(partition_cols)
18559 } else {
18560 Ok(None)
18561 }
18562 }
18563
18564 pub fn parse_load_data_table_format(
18566 &mut self,
18567 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18568 if self.parse_keyword(Keyword::INPUTFORMAT) {
18569 let input_format = self.parse_expr()?;
18570 self.expect_keyword_is(Keyword::SERDE)?;
18571 let serde = self.parse_expr()?;
18572 Ok(Some(HiveLoadDataFormat {
18573 input_format,
18574 serde,
18575 }))
18576 } else {
18577 Ok(None)
18578 }
18579 }
18580
18581 fn parse_update_setexpr_boxed(
18585 &mut self,
18586 update_token: TokenWithSpan,
18587 ) -> Result<Box<SetExpr>, ParserError> {
18588 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18589 }
18590
    /// Parse an `UPDATE` statement; `update_token` is the already-consumed
    /// `UPDATE` keyword token. Clauses are consumed strictly in source order.
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite `UPDATE OR REPLACE/ABORT/...` conflict clause.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // Some dialects allow FROM before SET; remember which position it
        // appeared in so the statement round-trips faithfully.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        // MSSQL-style OUTPUT clause.
        let output = self.maybe_parse_output_clause()?;

        // Postgres-style FROM after SET — only if none was seen before SET.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // MySQL-style trailing ORDER BY / LIMIT, where supported.
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18652
18653 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18655 let target = self.parse_assignment_target()?;
18656 self.expect_token(&Token::Eq)?;
18657 let value = self.parse_expr()?;
18658 Ok(Assignment { target, value })
18659 }
18660
18661 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18663 if self.consume_token(&Token::LParen) {
18664 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18665 self.expect_token(&Token::RParen)?;
18666 Ok(AssignmentTarget::Tuple(columns))
18667 } else {
18668 let column = self.parse_object_name(false)?;
18669 Ok(AssignmentTarget::ColumnName(column))
18670 }
18671 }
18672
    /// Parse a single function-call argument, which may be named
    /// (`name => expr`, `name := expr`, ...) or unnamed, and may be a
    /// wildcard `*` with optional dialect-specific wildcard options.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // Speculatively try a named argument first; `maybe_parse` rewinds on
        // failure so the unnamed path below can retry from the same tokens.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            self.maybe_parse(|p| {
                // In this dialect the argument name may itself be an
                // arbitrary expression.
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            self.maybe_parse(|p| {
                // Otherwise the argument name must be a plain identifier.
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Unnamed argument: a regular expression or a wildcard.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                // e.g. `COUNT(* EXCLUDE (col))`; if no options were actually
                // present, fall back to a plain wildcard argument.
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18722
    /// Parse the operator separating a named function argument's name from
    /// its value. Which operators are accepted depends on the dialect; on
    /// failure the consumed token is pushed back before returning an error.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        // Keyword form: `name VALUE expr`.
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            // `name => expr`
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            // `name = expr`
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            // `name := expr`
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            // `name : expr`
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not a recognized operator for this dialect: rewind so the
                // caller (via `maybe_parse`) can retry as an unnamed arg.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
18751
18752 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
18754 if self.consume_token(&Token::RParen) {
18755 Ok(vec![])
18756 } else {
18757 let args = self.parse_comma_separated(Parser::parse_function_args)?;
18758 self.expect_token(&Token::RParen)?;
18759 Ok(args)
18760 }
18761 }
18762
    /// Parse the argument list of a table-valued function call, including an
    /// optional trailing ClickHouse-style `SETTINGS` list; consumes the
    /// closing `)` (the opening `(` has already been consumed).
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Arguments and the SETTINGS clause share one comma-separated list,
        // so check for SETTINGS before parsing each argument; SETTINGS, once
        // seen, terminates the argument portion.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
18783
    /// Parse the contents of a function call's parenthesized argument list
    /// (the opening `(` has already been consumed), including the optional
    /// clauses some dialects allow inside the parentheses (DISTINCT/ALL,
    /// null treatment, ORDER BY, LIMIT, HAVING MIN/MAX, SEPARATOR,
    /// ON OVERFLOW, and JSON NULL/RETURNING clauses); consumes the `)`.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON clauses may appear before the arguments proper.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list: only the clauses parsed so far.
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // e.g. `FIRST_VALUE(x IGNORE NULLS)` in dialects that allow the null
        // treatment inside the argument list.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery aggregate qualifier: `HAVING MIN <expr>` / `HAVING MAX <expr>`.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // expect_one_of_keywords only returns keywords from its
                // argument list, so this arm should be unreachable.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL `GROUP_CONCAT(... SEPARATOR <value>)`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON clauses may also trail the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
18877
18878 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
18879 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
18880 Some(JsonNullClause::AbsentOnNull)
18881 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
18882 Some(JsonNullClause::NullOnNull)
18883 } else {
18884 None
18885 }
18886 }
18887
18888 fn maybe_parse_json_returning_clause(
18889 &mut self,
18890 ) -> Result<Option<JsonReturningClause>, ParserError> {
18891 if self.parse_keyword(Keyword::RETURNING) {
18892 let data_type = self.parse_data_type()?;
18893 Ok(Some(JsonReturningClause { data_type }))
18894 } else {
18895 Ok(None)
18896 }
18897 }
18898
18899 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
18900 let loc = self.peek_token_ref().span.start;
18901 match (
18902 self.parse_keyword(Keyword::ALL),
18903 self.parse_keyword(Keyword::DISTINCT),
18904 ) {
18905 (true, false) => Ok(Some(DuplicateTreatment::All)),
18906 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
18907 (false, false) => Ok(None),
18908 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
18909 }
18910 }
18911
    /// Parses one comma-separated item of a `SELECT` projection list.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Optional dialect-reserved operator keyword preceding the item,
        // captured as an identifier prefix for the expression forms below.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `qualifier.*` — qualified wildcard plus optional modifiers.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*` plus optional modifiers.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Reject an unquoted bare `from` used as a select item expression.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // `alias = expr` assignment-style aliasing, when the dialect
            // supports it and the left-hand side is a plain identifier.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                let Expr::Identifier(alias) = *left else {
                    // Unreachable in practice: the guard above matched Identifier.
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — expression-qualified wildcard, dialect-gated.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (a, b, ...)` multi-column alias, dialect-gated.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression, with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
18986
18987 pub fn parse_wildcard_additional_options(
18991 &mut self,
18992 wildcard_token: TokenWithSpan,
18993 ) -> Result<WildcardAdditionalOptions, ParserError> {
18994 let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
18995 self.parse_optional_select_item_ilike()?
18996 } else {
18997 None
18998 };
18999 let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
19000 {
19001 self.parse_optional_select_item_exclude()?
19002 } else {
19003 None
19004 };
19005 let opt_except = if self.dialect.supports_select_wildcard_except() {
19006 self.parse_optional_select_item_except()?
19007 } else {
19008 None
19009 };
19010 let opt_replace = if self.dialect.supports_select_wildcard_replace() {
19011 self.parse_optional_select_item_replace()?
19012 } else {
19013 None
19014 };
19015 let opt_rename = if self.dialect.supports_select_wildcard_rename() {
19016 self.parse_optional_select_item_rename()?
19017 } else {
19018 None
19019 };
19020
19021 let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
19022 self.maybe_parse_select_item_alias()?
19023 } else {
19024 None
19025 };
19026
19027 Ok(WildcardAdditionalOptions {
19028 wildcard_token: wildcard_token.into(),
19029 opt_ilike,
19030 opt_exclude,
19031 opt_except,
19032 opt_rename,
19033 opt_replace,
19034 opt_alias,
19035 })
19036 }
19037
19038 pub fn parse_optional_select_item_ilike(
19042 &mut self,
19043 ) -> Result<Option<IlikeSelectItem>, ParserError> {
19044 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
19045 let next_token = self.next_token();
19046 let pattern = match next_token.token {
19047 Token::SingleQuotedString(s) => s,
19048 _ => return self.expected("ilike pattern", next_token),
19049 };
19050 Some(IlikeSelectItem { pattern })
19051 } else {
19052 None
19053 };
19054 Ok(opt_ilike)
19055 }
19056
19057 pub fn parse_optional_select_item_exclude(
19061 &mut self,
19062 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
19063 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
19064 if self.consume_token(&Token::LParen) {
19065 let columns =
19066 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
19067 self.expect_token(&Token::RParen)?;
19068 Some(ExcludeSelectItem::Multiple(columns))
19069 } else {
19070 let column = self.parse_object_name(false)?;
19071 Some(ExcludeSelectItem::Single(column))
19072 }
19073 } else {
19074 None
19075 };
19076
19077 Ok(opt_exclude)
19078 }
19079
19080 pub fn parse_optional_select_item_except(
19084 &mut self,
19085 ) -> Result<Option<ExceptSelectItem>, ParserError> {
19086 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
19087 if self.peek_token_ref().token == Token::LParen {
19088 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
19089 match &idents[..] {
19090 [] => {
19091 return self.expected_ref(
19092 "at least one column should be parsed by the expect clause",
19093 self.peek_token_ref(),
19094 )?;
19095 }
19096 [first, idents @ ..] => Some(ExceptSelectItem {
19097 first_element: first.clone(),
19098 additional_elements: idents.to_vec(),
19099 }),
19100 }
19101 } else {
19102 let ident = self.parse_identifier()?;
19104 Some(ExceptSelectItem {
19105 first_element: ident,
19106 additional_elements: vec![],
19107 })
19108 }
19109 } else {
19110 None
19111 };
19112
19113 Ok(opt_except)
19114 }
19115
19116 pub fn parse_optional_select_item_rename(
19118 &mut self,
19119 ) -> Result<Option<RenameSelectItem>, ParserError> {
19120 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
19121 if self.consume_token(&Token::LParen) {
19122 let idents =
19123 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
19124 self.expect_token(&Token::RParen)?;
19125 Some(RenameSelectItem::Multiple(idents))
19126 } else {
19127 let ident = self.parse_identifier_with_alias()?;
19128 Some(RenameSelectItem::Single(ident))
19129 }
19130 } else {
19131 None
19132 };
19133
19134 Ok(opt_rename)
19135 }
19136
19137 pub fn parse_optional_select_item_replace(
19139 &mut self,
19140 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
19141 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
19142 if self.consume_token(&Token::LParen) {
19143 let items = self.parse_comma_separated(|parser| {
19144 Ok(Box::new(parser.parse_replace_elements()?))
19145 })?;
19146 self.expect_token(&Token::RParen)?;
19147 Some(ReplaceSelectItem { items })
19148 } else {
19149 let tok = self.next_token();
19150 return self.expected("( after REPLACE but", tok);
19151 }
19152 } else {
19153 None
19154 };
19155
19156 Ok(opt_replace)
19157 }
19158 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
19160 let expr = self.parse_expr()?;
19161 let as_keyword = self.parse_keyword(Keyword::AS);
19162 let ident = self.parse_identifier()?;
19163 Ok(ReplaceSelectElement {
19164 expr,
19165 column_name: ident,
19166 as_keyword,
19167 })
19168 }
19169
19170 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19173 if self.parse_keyword(Keyword::ASC) {
19174 Some(true)
19175 } else if self.parse_keyword(Keyword::DESC) {
19176 Some(false)
19177 } else {
19178 None
19179 }
19180 }
19181
19182 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19184 self.parse_order_by_expr_inner(false)
19185 .map(|(order_by, _)| order_by)
19186 }
19187
19188 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19190 self.parse_order_by_expr_inner(true)
19191 .map(|(column, operator_class)| IndexColumn {
19192 column,
19193 operator_class,
19194 })
19195 }
19196
    /// Shared worker behind [`Parser::parse_order_by_expr`] and
    /// [`Parser::parse_create_index_expr`].
    ///
    /// Parses `<expr> [<operator_class>] [ASC|DESC] [NULLS FIRST|LAST]
    /// [WITH FILL ...]`; the operator class is only attempted when
    /// `with_operator_class` is set (index-column context).
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // An upcoming ASC/DESC/NULLS/WITH keyword means there is no
            // operator class; otherwise try, non-committally, to read one.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // `WITH FILL`, dialect-gated.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19237
19238 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19239 let asc = self.parse_asc_desc();
19240
19241 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19242 Some(true)
19243 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19244 Some(false)
19245 } else {
19246 None
19247 };
19248
19249 Ok(OrderByOptions { asc, nulls_first })
19250 }
19251
19252 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19256 let from = if self.parse_keyword(Keyword::FROM) {
19257 Some(self.parse_expr()?)
19258 } else {
19259 None
19260 };
19261
19262 let to = if self.parse_keyword(Keyword::TO) {
19263 Some(self.parse_expr()?)
19264 } else {
19265 None
19266 };
19267
19268 let step = if self.parse_keyword(Keyword::STEP) {
19269 Some(self.parse_expr()?)
19270 } else {
19271 None
19272 };
19273
19274 Ok(WithFill { from, to, step })
19275 }
19276
19277 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19280 if !self.parse_keyword(Keyword::INTERPOLATE) {
19281 return Ok(None);
19282 }
19283
19284 if self.consume_token(&Token::LParen) {
19285 let interpolations =
19286 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19287 self.expect_token(&Token::RParen)?;
19288 return Ok(Some(Interpolate {
19290 exprs: Some(interpolations),
19291 }));
19292 }
19293
19294 Ok(Some(Interpolate { exprs: None }))
19296 }
19297
19298 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19300 let column = self.parse_identifier()?;
19301 let expr = if self.parse_keyword(Keyword::AS) {
19302 Some(self.parse_expr()?)
19303 } else {
19304 None
19305 };
19306 Ok(InterpolateExpr { column, expr })
19307 }
19308
19309 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
19312 let quantity = if self.consume_token(&Token::LParen) {
19313 let quantity = self.parse_expr()?;
19314 self.expect_token(&Token::RParen)?;
19315 Some(TopQuantity::Expr(quantity))
19316 } else {
19317 let next_token = self.next_token();
19318 let quantity = match next_token.token {
19319 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
19320 _ => self.expected("literal int", next_token)?,
19321 };
19322 Some(TopQuantity::Constant(quantity))
19323 };
19324
19325 let percent = self.parse_keyword(Keyword::PERCENT);
19326
19327 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
19328
19329 Ok(Top {
19330 with_ties,
19331 percent,
19332 quantity,
19333 })
19334 }
19335
19336 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19338 if self.parse_keyword(Keyword::ALL) {
19339 Ok(None)
19340 } else {
19341 Ok(Some(self.parse_expr()?))
19342 }
19343 }
19344
19345 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19347 let value = self.parse_expr()?;
19348 let rows = if self.parse_keyword(Keyword::ROW) {
19349 OffsetRows::Row
19350 } else if self.parse_keyword(Keyword::ROWS) {
19351 OffsetRows::Rows
19352 } else {
19353 OffsetRows::None
19354 };
19355 Ok(Offset { value, rows })
19356 }
19357
19358 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
19360 let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
19361
19362 let (quantity, percent) = if self
19363 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
19364 .is_some()
19365 {
19366 (None, false)
19367 } else {
19368 let quantity = Expr::Value(self.parse_value()?);
19369 let percent = self.parse_keyword(Keyword::PERCENT);
19370 let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
19371 (Some(quantity), percent)
19372 };
19373
19374 let with_ties = if self.parse_keyword(Keyword::ONLY) {
19375 false
19376 } else {
19377 self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
19378 };
19379
19380 Ok(Fetch {
19381 with_ties,
19382 percent,
19383 quantity,
19384 })
19385 }
19386
19387 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
19389 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
19390 Keyword::UPDATE => LockType::Update,
19391 Keyword::SHARE => LockType::Share,
19392 unexpected_keyword => return Err(ParserError::ParserError(
19393 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
19394 )),
19395 };
19396 let of = if self.parse_keyword(Keyword::OF) {
19397 Some(self.parse_object_name(false)?)
19398 } else {
19399 None
19400 };
19401 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
19402 Some(NonBlock::Nowait)
19403 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
19404 Some(NonBlock::SkipLocked)
19405 } else {
19406 None
19407 };
19408 Ok(LockClause {
19409 lock_type,
19410 of,
19411 nonblock,
19412 })
19413 }
19414
19415 pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
19417 self.expect_keyword(Keyword::LOCK)?;
19418
19419 if self.peek_keyword(Keyword::TABLES) {
19420 return self.expected_ref("TABLE or a table name", self.peek_token_ref());
19421 }
19422
19423 let _ = self.parse_keyword(Keyword::TABLE);
19424 let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
19425 let lock_mode = if self.parse_keyword(Keyword::IN) {
19426 let lock_mode = self.parse_lock_table_mode()?;
19427 self.expect_keyword(Keyword::MODE)?;
19428 Some(lock_mode)
19429 } else {
19430 None
19431 };
19432 let nowait = self.parse_keyword(Keyword::NOWAIT);
19433
19434 Ok(Lock {
19435 tables,
19436 lock_mode,
19437 nowait,
19438 })
19439 }
19440
19441 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19442 let only = self.parse_keyword(Keyword::ONLY);
19443 let name = self.parse_object_name(false)?;
19444 let has_asterisk = self.consume_token(&Token::Mul);
19445
19446 Ok(LockTableTarget {
19447 name,
19448 only,
19449 has_asterisk,
19450 })
19451 }
19452
19453 fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
19454 if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
19455 Ok(LockTableMode::AccessShare)
19456 } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
19457 Ok(LockTableMode::AccessExclusive)
19458 } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
19459 Ok(LockTableMode::RowShare)
19460 } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
19461 Ok(LockTableMode::RowExclusive)
19462 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
19463 Ok(LockTableMode::ShareUpdateExclusive)
19464 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
19465 Ok(LockTableMode::ShareRowExclusive)
19466 } else if self.parse_keyword(Keyword::SHARE) {
19467 Ok(LockTableMode::Share)
19468 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19469 Ok(LockTableMode::Exclusive)
19470 } else {
19471 self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
19472 }
19473 }
19474
    /// Parses a `VALUES` row list: one or more parenthesized,
    /// comma-separated expression lists, each optionally prefixed by `ROW`.
    ///
    /// * `allow_empty` — permit a row with no expressions, i.e. `()`.
    /// * `value_keyword` — passed through verbatim onto the returned
    ///   [`Values`] (records which introducer keyword the caller consumed).
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        // Set if any row used the explicit `ROW(...)` form.
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Empty row: consume the `)` and record no expressions.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
19504
19505 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19507 self.expect_keyword_is(Keyword::TRANSACTION)?;
19508 Ok(Statement::StartTransaction {
19509 modes: self.parse_transaction_modes()?,
19510 begin: false,
19511 transaction: Some(BeginTransactionKind::Transaction),
19512 modifier: None,
19513 statements: vec![],
19514 exception: None,
19515 has_end_keyword: false,
19516 })
19517 }
19518
19519 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19521 if !self.dialect.supports_start_transaction_modifier() {
19522 None
19523 } else if self.parse_keyword(Keyword::DEFERRED) {
19524 Some(TransactionModifier::Deferred)
19525 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19526 Some(TransactionModifier::Immediate)
19527 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19528 Some(TransactionModifier::Exclusive)
19529 } else if self.parse_keyword(Keyword::TRY) {
19530 Some(TransactionModifier::Try)
19531 } else if self.parse_keyword(Keyword::CATCH) {
19532 Some(TransactionModifier::Catch)
19533 } else {
19534 None
19535 }
19536 }
19537
19538 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19540 let modifier = self.parse_transaction_modifier();
19541 let transaction =
19542 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19543 {
19544 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19545 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19546 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19547 _ => None,
19548 };
19549 Ok(Statement::StartTransaction {
19550 modes: self.parse_transaction_modes()?,
19551 begin: true,
19552 transaction,
19553 modifier,
19554 statements: vec![],
19555 exception: None,
19556 has_end_keyword: false,
19557 })
19558 }
19559
    /// Parses the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block. NOTE(review): assumes the leading `BEGIN` was consumed by the
    /// caller — confirm against call sites.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler is `WHEN <ident> [OR <ident>]... THEN <statements>`.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // Error names joined by OR, terminated by THEN.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // Best-effort: consume a trailing OR when present.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19605
19606 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19608 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19609 None
19610 } else if self.parse_keyword(Keyword::TRY) {
19611 Some(TransactionModifier::Try)
19612 } else if self.parse_keyword(Keyword::CATCH) {
19613 Some(TransactionModifier::Catch)
19614 } else {
19615 None
19616 };
19617 Ok(Statement::Commit {
19618 chain: self.parse_commit_rollback_chain()?,
19619 end: true,
19620 modifier,
19621 })
19622 }
19623
    /// Parses zero or more transaction modes (`ISOLATION LEVEL ...`,
    /// `READ ONLY`, `READ WRITE`), separated by commas.
    ///
    /// After a comma another mode is mandatory; with no comma the loop
    /// exits at the first token that is not a mode.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                // Two-keyword levels are tried before single-keyword ones.
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                // A comma promised another mode; none followed.
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            // A trailing comma makes the next iteration's mode mandatory.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19662
19663 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19665 Ok(Statement::Commit {
19666 chain: self.parse_commit_rollback_chain()?,
19667 end: false,
19668 modifier: None,
19669 })
19670 }
19671
19672 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19674 let chain = self.parse_commit_rollback_chain()?;
19675 let savepoint = self.parse_rollback_savepoint()?;
19676
19677 Ok(Statement::Rollback { chain, savepoint })
19678 }
19679
19680 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19682 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19683 if self.parse_keyword(Keyword::AND) {
19684 let chain = !self.parse_keyword(Keyword::NO);
19685 self.expect_keyword_is(Keyword::CHAIN)?;
19686 Ok(chain)
19687 } else {
19688 Ok(false)
19689 }
19690 }
19691
19692 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19694 if self.parse_keyword(Keyword::TO) {
19695 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19696 let savepoint = self.parse_identifier()?;
19697
19698 Ok(Some(savepoint))
19699 } else {
19700 Ok(None)
19701 }
19702 }
19703
19704 pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
19706 self.expect_token(&Token::LParen)?;
19707 let message = Box::new(self.parse_expr()?);
19708 self.expect_token(&Token::Comma)?;
19709 let severity = Box::new(self.parse_expr()?);
19710 self.expect_token(&Token::Comma)?;
19711 let state = Box::new(self.parse_expr()?);
19712 let arguments = if self.consume_token(&Token::Comma) {
19713 self.parse_comma_separated(Parser::parse_expr)?
19714 } else {
19715 vec![]
19716 };
19717 self.expect_token(&Token::RParen)?;
19718 let options = if self.parse_keyword(Keyword::WITH) {
19719 self.parse_comma_separated(Parser::parse_raiserror_option)?
19720 } else {
19721 vec![]
19722 };
19723 Ok(Statement::RaisError {
19724 message,
19725 severity,
19726 state,
19727 arguments,
19728 options,
19729 })
19730 }
19731
19732 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
19734 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
19735 Keyword::LOG => Ok(RaisErrorOption::Log),
19736 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
19737 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
19738 _ => self.expected_ref(
19739 "LOG, NOWAIT OR SETERROR raiserror option",
19740 self.peek_token_ref(),
19741 ),
19742 }
19743 }
19744
19745 pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
19749 self.expect_keyword_is(Keyword::THROW)?;
19750
19751 let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
19752 let (message, state) = if error_number.is_some() {
19753 self.expect_token(&Token::Comma)?;
19754 let message = Box::new(self.parse_expr()?);
19755 self.expect_token(&Token::Comma)?;
19756 let state = Box::new(self.parse_expr()?);
19757 (Some(message), Some(state))
19758 } else {
19759 (None, None)
19760 };
19761
19762 Ok(ThrowStatement {
19763 error_number,
19764 message,
19765 state,
19766 })
19767 }
19768
19769 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
19771 let prepare = self.parse_keyword(Keyword::PREPARE);
19772 let name = self.parse_identifier()?;
19773 Ok(Statement::Deallocate { name, prepare })
19774 }
19775
    /// Parses the tail of an `EXECUTE` statement: dialect-gated `IMMEDIATE`,
    /// an optional statement name, a (possibly parenthesized) parameter
    /// list, and optional INTO/USING/OUTPUT/DEFAULT trailers.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // IMMEDIATE form, or a bare parenthesized list, has no statement name.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Pick the token that terminates the parameter list: `)` when
        // parenthesized; otherwise EOF, one of the trailing-clause keywords,
        // or a semicolon.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
19835
19836 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
19838 let name = self.parse_identifier()?;
19839
19840 let mut data_types = vec![];
19841 if self.consume_token(&Token::LParen) {
19842 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
19843 self.expect_token(&Token::RParen)?;
19844 }
19845
19846 self.expect_keyword_is(Keyword::AS)?;
19847 let statement = Box::new(self.parse_statement()?);
19848 Ok(Statement::Prepare {
19849 name,
19850 data_types,
19851 statement,
19852 })
19853 }
19854
    /// Parses an `UNLOAD ('<query text>' | (<query>)) TO <target> ...`
    /// statement, with optional `IAM_ROLE`, `WITH (...)` options and
    /// trailing legacy COPY-style options.
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::UNLOAD)?;
        self.expect_token(&Token::LParen)?;
        // The payload is either a quoted query text or an actual sub-query;
        // exactly one of (query, query_text) ends up populated.
        let (query, query_text) =
            if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                (None, Some(self.parse_literal_string()?))
            } else {
                (Some(self.parse_query()?), None)
            };
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;
        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
            Some(self.parse_iam_role_kind()?)
        } else {
            None
        };
        let with = self.parse_options(Keyword::WITH)?;
        // Trailing legacy options are consumed best-effort until none match.
        let mut options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            options.push(opt);
        }
        Ok(Statement::Unload {
            query,
            query_text,
            to,
            auth,
            with,
            options,
        })
    }
19888
19889 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
19890 let temporary = self
19891 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
19892 .is_some();
19893 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
19894 let table = self.parse_keyword(Keyword::TABLE);
19895 let name = self.parse_object_name(false)?;
19896
19897 Ok(SelectInto {
19898 temporary,
19899 unlogged,
19900 table,
19901 name,
19902 })
19903 }
19904
19905 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
19906 let v = self.parse_value()?;
19907 match &v.value {
19908 Value::SingleQuotedString(_) => Ok(v),
19909 Value::DoubleQuotedString(_) => Ok(v),
19910 Value::Number(_, _) => Ok(v),
19911 Value::Placeholder(_) => Ok(v),
19912 _ => {
19913 self.prev_token();
19914 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
19915 }
19916 }
19917 }
19918
19919 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
19921 let name = self.parse_object_name(false)?;
19922 if self.consume_token(&Token::LParen) {
19923 let value = self.parse_pragma_value()?;
19924 self.expect_token(&Token::RParen)?;
19925 Ok(Statement::Pragma {
19926 name,
19927 value: Some(value),
19928 is_eq: false,
19929 })
19930 } else if self.consume_token(&Token::Eq) {
19931 Ok(Statement::Pragma {
19932 name,
19933 value: Some(self.parse_pragma_value()?),
19934 is_eq: true,
19935 })
19936 } else {
19937 Ok(Statement::Pragma {
19938 name,
19939 value: None,
19940 is_eq: false,
19941 })
19942 }
19943 }
19944
19945 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
19947 let extension_name = self.parse_identifier()?;
19948
19949 Ok(Statement::Install { extension_name })
19950 }
19951
19952 pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
19954 if self.dialect.supports_load_extension() {
19955 let extension_name = self.parse_identifier()?;
19956 Ok(Statement::Load { extension_name })
19957 } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
19958 let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
19959 self.expect_keyword_is(Keyword::INPATH)?;
19960 let inpath = self.parse_literal_string()?;
19961 let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
19962 self.expect_keyword_is(Keyword::INTO)?;
19963 self.expect_keyword_is(Keyword::TABLE)?;
19964 let table_name = self.parse_object_name(false)?;
19965 let partitioned = self.parse_insert_partition()?;
19966 let table_format = self.parse_load_data_table_format()?;
19967 Ok(Statement::LoadData {
19968 local,
19969 inpath,
19970 overwrite,
19971 table_name,
19972 partitioned,
19973 table_format,
19974 })
19975 } else {
19976 self.expected_ref(
19977 "`DATA` or an extension name after `LOAD`",
19978 self.peek_token_ref(),
19979 )
19980 }
19981 }
19982
19983 pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
19995 let has_table_keyword = self.parse_keyword(Keyword::TABLE);
19996
19997 let name = self.parse_object_name(false)?;
19998
19999 let on_cluster = self.parse_optional_on_cluster()?;
20001
20002 let partition = if self.parse_keyword(Keyword::PARTITION) {
20003 if self.parse_keyword(Keyword::ID) {
20004 Some(Partition::Identifier(self.parse_identifier()?))
20005 } else {
20006 Some(Partition::Expr(self.parse_expr()?))
20007 }
20008 } else {
20009 None
20010 };
20011
20012 let include_final = self.parse_keyword(Keyword::FINAL);
20013
20014 let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
20015 if self.parse_keyword(Keyword::BY) {
20016 Some(Deduplicate::ByExpression(self.parse_expr()?))
20017 } else {
20018 Some(Deduplicate::All)
20019 }
20020 } else {
20021 None
20022 };
20023
20024 let predicate = if self.parse_keyword(Keyword::WHERE) {
20026 Some(self.parse_expr()?)
20027 } else {
20028 None
20029 };
20030
20031 let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
20032 self.expect_token(&Token::LParen)?;
20033 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
20034 self.expect_token(&Token::RParen)?;
20035 Some(columns)
20036 } else {
20037 None
20038 };
20039
20040 Ok(Statement::OptimizeTable {
20041 name,
20042 has_table_keyword,
20043 on_cluster,
20044 partition,
20045 include_final,
20046 deduplicate,
20047 predicate,
20048 zorder,
20049 })
20050 }
20051
20052 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
20058 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20060 let name = self.parse_object_name(false)?;
20062 let mut data_type: Option<DataType> = None;
20064 if self.parse_keywords(&[Keyword::AS]) {
20065 data_type = Some(self.parse_data_type()?)
20066 }
20067 let sequence_options = self.parse_create_sequence_options()?;
20068 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
20070 if self.parse_keywords(&[Keyword::NONE]) {
20071 Some(ObjectName::from(vec![Ident::new("NONE")]))
20072 } else {
20073 Some(self.parse_object_name(false)?)
20074 }
20075 } else {
20076 None
20077 };
20078 Ok(Statement::CreateSequence {
20079 temporary,
20080 if_not_exists,
20081 name,
20082 data_type,
20083 sequence_options,
20084 owned_by,
20085 })
20086 }
20087
    /// Parses the (all-optional) option list of `CREATE SEQUENCE`:
    /// `INCREMENT [BY] n`, `MINVALUE n | NO MINVALUE`,
    /// `MAXVALUE n | NO MAXVALUE`, `START [WITH] n`, `CACHE n`,
    /// and `CYCLE | NO CYCLE`.
    ///
    /// NOTE(review): options are only recognized in this fixed order;
    /// confirm whether out-of-order options are meant to be rejected.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // [ INCREMENT [ BY ] increment ] — the bool records whether the
        // optional BY keyword was written, so the statement round-trips.
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // [ MINVALUE minvalue | NO MINVALUE ] — None encodes NO MINVALUE.
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        // [ MAXVALUE maxvalue | NO MAXVALUE ] — None encodes NO MAXVALUE.
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // [ START [ WITH ] start ] — the bool records whether WITH was written.
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        // [ CACHE cache ]
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // [ CYCLE | NO CYCLE ] — NOTE(review): the flag is true for the
        // NO CYCLE spelling, which looks inverted; presumably it matches
        // the Display impl of SequenceOptions::Cycle — confirm before
        // changing.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
20132
20133 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
20137 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20138 let name = self.parse_object_name(false)?;
20139
20140 let server_type = if self.parse_keyword(Keyword::TYPE) {
20141 Some(self.parse_identifier()?)
20142 } else {
20143 None
20144 };
20145
20146 let version = if self.parse_keyword(Keyword::VERSION) {
20147 Some(self.parse_identifier()?)
20148 } else {
20149 None
20150 };
20151
20152 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
20153 let foreign_data_wrapper = self.parse_object_name(false)?;
20154
20155 let mut options = None;
20156 if self.parse_keyword(Keyword::OPTIONS) {
20157 self.expect_token(&Token::LParen)?;
20158 options = Some(self.parse_comma_separated(|p| {
20159 let key = p.parse_identifier()?;
20160 let value = p.parse_identifier()?;
20161 Ok(CreateServerOption { key, value })
20162 })?);
20163 self.expect_token(&Token::RParen)?;
20164 }
20165
20166 Ok(Statement::CreateServer(CreateServerStatement {
20167 name,
20168 if_not_exists: ine,
20169 server_type,
20170 version,
20171 foreign_data_wrapper,
20172 options,
20173 }))
20174 }
20175
20176 pub fn parse_create_foreign_data_wrapper(
20180 &mut self,
20181 ) -> Result<CreateForeignDataWrapper, ParserError> {
20182 let name = self.parse_identifier()?;
20183
20184 let handler = if self.parse_keyword(Keyword::HANDLER) {
20185 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20186 } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
20187 Some(FdwRoutineClause::NoFunction)
20188 } else {
20189 None
20190 };
20191
20192 let validator = if self.parse_keyword(Keyword::VALIDATOR) {
20193 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20194 } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20195 Some(FdwRoutineClause::NoFunction)
20196 } else {
20197 None
20198 };
20199
20200 let options = if self.parse_keyword(Keyword::OPTIONS) {
20201 self.expect_token(&Token::LParen)?;
20202 let opts = self.parse_comma_separated(|p| {
20203 let key = p.parse_identifier()?;
20204 let value = p.parse_identifier()?;
20205 Ok(CreateServerOption { key, value })
20206 })?;
20207 self.expect_token(&Token::RParen)?;
20208 Some(opts)
20209 } else {
20210 None
20211 };
20212
20213 Ok(CreateForeignDataWrapper {
20214 name,
20215 handler,
20216 validator,
20217 options,
20218 })
20219 }
20220
20221 pub fn parse_create_foreign_table(
20225 &mut self,
20226 ) -> Result<CreateForeignTable, ParserError> {
20227 let if_not_exists =
20228 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20229 let name = self.parse_object_name(false)?;
20230 let (columns, _constraints) = self.parse_columns()?;
20231 self.expect_keyword_is(Keyword::SERVER)?;
20232 let server_name = self.parse_identifier()?;
20233
20234 let options = if self.parse_keyword(Keyword::OPTIONS) {
20235 self.expect_token(&Token::LParen)?;
20236 let opts = self.parse_comma_separated(|p| {
20237 let key = p.parse_identifier()?;
20238 let value = p.parse_identifier()?;
20239 Ok(CreateServerOption { key, value })
20240 })?;
20241 self.expect_token(&Token::RParen)?;
20242 Some(opts)
20243 } else {
20244 None
20245 };
20246
20247 Ok(CreateForeignTable {
20248 name,
20249 if_not_exists,
20250 columns,
20251 server_name,
20252 options,
20253 })
20254 }
20255
20256 pub fn parse_create_publication(&mut self) -> Result<CreatePublication, ParserError> {
20260 let name = self.parse_identifier()?;
20261
20262 let target = if self.parse_keyword(Keyword::FOR) {
20263 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES]) {
20264 Some(PublicationTarget::AllTables)
20265 } else if self.parse_keyword(Keyword::TABLE) {
20266 let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
20267 Some(PublicationTarget::Tables(tables))
20268 } else if self.parse_keywords(&[Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
20269 let schemas = self.parse_comma_separated(|p| p.parse_identifier())?;
20270 Some(PublicationTarget::TablesInSchema(schemas))
20271 } else {
20272 return self.expected_ref(
20273 "ALL TABLES, TABLE, or TABLES IN SCHEMA after FOR",
20274 self.peek_token_ref(),
20275 );
20276 }
20277 } else {
20278 None
20279 };
20280
20281 let with_options = self.parse_options(Keyword::WITH)?;
20282
20283 Ok(CreatePublication {
20284 name,
20285 target,
20286 with_options,
20287 })
20288 }
20289
20290 pub fn parse_create_subscription(&mut self) -> Result<CreateSubscription, ParserError> {
20294 let name = self.parse_identifier()?;
20295 self.expect_keyword_is(Keyword::CONNECTION)?;
20296 let connection = self.parse_value()?.value;
20297 self.expect_keyword_is(Keyword::PUBLICATION)?;
20298 let publications = self.parse_comma_separated(|p| p.parse_identifier())?;
20299 let with_options = self.parse_options(Keyword::WITH)?;
20300
20301 Ok(CreateSubscription {
20302 name,
20303 connection,
20304 publications,
20305 with_options,
20306 })
20307 }
20308
    /// Parses the body of a `CREATE CAST` statement:
    /// `(<source> AS <target>)` followed by one of `WITHOUT FUNCTION`,
    /// `WITH INOUT`, or `WITH FUNCTION f[(argtypes)]`, and an optional
    /// `AS ASSIGNMENT` / `AS IMPLICIT` cast context.
    pub fn parse_create_cast(&mut self) -> Result<CreateCast, ParserError> {
        self.expect_token(&Token::LParen)?;
        let source_type = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::AS)?;
        let target_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // Branch order matters here: both WITH INOUT and WITH FUNCTION
        // start with the WITH keyword, so the two-keyword sequences are
        // tried in an order that cannot shadow each other.
        let function_kind = if self.parse_keywords(&[Keyword::WITHOUT, Keyword::FUNCTION]) {
            CastFunctionKind::WithoutFunction
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::INOUT]) {
            CastFunctionKind::WithInout
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::FUNCTION]) {
            let function_name = self.parse_object_name(false)?;
            // The argument-type list is optional: `f`, `f()`, and
            // `f(t1, t2)` are all accepted.
            let argument_types = if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                let types = if self.peek_token_ref().token == Token::RParen {
                    vec![]
                } else {
                    self.parse_comma_separated(|p| p.parse_data_type())?
                };
                self.expect_token(&Token::RParen)?;
                types
            } else {
                vec![]
            };
            CastFunctionKind::WithFunction {
                function_name,
                argument_types,
            }
        } else {
            return self.expected_ref(
                "WITH FUNCTION, WITHOUT FUNCTION, or WITH INOUT",
                self.peek_token_ref(),
            );
        };

        // With no AS clause the cast is explicit-only.
        let cast_context = if self.parse_keyword(Keyword::AS) {
            if self.parse_keyword(Keyword::ASSIGNMENT) {
                CastContext::Assignment
            } else if self.parse_keyword(Keyword::IMPLICIT) {
                CastContext::Implicit
            } else {
                return self.expected_ref("ASSIGNMENT or IMPLICIT after AS", self.peek_token_ref());
            }
        } else {
            CastContext::Explicit
        };

        Ok(CreateCast {
            source_type,
            target_type,
            function_kind,
            cast_context,
        })
    }
20367
20368 pub fn parse_create_conversion(
20372 &mut self,
20373 is_default: bool,
20374 ) -> Result<CreateConversion, ParserError> {
20375 let name = self.parse_object_name(false)?;
20376 self.expect_keyword_is(Keyword::FOR)?;
20377 let source_encoding = self.parse_literal_string()?;
20378 self.expect_keyword_is(Keyword::TO)?;
20379 let destination_encoding = self.parse_literal_string()?;
20380 self.expect_keyword_is(Keyword::FROM)?;
20381 let function_name = self.parse_object_name(false)?;
20382
20383 Ok(CreateConversion {
20384 name,
20385 is_default,
20386 source_encoding,
20387 destination_encoding,
20388 function_name,
20389 })
20390 }
20391
20392 pub fn parse_create_language(
20396 &mut self,
20397 or_replace: bool,
20398 trusted: bool,
20399 procedural: bool,
20400 ) -> Result<CreateLanguage, ParserError> {
20401 let name = self.parse_identifier()?;
20402
20403 let handler = if self.parse_keyword(Keyword::HANDLER) {
20404 Some(self.parse_object_name(false)?)
20405 } else {
20406 None
20407 };
20408
20409 let inline_handler = if self.parse_keyword(Keyword::INLINE) {
20410 Some(self.parse_object_name(false)?)
20411 } else {
20412 None
20413 };
20414
20415 let validator = if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20416 None
20417 } else if self.parse_keyword(Keyword::VALIDATOR) {
20418 Some(self.parse_object_name(false)?)
20419 } else {
20420 None
20421 };
20422
20423 Ok(CreateLanguage {
20424 name,
20425 or_replace,
20426 trusted,
20427 procedural,
20428 handler,
20429 inline_handler,
20430 validator,
20431 })
20432 }
20433
20434 pub fn parse_create_rule(&mut self) -> Result<CreateRule, ParserError> {
20438 let name = self.parse_identifier()?;
20439 self.expect_keyword_is(Keyword::AS)?;
20440 self.expect_keyword_is(Keyword::ON)?;
20441
20442 let event = if self.parse_keyword(Keyword::SELECT) {
20443 RuleEvent::Select
20444 } else if self.parse_keyword(Keyword::INSERT) {
20445 RuleEvent::Insert
20446 } else if self.parse_keyword(Keyword::UPDATE) {
20447 RuleEvent::Update
20448 } else if self.parse_keyword(Keyword::DELETE) {
20449 RuleEvent::Delete
20450 } else {
20451 return self.expected_ref(
20452 "SELECT, INSERT, UPDATE, or DELETE after ON",
20453 self.peek_token_ref(),
20454 );
20455 };
20456
20457 self.expect_keyword_is(Keyword::TO)?;
20458 let table = self.parse_object_name(false)?;
20459
20460 let condition = if self.parse_keyword(Keyword::WHERE) {
20461 Some(self.parse_expr()?)
20462 } else {
20463 None
20464 };
20465
20466 self.expect_keyword_is(Keyword::DO)?;
20467
20468 let instead = if self.parse_keyword(Keyword::INSTEAD) {
20469 true
20470 } else if self.parse_keyword(Keyword::ALSO) {
20471 false
20472 } else {
20473 false
20474 };
20475
20476 let action = if self.parse_keyword(Keyword::NOTHING) {
20477 RuleAction::Nothing
20478 } else if self.peek_token_ref().token == Token::LParen {
20479 self.expect_token(&Token::LParen)?;
20480 let mut stmts = Vec::new();
20481 loop {
20482 stmts.push(self.parse_statement()?);
20483 if !self.consume_token(&Token::SemiColon) {
20484 break;
20485 }
20486 if self.peek_token_ref().token == Token::RParen {
20487 break;
20488 }
20489 }
20490 self.expect_token(&Token::RParen)?;
20491 RuleAction::Statements(stmts)
20492 } else {
20493 let stmt = self.parse_statement()?;
20494 RuleAction::Statements(vec![stmt])
20495 };
20496
20497 Ok(CreateRule {
20498 name,
20499 event,
20500 table,
20501 condition,
20502 instead,
20503 action,
20504 })
20505 }
20506
20507 pub fn parse_create_statistics(&mut self) -> Result<CreateStatistics, ParserError> {
20511 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20512 let name = self.parse_object_name(false)?;
20513
20514 let kinds = if self.consume_token(&Token::LParen) {
20515 let kinds = self.parse_comma_separated(|p| {
20516 let ident = p.parse_identifier()?;
20517 match ident.value.to_lowercase().as_str() {
20518 "ndistinct" => Ok(StatisticsKind::NDistinct),
20519 "dependencies" => Ok(StatisticsKind::Dependencies),
20520 "mcv" => Ok(StatisticsKind::Mcv),
20521 other => Err(ParserError::ParserError(format!(
20522 "Unknown statistics kind: {other}"
20523 ))),
20524 }
20525 })?;
20526 self.expect_token(&Token::RParen)?;
20527 kinds
20528 } else {
20529 vec![]
20530 };
20531
20532 self.expect_keyword_is(Keyword::ON)?;
20533 let on = self.parse_comma_separated(Parser::parse_expr)?;
20534 self.expect_keyword_is(Keyword::FROM)?;
20535 let from = self.parse_object_name(false)?;
20536
20537 Ok(CreateStatistics {
20538 if_not_exists,
20539 name,
20540 kinds,
20541 on,
20542 from,
20543 })
20544 }
20545
20546 pub fn parse_create_access_method(&mut self) -> Result<CreateAccessMethod, ParserError> {
20550 let name = self.parse_identifier()?;
20551 self.expect_keyword_is(Keyword::TYPE)?;
20552 let method_type = if self.parse_keyword(Keyword::INDEX) {
20553 AccessMethodType::Index
20554 } else if self.parse_keyword(Keyword::TABLE) {
20555 AccessMethodType::Table
20556 } else {
20557 return self.expected_ref("INDEX or TABLE after TYPE", self.peek_token_ref());
20558 };
20559 self.expect_keyword_is(Keyword::HANDLER)?;
20560 let handler = self.parse_object_name(false)?;
20561
20562 Ok(CreateAccessMethod {
20563 name,
20564 method_type,
20565 handler,
20566 })
20567 }
20568
20569 pub fn parse_create_event_trigger(&mut self) -> Result<CreateEventTrigger, ParserError> {
20573 let name = self.parse_identifier()?;
20574 self.expect_keyword_is(Keyword::ON)?;
20575 let event_ident = self.parse_identifier()?;
20576 let event = match event_ident.value.to_lowercase().as_str() {
20577 "ddl_command_start" => EventTriggerEvent::DdlCommandStart,
20578 "ddl_command_end" => EventTriggerEvent::DdlCommandEnd,
20579 "table_rewrite" => EventTriggerEvent::TableRewrite,
20580 "sql_drop" => EventTriggerEvent::SqlDrop,
20581 other => {
20582 return Err(ParserError::ParserError(format!(
20583 "Unknown event trigger event: {other}"
20584 )))
20585 }
20586 };
20587
20588 let when_tags = if self.parse_keyword(Keyword::WHEN) {
20589 self.expect_keyword_is(Keyword::TAG)?;
20590 self.expect_keyword_is(Keyword::IN)?;
20591 self.expect_token(&Token::LParen)?;
20592 let tags = self.parse_comma_separated(|p| p.parse_value().map(|v| v.value))?;
20593 self.expect_token(&Token::RParen)?;
20594 Some(tags)
20595 } else {
20596 None
20597 };
20598
20599 self.expect_keyword_is(Keyword::EXECUTE)?;
20600 let is_procedure = if self.parse_keyword(Keyword::FUNCTION) {
20601 false
20602 } else if self.parse_keyword(Keyword::PROCEDURE) {
20603 true
20604 } else {
20605 return self.expected_ref("FUNCTION or PROCEDURE after EXECUTE", self.peek_token_ref());
20606 };
20607 let execute = self.parse_object_name(false)?;
20608 self.expect_token(&Token::LParen)?;
20609 self.expect_token(&Token::RParen)?;
20610
20611 Ok(CreateEventTrigger {
20612 name,
20613 event,
20614 when_tags,
20615 execute,
20616 is_procedure,
20617 })
20618 }
20619
20620 pub fn parse_create_transform(&mut self, or_replace: bool) -> Result<CreateTransform, ParserError> {
20624 self.expect_keyword_is(Keyword::FOR)?;
20625 let type_name = self.parse_data_type()?;
20626 self.expect_keyword_is(Keyword::LANGUAGE)?;
20627 let language = self.parse_identifier()?;
20628 self.expect_token(&Token::LParen)?;
20629 let elements = self.parse_comma_separated(|p| {
20630 let is_from = if p.parse_keyword(Keyword::FROM) {
20631 true
20632 } else {
20633 p.expect_keyword_is(Keyword::TO)?;
20634 false
20635 };
20636 p.expect_keyword_is(Keyword::SQL)?;
20637 p.expect_keyword_is(Keyword::WITH)?;
20638 p.expect_keyword_is(Keyword::FUNCTION)?;
20639 let function = p.parse_object_name(false)?;
20640 p.expect_token(&Token::LParen)?;
20641 let arg_types = if p.peek_token().token == Token::RParen {
20642 vec![]
20643 } else {
20644 p.parse_comma_separated(|p| p.parse_data_type())?
20645 };
20646 p.expect_token(&Token::RParen)?;
20647 Ok(TransformElement {
20648 is_from,
20649 function,
20650 arg_types,
20651 })
20652 })?;
20653 self.expect_token(&Token::RParen)?;
20654
20655 Ok(CreateTransform {
20656 or_replace,
20657 type_name,
20658 language,
20659 elements,
20660 })
20661 }
20662
20663
20664 pub fn parse_security_label(&mut self) -> Result<SecurityLabel, ParserError> {
20668 self.expect_keyword_is(Keyword::LABEL)?;
20669
20670 let provider = if self.parse_keyword(Keyword::FOR) {
20671 Some(self.parse_identifier()?)
20672 } else {
20673 None
20674 };
20675
20676 self.expect_keyword_is(Keyword::ON)?;
20677
20678 let object_kind = if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
20679 SecurityLabelObjectKind::MaterializedView
20680 } else if self.parse_keyword(Keyword::TABLE) {
20681 SecurityLabelObjectKind::Table
20682 } else if self.parse_keyword(Keyword::COLUMN) {
20683 SecurityLabelObjectKind::Column
20684 } else if self.parse_keyword(Keyword::DATABASE) {
20685 SecurityLabelObjectKind::Database
20686 } else if self.parse_keyword(Keyword::DOMAIN) {
20687 SecurityLabelObjectKind::Domain
20688 } else if self.parse_keyword(Keyword::FUNCTION) {
20689 SecurityLabelObjectKind::Function
20690 } else if self.parse_keyword(Keyword::ROLE) {
20691 SecurityLabelObjectKind::Role
20692 } else if self.parse_keyword(Keyword::SCHEMA) {
20693 SecurityLabelObjectKind::Schema
20694 } else if self.parse_keyword(Keyword::SEQUENCE) {
20695 SecurityLabelObjectKind::Sequence
20696 } else if self.parse_keyword(Keyword::TYPE) {
20697 SecurityLabelObjectKind::Type
20698 } else if self.parse_keyword(Keyword::VIEW) {
20699 SecurityLabelObjectKind::View
20700 } else {
20701 return self.expected_ref(
20702 "TABLE, COLUMN, DATABASE, DOMAIN, FUNCTION, MATERIALIZED VIEW, ROLE, SCHEMA, SEQUENCE, TYPE, or VIEW after ON",
20703 self.peek_token_ref(),
20704 );
20705 };
20706
20707 let object_name = self.parse_object_name(false)?;
20708
20709 self.expect_keyword_is(Keyword::IS)?;
20710
20711 let label = if self.parse_keyword(Keyword::NULL) {
20712 None
20713 } else {
20714 Some(self.parse_value()?.value)
20715 };
20716
20717 Ok(SecurityLabel {
20718 provider,
20719 object_kind,
20720 object_name,
20721 label,
20722 })
20723 }
20724
20725 pub fn parse_create_user_mapping(&mut self) -> Result<CreateUserMapping, ParserError> {
20729 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20730
20731 self.expect_keyword_is(Keyword::FOR)?;
20732
20733 let user = if self.parse_keyword(Keyword::CURRENT_ROLE) {
20734 UserMappingUser::CurrentRole
20735 } else if self.parse_keyword(Keyword::CURRENT_USER) {
20736 UserMappingUser::CurrentUser
20737 } else if self.parse_keyword(Keyword::PUBLIC) {
20738 UserMappingUser::Public
20739 } else if self.parse_keyword(Keyword::USER) {
20740 UserMappingUser::User
20741 } else {
20742 UserMappingUser::Ident(self.parse_identifier()?)
20743 };
20744
20745 self.expect_keyword_is(Keyword::SERVER)?;
20746 let server_name = self.parse_identifier()?;
20747
20748 let options = if self.parse_keyword(Keyword::OPTIONS) {
20749 self.expect_token(&Token::LParen)?;
20750 let opts = self.parse_comma_separated(|p| {
20751 let key = p.parse_identifier()?;
20752 let value = p.parse_identifier()?;
20753 Ok(CreateServerOption { key, value })
20754 })?;
20755 self.expect_token(&Token::RParen)?;
20756 Some(opts)
20757 } else {
20758 None
20759 };
20760
20761 Ok(CreateUserMapping {
20762 if_not_exists,
20763 user,
20764 server_name,
20765 options,
20766 })
20767 }
20768
20769 pub fn parse_create_tablespace(&mut self) -> Result<CreateTablespace, ParserError> {
20773 let name = self.parse_identifier()?;
20774
20775 let owner = if self.parse_keyword(Keyword::OWNER) {
20776 Some(self.parse_identifier()?)
20777 } else {
20778 None
20779 };
20780
20781 self.expect_keyword_is(Keyword::LOCATION)?;
20782 let location = self.parse_value()?.value;
20783
20784 let with_options = self.parse_options(Keyword::WITH)?;
20785
20786 Ok(CreateTablespace {
20787 name,
20788 owner,
20789 location,
20790 with_options,
20791 })
20792 }
20793
    /// Returns the parser's current index into the token stream.
    pub fn index(&self) -> usize {
        self.index
    }
20798
20799 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
20801 let ident = self.parse_identifier()?;
20802 self.expect_keyword_is(Keyword::AS)?;
20803
20804 let window_expr = if self.consume_token(&Token::LParen) {
20805 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
20806 } else if self.dialect.supports_window_clause_named_window_reference() {
20807 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
20808 } else {
20809 return self.expected_ref("(", self.peek_token_ref());
20810 };
20811
20812 Ok(NamedWindowDefinition(ident, window_expr))
20813 }
20814
20815 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
20817 let name = self.parse_object_name(false)?;
20818 let params = self.parse_optional_procedure_parameters()?;
20819
20820 let language = if self.parse_keyword(Keyword::LANGUAGE) {
20821 Some(self.parse_identifier()?)
20822 } else {
20823 None
20824 };
20825
20826 self.expect_keyword_is(Keyword::AS)?;
20827
20828 let body = self.parse_conditional_statements(&[Keyword::END])?;
20829
20830 Ok(Statement::CreateProcedure {
20831 name,
20832 or_alter,
20833 params,
20834 language,
20835 body,
20836 })
20837 }
20838
20839 pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
20841 let window_name = match &self.peek_token_ref().token {
20842 Token::Word(word) if word.keyword == Keyword::NoKeyword => {
20843 self.parse_optional_ident()?
20844 }
20845 _ => None,
20846 };
20847
20848 let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
20849 self.parse_comma_separated(Parser::parse_expr)?
20850 } else {
20851 vec![]
20852 };
20853 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
20854 self.parse_comma_separated(Parser::parse_order_by_expr)?
20855 } else {
20856 vec![]
20857 };
20858
20859 let window_frame = if !self.consume_token(&Token::RParen) {
20860 let window_frame = self.parse_window_frame()?;
20861 self.expect_token(&Token::RParen)?;
20862 Some(window_frame)
20863 } else {
20864 None
20865 };
20866 Ok(WindowSpec {
20867 window_name,
20868 partition_by,
20869 order_by,
20870 window_frame,
20871 })
20872 }
20873
20874 pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
20876 let name = self.parse_object_name(false)?;
20877
20878 let has_as = self.parse_keyword(Keyword::AS);
20880
20881 if !has_as {
20882 if self.consume_token(&Token::LParen) {
20884 let options = self.parse_create_type_sql_definition_options()?;
20886 self.expect_token(&Token::RParen)?;
20887 return Ok(Statement::CreateType {
20888 name,
20889 representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
20890 });
20891 }
20892
20893 return Ok(Statement::CreateType {
20895 name,
20896 representation: None,
20897 });
20898 }
20899
20900 if self.parse_keyword(Keyword::ENUM) {
20902 self.parse_create_type_enum(name)
20904 } else if self.parse_keyword(Keyword::RANGE) {
20905 self.parse_create_type_range(name)
20907 } else if self.consume_token(&Token::LParen) {
20908 self.parse_create_type_composite(name)
20910 } else {
20911 self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
20912 }
20913 }
20914
20915 fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20919 if self.consume_token(&Token::RParen) {
20920 return Ok(Statement::CreateType {
20922 name,
20923 representation: Some(UserDefinedTypeRepresentation::Composite {
20924 attributes: vec![],
20925 }),
20926 });
20927 }
20928
20929 let mut attributes = vec![];
20930 loop {
20931 let attr_name = self.parse_identifier()?;
20932 let attr_data_type = self.parse_data_type()?;
20933 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
20934 Some(self.parse_object_name(false)?)
20935 } else {
20936 None
20937 };
20938 attributes.push(UserDefinedTypeCompositeAttributeDef {
20939 name: attr_name,
20940 data_type: attr_data_type,
20941 collation: attr_collation,
20942 });
20943
20944 if !self.consume_token(&Token::Comma) {
20945 break;
20946 }
20947 }
20948 self.expect_token(&Token::RParen)?;
20949
20950 Ok(Statement::CreateType {
20951 name,
20952 representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
20953 })
20954 }
20955
20956 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20960 self.expect_token(&Token::LParen)?;
20961 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
20962 self.expect_token(&Token::RParen)?;
20963
20964 Ok(Statement::CreateType {
20965 name,
20966 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
20967 })
20968 }
20969
20970 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20974 self.expect_token(&Token::LParen)?;
20975 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
20976 self.expect_token(&Token::RParen)?;
20977
20978 Ok(Statement::CreateType {
20979 name,
20980 representation: Some(UserDefinedTypeRepresentation::Range { options }),
20981 })
20982 }
20983
20984 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
20986 let keyword = self.parse_one_of_keywords(&[
20987 Keyword::SUBTYPE,
20988 Keyword::SUBTYPE_OPCLASS,
20989 Keyword::COLLATION,
20990 Keyword::CANONICAL,
20991 Keyword::SUBTYPE_DIFF,
20992 Keyword::MULTIRANGE_TYPE_NAME,
20993 ]);
20994
20995 match keyword {
20996 Some(Keyword::SUBTYPE) => {
20997 self.expect_token(&Token::Eq)?;
20998 let data_type = self.parse_data_type()?;
20999 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
21000 }
21001 Some(Keyword::SUBTYPE_OPCLASS) => {
21002 self.expect_token(&Token::Eq)?;
21003 let name = self.parse_object_name(false)?;
21004 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
21005 }
21006 Some(Keyword::COLLATION) => {
21007 self.expect_token(&Token::Eq)?;
21008 let name = self.parse_object_name(false)?;
21009 Ok(UserDefinedTypeRangeOption::Collation(name))
21010 }
21011 Some(Keyword::CANONICAL) => {
21012 self.expect_token(&Token::Eq)?;
21013 let name = self.parse_object_name(false)?;
21014 Ok(UserDefinedTypeRangeOption::Canonical(name))
21015 }
21016 Some(Keyword::SUBTYPE_DIFF) => {
21017 self.expect_token(&Token::Eq)?;
21018 let name = self.parse_object_name(false)?;
21019 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
21020 }
21021 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
21022 self.expect_token(&Token::Eq)?;
21023 let name = self.parse_object_name(false)?;
21024 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
21025 }
21026 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
21027 }
21028 }
21029
21030 fn parse_create_type_sql_definition_options(
21032 &mut self,
21033 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
21034 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
21035 }
21036
    /// Parses one option of the SQL ("base type") definition form of
    /// `CREATE TYPE name ( option = value, ... )`.
    ///
    /// Every option except `PASSEDBYVALUE` is written `KEYWORD = value`.
    /// The value grammar depends on the keyword: an object name for the
    /// support-function options (INPUT, OUTPUT, RECEIVE, ...), a data type
    /// for ELEMENT, string literals for CATEGORY and DELIMITER, and small
    /// keyword enums for INTERNALLENGTH, ALIGNMENT and STORAGE.
    ///
    /// Returns a parser error when the next token is not one of the known
    /// option keywords, or when the value does not match the keyword's
    /// expected form.
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        match keyword {
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            // INTERNALLENGTH is either the keyword VARIABLE or a fixed
            // unsigned byte count.
            Some(Keyword::INTERNALLENGTH) => {
                self.expect_token(&Token::Eq)?;
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // PASSEDBYVALUE is a bare flag with no `= value` part.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected_ref(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected_ref(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            // CATEGORY keeps only the first character of the string literal;
            // an empty literal is rejected. Extra characters are silently
            // ignored.
            Some(Keyword::CATEGORY) => {
                self.expect_token(&Token::Eq)?;
                let category_str = self.parse_literal_string()?;
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): if neither TRUE nor FALSE follows `=`, this
                // expression yields `true` without consuming a token; the
                // stray token is then rejected by the surrounding list
                // parser — confirm this is the intended behavior.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                // Same TRUE/FALSE handling (and default-to-true quirk) as
                // PREFERRED above.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
        }
    }
21217
21218 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
21219 self.expect_token(&Token::LParen)?;
21220 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21221 self.expect_token(&Token::RParen)?;
21222 Ok(idents)
21223 }
21224
21225 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
21226 if dialect_of!(self is MySqlDialect | GenericDialect) {
21227 if self.parse_keyword(Keyword::FIRST) {
21228 Ok(Some(MySQLColumnPosition::First))
21229 } else if self.parse_keyword(Keyword::AFTER) {
21230 let ident = self.parse_identifier()?;
21231 Ok(Some(MySQLColumnPosition::After(ident)))
21232 } else {
21233 Ok(None)
21234 }
21235 } else {
21236 Ok(None)
21237 }
21238 }
21239
21240 fn parse_print(&mut self) -> Result<Statement, ParserError> {
21242 Ok(Statement::Print(PrintStatement {
21243 message: Box::new(self.parse_expr()?),
21244 }))
21245 }
21246
21247 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
21251 let wait_type = if self.parse_keyword(Keyword::DELAY) {
21252 WaitForType::Delay
21253 } else if self.parse_keyword(Keyword::TIME) {
21254 WaitForType::Time
21255 } else {
21256 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
21257 };
21258 let expr = self.parse_expr()?;
21259 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
21260 }
21261
21262 fn parse_return(&mut self) -> Result<Statement, ParserError> {
21264 match self.maybe_parse(|p| p.parse_expr())? {
21265 Some(expr) => Ok(Statement::Return(ReturnStatement {
21266 value: Some(ReturnStatementValue::Expr(expr)),
21267 })),
21268 None => Ok(Statement::Return(ReturnStatement { value: None })),
21269 }
21270 }
21271
21272 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
21276 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
21277
21278 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
21279 Some(self.parse_object_name(false)?)
21280 } else {
21281 None
21282 };
21283 self.expect_keyword(Keyword::OPTIONS)?;
21284 self.expect_token(&Token::LParen)?;
21285 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
21286 self.expect_token(&Token::RParen)?;
21287 self.expect_keyword(Keyword::AS)?;
21288 let query = self.parse_query()?;
21289 Ok(Statement::ExportData(ExportData {
21290 options,
21291 query,
21292 connection,
21293 }))
21294 }
21295
21296 fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
21297 self.expect_keyword(Keyword::VACUUM)?;
21298 let full = self.parse_keyword(Keyword::FULL);
21299 let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
21300 let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
21301 let reindex = self.parse_keyword(Keyword::REINDEX);
21302 let recluster = self.parse_keyword(Keyword::RECLUSTER);
21303 let (table_name, threshold, boost) =
21304 match self.maybe_parse(|p| p.parse_object_name(false))? {
21305 Some(table_name) => {
21306 let threshold = if self.parse_keyword(Keyword::TO) {
21307 let value = self.parse_value()?;
21308 self.expect_keyword(Keyword::PERCENT)?;
21309 Some(value)
21310 } else {
21311 None
21312 };
21313 let boost = self.parse_keyword(Keyword::BOOST);
21314 (Some(table_name), threshold, boost)
21315 }
21316 _ => (None, None, false),
21317 };
21318 Ok(Statement::Vacuum(VacuumStatement {
21319 full,
21320 sort_only,
21321 delete_only,
21322 reindex,
21323 recluster,
21324 table_name,
21325 threshold,
21326 boost,
21327 }))
21328 }
21329
    /// Consumes the parser and returns its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
21334
21335 fn peek_sub_query(&mut self) -> bool {
21337 self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
21338 .is_some()
21339 }
21340
21341 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
21342 let show_in;
21343 let mut filter_position = None;
21344 if self.dialect.supports_show_like_before_in() {
21345 if let Some(filter) = self.parse_show_statement_filter()? {
21346 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
21347 }
21348 show_in = self.maybe_parse_show_stmt_in()?;
21349 } else {
21350 show_in = self.maybe_parse_show_stmt_in()?;
21351 if let Some(filter) = self.parse_show_statement_filter()? {
21352 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
21353 }
21354 }
21355 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
21356 let limit = self.maybe_parse_show_stmt_limit()?;
21357 let from = self.maybe_parse_show_stmt_from()?;
21358 Ok(ShowStatementOptions {
21359 filter_position,
21360 show_in,
21361 starts_with,
21362 limit,
21363 limit_from: from,
21364 })
21365 }
21366
    /// Parses the optional `FROM ...` / `IN ...` clause of a SHOW statement.
    ///
    /// Returns `Ok(None)` (consuming nothing) when neither keyword is
    /// present. Otherwise the clause may name a parent kind (ACCOUNT,
    /// DATABASE, SCHEMA, TABLE, VIEW) with an optional object name, or name
    /// an object directly with no parent kind.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` / `IN SCHEMA` immediately followed by
            // `STARTS WITH ...` or `LIMIT ...` means the parent keyword has
            // no name of its own (the next tokens belong to later clauses).
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            // A parent-kind keyword, optionally followed by the parent's name.
            Some(parent_kw) => {
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected_ref(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token_ref(),
                        )
                    }
                }
            }
            // No parent-kind keyword: the clause names an object directly.
            None => {
                let mut parent_name = self.parse_object_name(false)?;
                // A second FROM/IN supplies a qualifier that is prepended to
                // the object name (presumably MySQL-style
                // `SHOW COLUMNS FROM tbl FROM db` — TODO confirm).
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
21436
21437 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21438 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
21439 Ok(Some(self.parse_value()?))
21440 } else {
21441 Ok(None)
21442 }
21443 }
21444
21445 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
21446 if self.parse_keyword(Keyword::LIMIT) {
21447 Ok(self.parse_limit()?)
21448 } else {
21449 Ok(None)
21450 }
21451 }
21452
21453 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21454 if self.parse_keyword(Keyword::FROM) {
21455 Ok(Some(self.parse_value()?))
21456 } else {
21457 Ok(None)
21458 }
21459 }
21460
    /// Returns `true` when the parser's current state is
    /// `ParserState::ColumnDefinition`.
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
21464
21465 pub(crate) fn parse_key_value_options(
21470 &mut self,
21471 parenthesized: bool,
21472 end_words: &[Keyword],
21473 ) -> Result<KeyValueOptions, ParserError> {
21474 let mut options: Vec<KeyValueOption> = Vec::new();
21475 let mut delimiter = KeyValueOptionsDelimiter::Space;
21476 if parenthesized {
21477 self.expect_token(&Token::LParen)?;
21478 }
21479 loop {
21480 match self.next_token().token {
21481 Token::RParen => {
21482 if parenthesized {
21483 break;
21484 } else {
21485 return self.expected_ref(" another option or EOF", self.peek_token_ref());
21486 }
21487 }
21488 Token::EOF | Token::SemiColon => break,
21489 Token::Comma => {
21490 delimiter = KeyValueOptionsDelimiter::Comma;
21491 continue;
21492 }
21493 Token::Word(w) if !end_words.contains(&w.keyword) => {
21494 options.push(self.parse_key_value_option(&w)?)
21495 }
21496 Token::Word(w) if end_words.contains(&w.keyword) => {
21497 self.prev_token();
21498 break;
21499 }
21500 _ => {
21501 return self.expected_ref(
21502 "another option, EOF, SemiColon, Comma or ')'",
21503 self.peek_token_ref(),
21504 )
21505 }
21506 };
21507 }
21508
21509 Ok(KeyValueOptions { delimiter, options })
21510 }
21511
    /// Parses the `= value` part of a single key/value option; the key word
    /// itself has already been consumed by the caller.
    ///
    /// The value may be a single-quoted string, a TRUE/FALSE keyword, a
    /// number, a bare word (kept as a placeholder value), a parenthesized
    /// list of values, or a nested parenthesized option list.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        // Peek (don't consume): `parse_value` below re-reads the token.
        let peeked_token = self.peek_token();
        match peeked_token.token {
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word is not a literal `parse_value` accepts:
            // consume it here and keep its text as a placeholder value.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            Token::LParen => {
                // Try `( v1, v2, ... )` as a plain list of values first; on
                // failure `maybe_parse` backtracks and the parenthesized text
                // is parsed as a nested option list instead.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
21570
21571 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
21573 if self.parse_keyword(Keyword::ALL) {
21574 return Ok(ResetStatement { reset: Reset::ALL });
21575 }
21576
21577 let obj = self.parse_object_name(false)?;
21578 Ok(ResetStatement {
21579 reset: Reset::ConfigurationParameter(obj),
21580 })
21581 }
21582}
21583
21584fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
21585 if let Some(prefix) = prefix {
21586 Expr::Prefixed {
21587 prefix,
21588 value: Box::new(expr),
21589 }
21590 } else {
21591 expr
21592 }
21593}
21594
21595impl Word {
21596 pub fn to_ident(&self, span: Span) -> Ident {
21602 Ident {
21603 value: self.value.clone(),
21604 quote_style: self.quote_style,
21605 span,
21606 }
21607 }
21608
21609 pub fn into_ident(self, span: Span) -> Ident {
21614 Ident {
21615 value: self.value,
21616 quote_style: self.quote_style,
21617 span,
21618 }
21619 }
21620}
21621
21622#[cfg(test)]
21623mod tests {
21624 use crate::test_utils::{all_dialects, TestedDialects};
21625
21626 use super::*;
21627
    /// Token-stream navigation: `prev_token` undoes `next_token`, `peek_token`
    /// never consumes, and EOF can be returned repeatedly at the end.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
21648
    /// `peek_tokens` returns the next N tokens without consuming them, and
    /// pads with EOF once the end of input is reached.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Consume "SELECT foo AS bar" so the next peek starts at FROM.
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
21693
21694 #[cfg(test)]
21695 mod test_parse_data_type {
21696 use crate::ast::{
21697 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
21698 };
21699 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
21700 use crate::test_utils::TestedDialects;
21701
        /// Asserts that `$input` parses to `$expected_type` and that the
        /// resulting AST round-trips back to the exact input text.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
21711
        /// ANSI character string types: CHARACTER / CHAR / CHARACTER VARYING /
        /// CHAR VARYING / VARCHAR with optional lengths and length units.
        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
21839
        /// ANSI character large object types: CHARACTER LARGE OBJECT,
        /// CHAR LARGE OBJECT, and CLOB, each with an optional length.
        #[test]
        fn test_ansii_character_large_object_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }
21871
        /// Unrecognized type names become `DataType::Custom`, keeping any
        /// parenthesized modifiers as raw strings.
        #[test]
        fn test_parse_custom_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }
21901
        /// Exact numeric types NUMERIC / DECIMAL / DEC with optional
        /// precision and (possibly negative or explicitly signed) scale.
        #[test]
        fn test_ansii_exact_numeric_types() {
            let dialect = TestedDialects::new(vec![
                Box::new(GenericDialect {}),
                Box::new(AnsiDialect {}),
                Box::new(PostgreSqlDialect {}),
            ]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            // Negative scales round-trip unchanged.
            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-2)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(1000,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-1000)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-5)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(20,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-2)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
            );

            // An explicit `+` sign parses but is not preserved by Display,
            // so this case cannot use the round-trip macro.
            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
                let data_type = parser.parse_data_type().unwrap();
                assert_eq!(
                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
                    data_type
                );
                assert_eq!("NUMERIC(10,5)", data_type.to_string());
            });
        }
22000
        /// ANSI date/time types: DATE, TIME and TIMESTAMP with optional
        /// precision and WITH/WITHOUT TIME ZONE qualifiers.
        #[test]
        fn test_ansii_date_type() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
    }
22065 }
22066
    /// `parse_schema_name` handles the three CREATE SCHEMA name forms:
    /// plain name, `AUTHORIZATION <ident>`, and `<name> AUTHORIZATION <ident>`;
    /// each round-trips via Display.
    #[test]
    fn test_parse_schema_name() {
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }
22099
    /// MySQL `INDEX`/`KEY` table constraints: optional name, optional
    /// `USING BTREE|HASH`, and one or more indexed columns; each form
    /// round-trips via Display.
    #[test]
    fn mysql_parse_index_table_constraint() {
        macro_rules! test_parse_table_constraint {
            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                    assert_eq!(constraint, $expected);
                    assert_eq!(constraint.to_string(), $input.to_string());
                });
            }};
        }

        // Builds the expected IndexColumn for a plain (unordered) column.
        fn mk_expected_col(name: &str) -> IndexColumn {
            IndexColumn {
                column: OrderByExpr {
                    expr: Expr::Identifier(name.into()),
                    options: OrderByOptions {
                        asc: None,
                        nulls_first: None,
                    },
                    with_fill: None,
                },
                operator_class: None,
            }
        }

        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

        test_parse_table_constraint!(
            dialect,
            "INDEX (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "KEY (c1)",
            IndexConstraint {
                display_as_key: true,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX 'index' (c1, c2)",
            TableConstraint::Index(IndexConstraint {
                display_as_key: false,
                name: Some(Ident::with_quote('\'', "index")),
                index_type: None,
                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
                index_options: vec![],
            })
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING BTREE (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING HASH (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING BTREE (c1)",
            IndexConstraint {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING HASH (c1)",
            IndexConstraint {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );
    }
22221
22222 #[test]
22223 fn test_tokenizer_error_loc() {
22224 let sql = "foo '";
22225 let ast = Parser::parse_sql(&GenericDialect, sql);
22226 assert_eq!(
22227 ast,
22228 Err(ParserError::TokenizerError(
22229 "Unterminated string literal at Line: 1, Column: 5".to_string()
22230 ))
22231 );
22232 }
22233
22234 #[test]
22235 fn test_parser_error_loc() {
22236 let sql = "SELECT this is a syntax error";
22237 let ast = Parser::parse_sql(&GenericDialect, sql);
22238 assert_eq!(
22239 ast,
22240 Err(ParserError::ParserError(
22241 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
22242 .to_string()
22243 ))
22244 );
22245 }
22246
22247 #[test]
22248 fn test_nested_explain_error() {
22249 let sql = "EXPLAIN EXPLAIN SELECT 1";
22250 let ast = Parser::parse_sql(&GenericDialect, sql);
22251 assert_eq!(
22252 ast,
22253 Err(ParserError::ParserError(
22254 "Explain must be root of the plan".to_string()
22255 ))
22256 );
22257 }
22258
22259 #[test]
22260 fn test_parse_multipart_identifier_positive() {
22261 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
22262
22263 let expected = vec![
22265 Ident {
22266 value: "CATALOG".to_string(),
22267 quote_style: None,
22268 span: Span::empty(),
22269 },
22270 Ident {
22271 value: "F(o)o. \"bar".to_string(),
22272 quote_style: Some('"'),
22273 span: Span::empty(),
22274 },
22275 Ident {
22276 value: "table".to_string(),
22277 quote_style: None,
22278 span: Span::empty(),
22279 },
22280 ];
22281 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
22282 let actual = parser.parse_multipart_identifier().unwrap();
22283 assert_eq!(expected, actual);
22284 });
22285
22286 let expected = vec![
22288 Ident {
22289 value: "CATALOG".to_string(),
22290 quote_style: None,
22291 span: Span::empty(),
22292 },
22293 Ident {
22294 value: "table".to_string(),
22295 quote_style: None,
22296 span: Span::empty(),
22297 },
22298 ];
22299 dialect.run_parser_method("CATALOG . table", |parser| {
22300 let actual = parser.parse_multipart_identifier().unwrap();
22301 assert_eq!(expected, actual);
22302 });
22303 }
22304
22305 #[test]
22306 fn test_parse_multipart_identifier_negative() {
22307 macro_rules! test_parse_multipart_identifier_error {
22308 ($input:expr, $expected_err:expr $(,)?) => {{
22309 all_dialects().run_parser_method(&*$input, |parser| {
22310 let actual_err = parser.parse_multipart_identifier().unwrap_err();
22311 assert_eq!(actual_err.to_string(), $expected_err);
22312 });
22313 }};
22314 }
22315
22316 test_parse_multipart_identifier_error!(
22317 "",
22318 "sql parser error: Empty input when parsing identifier",
22319 );
22320
22321 test_parse_multipart_identifier_error!(
22322 "*schema.table",
22323 "sql parser error: Unexpected token in identifier: *",
22324 );
22325
22326 test_parse_multipart_identifier_error!(
22327 "schema.table*",
22328 "sql parser error: Unexpected token in identifier: *",
22329 );
22330
22331 test_parse_multipart_identifier_error!(
22332 "schema.table.",
22333 "sql parser error: Trailing period in identifier",
22334 );
22335
22336 test_parse_multipart_identifier_error!(
22337 "schema.*",
22338 "sql parser error: Unexpected token following period in identifier: *",
22339 );
22340 }
22341
22342 #[test]
22343 fn test_mysql_partition_selection() {
22344 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
22345 let expected = vec!["p0", "p2"];
22346
22347 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
22348 assert_eq!(ast.len(), 1);
22349 if let Statement::Query(v) = &ast[0] {
22350 if let SetExpr::Select(select) = &*v.body {
22351 assert_eq!(select.from.len(), 1);
22352 let from: &TableWithJoins = &select.from[0];
22353 let table_factor = &from.relation;
22354 if let TableFactor::Table { partitions, .. } = table_factor {
22355 let actual: Vec<&str> = partitions
22356 .iter()
22357 .map(|ident| ident.value.as_str())
22358 .collect();
22359 assert_eq!(expected, actual);
22360 }
22361 }
22362 } else {
22363 panic!("fail to parse mysql partition selection");
22364 }
22365 }
22366
22367 #[test]
22368 fn test_replace_into_placeholders() {
22369 let sql = "REPLACE INTO t (a) VALUES (&a)";
22370
22371 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
22372 }
22373
22374 #[test]
22375 fn test_replace_into_set_placeholder() {
22376 let sql = "REPLACE INTO t SET ?";
22377
22378 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
22379 }
22380
22381 #[test]
22382 fn test_replace_incomplete() {
22383 let sql = r#"REPLACE"#;
22384
22385 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
22386 }
22387
22388 #[test]
22389 fn test_placeholder_invalid_whitespace() {
22390 for w in [" ", "/*invalid*/"] {
22391 let sql = format!("\nSELECT\n :{w}fooBar");
22392 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
22393 }
22394 }
22395}