1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while parsing a SQL statement.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could proceed; payload is the
    /// tokenizer's rendered error message.
    TokenizerError(String),
    /// A syntax error detected by the parser itself.
    ParserError(String),
    /// The nesting-depth budget tracked by `RecursionCounter` was exhausted.
    RecursionLimitExceeded,
}
60
/// Builds an `Err(ParserError::ParserError(..))` from a message and a source
/// location; the location's `Display` output is appended directly after the
/// message text.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
// Sub-parsers split into sibling files: ALTER and MERGE statement parsing.
mod alter;
mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks how many additional levels of nesting the parser may enter.
    /// The remaining budget is shared via `Rc<Cell<_>>` so that guards can
    /// restore it when they are dropped.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consumes one level of depth. Returns an RAII guard that gives the
        /// level back on drop, or `ParserError::RecursionLimitExceeded` when
        /// the budget has reached zero.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let depth = self.remaining_depth.get();
            if depth == 0 {
                return Err(ParserError::RecursionLimitExceeded);
            }
            self.remaining_depth.set(depth - 1);
            Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
        }
    }

    /// Guard handed out by [`RecursionCounter::try_decrease`]; restores the
    /// consumed depth level when it goes out of scope.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            self.remaining_depth.set(self.remaining_depth.get() + 1);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op depth tracker used in `no_std` builds: recursion depth is not
    /// limited there. The API mirrors the `std` implementation.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The requested depth is ignored in this build.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; the returned guard does nothing on drop.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard matching the `std` variant's interface.
    pub struct DepthGuard {}
}
158
/// Whether a syntax element is required at the current position or may be
/// omitted.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
167
/// Whether the `LATERAL` keyword was present.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// Result of parsing an expression position that may be a wildcard.
pub enum WildcardExpr {
    /// A regular expression (no wildcard present).
    Expr(Expr),
    /// A qualified wildcard such as `alias.*`.
    QualifiedWildcard(ObjectName),
    /// A bare `*`.
    Wildcard,
}
185
impl From<TokenizerError> for ParserError {
    /// Wraps a tokenizer failure, keeping only its rendered message.
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// `ParserError` carries no nested source error, so the trait's default
// methods suffice.
impl core::error::Error for ParserError {}
207
/// Default recursion-depth budget installed by [`Parser::new`]; override with
/// [`Parser::with_recursion_limit`].
const DEFAULT_REMAINING_DEPTH: usize = 50;
210
/// Sentinel token returned when the token stream is exhausted; its span
/// points at line 0 / column 0.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
/// Newtype flag recording whether a trailing closing bracket was matched.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
239
/// Options that tune the parser's behavior independently of the dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Allow a trailing comma in comma-separated lists (default `false`).
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization (default `true`).
    pub unescape: bool,
    /// Require a semicolon between consecutive statements (default `true`).
    pub require_semicolon_stmt_delimiter: bool,
}
252
impl Default for ParserOptions {
    /// Defaults: no trailing commas, unescape string literals, and require a
    /// semicolon delimiter between statements.
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}
262
263impl ParserOptions {
264 pub fn new() -> Self {
266 Default::default()
267 }
268
269 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
281 self.trailing_commas = trailing_commas;
282 self
283 }
284
285 pub fn with_unescape(mut self, unescape: bool) -> Self {
288 self.unescape = unescape;
289 self
290 }
291}
292
/// Parsing mode that alters how certain constructs are interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Default mode; no special casing.
    Normal,
    /// Parsing inside a `CONNECT BY` clause — NOTE(review): presumably
    /// changes how some operators/keywords are treated; confirm at use sites.
    ConnectBy,
    /// Parsing a column definition; e.g. `COLLATE` after an expression is
    /// not consumed in this state (see `parse_subexpr`).
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream and produces an abstract syntax tree
/// of [`Statement`]s.
pub struct Parser<'a> {
    /// The token stream (with source spans); may include whitespace/comment
    /// tokens (see `into_comments`).
    tokens: Vec<TokenWithSpan>,
    /// Index of the first token that has not yet been processed.
    index: usize,
    /// Current parsing mode; see [`ParserState`].
    state: ParserState,
    /// The SQL dialect whose hooks customize parsing behavior.
    dialect: &'a dyn Dialect,
    /// Feature toggles (trailing commas, unescaping, statement delimiters).
    options: ParserOptions,
    /// Guards against stack overflow on deeply nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
365 pub fn new(dialect: &'a dyn Dialect) -> Self {
381 Self {
382 tokens: vec![],
383 index: 0,
384 state: ParserState::Normal,
385 dialect,
386 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
387 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
388 }
389 }
390
    /// Replaces the recursion-depth limit (default
    /// [`DEFAULT_REMAINING_DEPTH`]) with `recursion_limit`.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
417
    /// Replaces the parser's [`ParserOptions`] wholesale.
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
444
    /// Installs a pre-tokenized stream (with source spans) and rewinds the
    /// parser to its first token.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
    /// Tokenizes `sql` using the parser's dialect and unescape setting, then
    /// installs the resulting token stream. Fails on tokenizer errors.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
478
    /// Parses zero or more semicolon-separated statements until EOF, or
    /// until an `END` keyword is seen where a delimiter was expected (so a
    /// surrounding block parser can consume the `END` itself).
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Eat any number of consecutive semicolons between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some configurations allow statements to follow one another
            // without an explicit semicolon separator.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // `END` right after a finished statement terminates the
                // list without being consumed here.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            // Anything else while a delimiter is still owed is an error.
            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
    /// Convenience entry point: tokenizes and parses `sql` into a list of
    /// statements using a fresh parser with default options.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
548
    /// Like [`Parser::parse_sql`], but also returns the comments collected
    /// from the token stream after parsing succeeds.
    pub fn parse_sql_with_comments(
        dialect: &'a dyn Dialect,
        sql: &str,
    ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
        let mut p = Parser::new(dialect).try_with_sql(sql)?;
        p.parse_statements().map(|stmts| (stmts, p.into_comments()))
    }
560
    /// Consumes the parser and collects every comment token (single- and
    /// multi-line whitespace comments) from its token stream, preserving
    /// each comment's source span.
    fn into_comments(self) -> comments::Comments {
        let mut comments = comments::Comments::default();
        for t in self.tokens.into_iter() {
            match t.token {
                Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::SingleLine {
                            content: comment,
                            prefix,
                        },
                        span: t.span,
                    });
                }
                Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::MultiLine(comment),
                        span: t.span,
                    });
                }
                // All non-comment tokens are ignored.
                _ => {}
            }
        }
        comments
    }
586
    /// Parses a single top-level SQL statement, dispatching on the leading
    /// token/keyword to the dedicated sub-parser. Acquires a recursion-depth
    /// guard so pathological nesting fails with `RecursionLimitExceeded`
    /// rather than overflowing the stack.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;

        // The dialect may completely take over statement parsing.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                // Sub-parsers that need to re-read the leading keyword push
                // it back with `prev_token` before delegating.
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                // ATTACH has a DuckDB-specific form; all other dialects use
                // the generic one.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                // These sub-parsers take the already-consumed keyword token
                // as an argument.
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // Dialect-gated keywords fall through to the "expected"
                // error below when the dialect does not support them.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                Keyword::SECURITY => self.parse_security_label().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A parenthesized query, e.g. `(SELECT ...)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
732
    /// Parses a procedural `CASE` statement:
    /// `CASE [expr] WHEN ... [WHEN ...]* [ELSE ...] END [CASE]`.
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // No operand before the first WHEN means a "searched" CASE.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the attached token is the last
        // keyword consumed.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
769
    /// Parses an `IF` statement:
    /// `IF <cond> THEN ... [ELSEIF <cond> THEN ...]* [ELSE ...] END IF`.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Must close with `END IF`; the attached end token is the `IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
809
    /// Parses a `WHILE` statement: the `WHILE` keyword, its condition, and a
    /// statement block terminated by `END`.
    fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(WhileStatement { while_block })
    }
819
820 fn parse_conditional_statement_block(
828 &mut self,
829 terminal_keywords: &[Keyword],
830 ) -> Result<ConditionalStatementBlock, ParserError> {
831 let start_token = self.get_current_token().clone(); let mut then_token = None;
833
834 let condition = match &start_token.token {
835 Token::Word(w) if w.keyword == Keyword::ELSE => None,
836 Token::Word(w) if w.keyword == Keyword::WHILE => {
837 let expr = self.parse_expr()?;
838 Some(expr)
839 }
840 _ => {
841 let expr = self.parse_expr()?;
842 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
843 Some(expr)
844 }
845 };
846
847 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
848
849 Ok(ConditionalStatementBlock {
850 start_token: AttachedToken(start_token),
851 condition,
852 then_token,
853 conditional_statements,
854 })
855 }
856
    /// Parses the statements of a conditional arm: either an explicit
    /// `BEGIN ... END` block or a bare statement sequence that stops at any
    /// of `terminal_keywords`.
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }
880
    /// Parses a `RAISE` statement: `RAISE USING MESSAGE = <expr>`,
    /// `RAISE <expr>`, or a bare `RAISE` (value `None`).
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // The expression is optional; `maybe_parse` backtracks if absent.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }
    /// Parses `COMMENT [IF EXISTS] ON <object-type> <name> IS <'text'|NULL>`
    /// (gated by `Dialect::supports_comment_on` at the dispatch site).
    /// A `NULL` comment is represented as `None`.
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        // Map the object-type keyword to the AST variant, then parse the
        // object's (possibly qualified) name.
        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLLATION => {
                (CommentObject::Collation, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DOMAIN => {
                (CommentObject::Domain, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::FUNCTION => {
                (CommentObject::Function, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::INDEX => {
                (CommentObject::Index, self.parse_object_name(false)?)
            }
            // `MATERIALIZED VIEW` is the only two-word object type.
            Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
                self.expect_keyword_is(Keyword::VIEW)?;
                (
                    CommentObject::MaterializedView,
                    self.parse_object_name(false)?,
                )
            }
            Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
                (CommentObject::Procedure, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
                (CommentObject::Sequence, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TYPE => {
                (CommentObject::Type, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::VIEW => {
                (CommentObject::View, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` clears the comment; otherwise a string literal follows.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }
974
    /// Parses a MySQL `FLUSH` statement (also accepted under the generic
    /// dialect); errors immediately for any other dialect.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!(
                "Unsupported statement FLUSH",
                self.peek_token_ref().span.start
            );
        }

        // Optional NO_WRITE_TO_BINLOG / LOCAL modifier.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // What is being flushed. Multi-word forms are tried before their
        // single-word suffixes (e.g. `RELAY LOGS` before plain `LOGS`).
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // RELAY LOGS may target a specific replication channel.
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // FLUSH TABLES [tbl, ...] [WITH READ LOCK] [FOR EXPORT]
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A plain identifier starts the table-name list.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    // Any non-word token (including EOF) ends the clause.
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected_ref(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
             RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
1065
    /// Parses `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`.
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The whole partitions clause is optional; `maybe_parse` backtracks
        // when `PARTITIONS` does not follow.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1093
    /// Parses `TRUNCATE [TABLE] [IF EXISTS] <name>[, ...]` with optional
    /// `PARTITION (...)`, PostgreSQL/generic-only `RESTART|CONTINUE
    /// IDENTITY` and `CASCADE|RESTRICT` options, and an optional
    /// `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            // Optional trailing `*` after the table name.
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1144
1145 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1146 if self.parse_keyword(Keyword::CASCADE) {
1147 Some(CascadeOption::Cascade)
1148 } else if self.parse_keyword(Keyword::RESTRICT) {
1149 Some(CascadeOption::Restrict)
1150 } else {
1151 None
1152 }
1153 }
1154
    /// Parses the optional parenthesized option list of DuckDB `ATTACH`:
    /// `(READ_ONLY [TRUE|FALSE] | TYPE <ident>, ...)`. Returns an empty
    /// vector when no opening parenthesis follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean value is optional; bare READ_ONLY is `None`.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // Each option must be followed by `,` (continue) or `)` (done).
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1191
    /// Parses DuckDB `ATTACH [DATABASE] [IF NOT EXISTS] <path> [AS <alias>]
    /// [(<options>)]` (the `ATTACH` keyword is already consumed).
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1212
    /// Parses DuckDB-style `DETACH [DATABASE] [IF EXISTS] <alias>` (the
    /// `DETACH` keyword is already consumed).
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1224
    /// Parses `ATTACH [DATABASE] <expr> AS <schema>` — the non-DuckDB form;
    /// see the `ATTACH` dispatch in `parse_statement`.
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1237
    /// Parses `ANALYZE [TABLE] [<name>] ...` with its optional clauses: a
    /// parenthesized column list, `PARTITION (...)`, `FOR COLUMNS [...]`,
    /// `CACHE METADATA`, `NOSCAN`, and `COMPUTE STATISTICS`, in any order.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // `ANALYZE tbl (col, ...)` — explicit column list directly after
        // the table name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // The remaining clauses may appear in any order; loop until no
        // introducing keyword matches.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list after FOR COLUMNS is itself optional.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1302
    /// Parses a wildcard expression (`*`, `qualified.*`, or `(*)`), or —
    /// when no wildcard form matches — rewinds and parses a regular
    /// expression instead.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember where we started so we can backtrack to a plain
        // expression parse below.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token_ref().token == Token::Period {
                    // Collect `part.part....` until a `*` yields a qualified
                    // wildcard; anything else falls through to the rewind.
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable given the outer pattern, but kept as a
                        // defensive internal error.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Placeholder(s) => {
                                id_parts.push(Ident::new(s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            Token::LParen => {
                // `(*)` — the parenthesized `*` is treated as a bare
                // wildcard; both tokens are consumed.
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // No wildcard form matched: rewind and parse a normal expression.
        self.index = index;
        self.parse_expr()
    }
1366
    /// Parses a full expression, starting from the dialect's lowest
    /// ("unknown") precedence level.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1371
    /// Parses an expression with an optional alias and optional `ASC`/`DESC`
    /// ordering; nulls ordering is not parsed here and is left `None`.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject ASC/DESC/GROUP as *implicit* aliases so they keep their
        // keyword meaning; an explicit `AS` still permits them.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1391
    /// Parses an expression whose operators all bind tighter than
    /// `precedence` (precedence climbing): parse a prefix expression, then
    /// repeatedly fold in infix operators while the next operator's
    /// precedence exceeds the floor.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        // COLLATE attaches to the expression itself, but is suppressed while
        // parsing a column definition (there COLLATE belongs to the column).
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // A period here is compound-access syntax handled elsewhere,
            // not an infix operator.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1430
1431 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1433 let condition = self.parse_expr()?;
1434 let message = if self.parse_keyword(Keyword::AS) {
1435 Some(self.parse_expr()?)
1436 } else {
1437 None
1438 };
1439
1440 Ok(Statement::Assert { condition, message })
1441 }
1442
1443 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1445 let name = self.parse_identifier()?;
1446 Ok(Statement::Savepoint { name })
1447 }
1448
1449 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1451 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1452 let name = self.parse_identifier()?;
1453
1454 Ok(Statement::ReleaseSavepoint { name })
1455 }
1456
1457 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1459 let channel = self.parse_identifier()?;
1460 Ok(Statement::LISTEN { channel })
1461 }
1462
1463 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1465 let channel = if self.consume_token(&Token::Mul) {
1466 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1467 } else {
1468 match self.parse_identifier() {
1469 Ok(expr) => expr,
1470 _ => {
1471 self.prev_token();
1472 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1473 }
1474 }
1475 };
1476 Ok(Statement::UNLISTEN { channel })
1477 }
1478
1479 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1481 let channel = self.parse_identifier()?;
1482 let payload = if self.consume_token(&Token::Comma) {
1483 Some(self.parse_literal_string()?)
1484 } else {
1485 None
1486 };
1487 Ok(Statement::NOTIFY { channel, payload })
1488 }
1489
1490 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1492 if self.peek_keyword(Keyword::TABLE) {
1493 self.expect_keyword(Keyword::TABLE)?;
1494 let rename_tables = self.parse_comma_separated(|parser| {
1495 let old_name = parser.parse_object_name(false)?;
1496 parser.expect_keyword(Keyword::TO)?;
1497 let new_name = parser.parse_object_name(false)?;
1498
1499 Ok(RenameTable { old_name, new_name })
1500 })?;
1501 Ok(rename_tables.into())
1502 } else {
1503 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1504 }
1505 }
1506
    /// Try to parse an expression prefix introduced by the reserved word `w`
    /// (already consumed; `w_span` is its source span).
    ///
    /// Returns `Ok(Some(expr))` when the keyword starts a recognized
    /// construct, `Ok(None)` when it does not (the caller then retries the
    /// word as an ordinary identifier), or an error from the sub-parser.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Literals: rewind so parse_value sees the token itself.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Niladic "functions" (no parentheses) on Postgres-like dialects.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Time keywords that may optionally take an argument list.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // On Databricks, EXISTS is only a subquery test when followed by
            // SELECT/WITH; otherwise it may be a function of that name.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]`: array literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<query>)`: array built from a subquery (where supported).
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // PRIOR is only an operator inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric typed strings (e.g. `POINT '...'`) on supporting dialects.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1633
1634 fn parse_expr_prefix_by_unreserved_word(
1636 &mut self,
1637 w: &Word,
1638 w_span: Span,
1639 ) -> Result<Expr, ParserError> {
1640 let is_outer_join = self.peek_outer_join_operator();
1641 match &self.peek_token_ref().token {
1642 Token::LParen if !is_outer_join => {
1643 let id_parts = vec![w.to_ident(w_span)];
1644 self.parse_function(ObjectName::from(id_parts))
1645 }
1646 Token::SingleQuotedString(_)
1648 | Token::DoubleQuotedString(_)
1649 | Token::HexStringLiteral(_)
1650 if w.value.starts_with('_') =>
1651 {
1652 Ok(Expr::Prefixed {
1653 prefix: w.to_ident(w_span),
1654 value: self.parse_introduced_string_expr()?.into(),
1655 })
1656 }
1657 Token::SingleQuotedString(_)
1659 | Token::DoubleQuotedString(_)
1660 | Token::HexStringLiteral(_)
1661 if w.value.starts_with('_') =>
1662 {
1663 Ok(Expr::Prefixed {
1664 prefix: w.to_ident(w_span),
1665 value: self.parse_introduced_string_expr()?.into(),
1666 })
1667 }
1668 Token::Arrow if self.dialect.supports_lambda_functions() => {
1672 self.expect_token(&Token::Arrow)?;
1673 Ok(Expr::Lambda(LambdaFunction {
1674 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1675 name: w.to_ident(w_span),
1676 data_type: None,
1677 }),
1678 body: Box::new(self.parse_expr()?),
1679 syntax: LambdaSyntax::Arrow,
1680 }))
1681 }
1682 Token::Word(_)
1686 if self.dialect.supports_lambda_functions()
1687 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1688 {
1689 let data_type = self.parse_data_type()?;
1690 self.expect_token(&Token::Arrow)?;
1691 Ok(Expr::Lambda(LambdaFunction {
1692 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1693 name: w.to_ident(w_span),
1694 data_type: Some(data_type),
1695 }),
1696 body: Box::new(self.parse_expr()?),
1697 syntax: LambdaSyntax::Arrow,
1698 }))
1699 }
1700 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1701 }
1702 }
1703
1704 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1707 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1708 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1709 } else {
1710 false
1711 }
1712 }
1713
1714 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1716 if let Some(prefix) = self.dialect.parse_prefix(self) {
1718 return prefix;
1719 }
1720
1721 let loc = self.peek_token_ref().span.start;
1738 let opt_expr = self.maybe_parse(|parser| {
1739 match parser.parse_data_type()? {
1740 DataType::Interval { .. } => parser.parse_interval(),
1741 DataType::Custom(ref name, ref modifiers)
1752 if modifiers.is_empty()
1753 && Self::is_simple_unquoted_object_name(name, "xml")
1754 && parser.dialect.supports_xml_expressions() =>
1755 {
1756 Ok(Expr::TypedString(TypedString {
1757 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1758 value: parser.parse_value()?,
1759 uses_odbc_syntax: false,
1760 }))
1761 }
1762 DataType::Custom(..) => parser_err!("dummy", loc),
1763 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1765 Ok(Expr::Cast {
1766 kind: CastKind::Cast,
1767 expr: Box::new(parser.parse_expr()?),
1768 data_type: DataType::Binary(None),
1769 array: false,
1770 format: None,
1771 })
1772 }
1773 data_type => Ok(Expr::TypedString(TypedString {
1774 data_type,
1775 value: parser.parse_value()?,
1776 uses_odbc_syntax: false,
1777 })),
1778 }
1779 })?;
1780
1781 if let Some(expr) = opt_expr {
1782 return Ok(expr);
1783 }
1784
1785 let dialect = self.dialect;
1789
1790 self.advance_token();
1791 let next_token_index = self.get_current_index();
1792 let next_token = self.get_current_token();
1793 let span = next_token.span;
1794 let expr = match &next_token.token {
1795 Token::Word(w) => {
1796 let w = w.clone();
1805 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1806 Ok(Some(expr)) => Ok(expr),
1808
1809 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1811
1812 Err(e) => {
1819 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1820 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1821 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1822 }) {
1823 return Ok(expr);
1824 }
1825 }
1826 return Err(e);
1827 }
1828 }
1829 } Token::LBracket => self.parse_array_expr(false),
1832 tok @ Token::Minus | tok @ Token::Plus => {
1833 let op = if *tok == Token::Plus {
1834 UnaryOperator::Plus
1835 } else {
1836 UnaryOperator::Minus
1837 };
1838 Ok(Expr::UnaryOp {
1839 op,
1840 expr: Box::new(
1841 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1842 ),
1843 })
1844 }
1845 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1846 op: UnaryOperator::BangNot,
1847 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1848 }),
1849 tok @ Token::DoubleExclamationMark
1850 | tok @ Token::PGSquareRoot
1851 | tok @ Token::PGCubeRoot
1852 | tok @ Token::AtSign
1853 if dialect_is!(dialect is PostgreSqlDialect) =>
1854 {
1855 let op = match tok {
1856 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1857 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1858 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1859 Token::AtSign => UnaryOperator::PGAbs,
1860 _ => {
1861 return Err(ParserError::ParserError(
1862 "Internal parser error: unexpected unary operator token".to_string(),
1863 ))
1864 }
1865 };
1866 Ok(Expr::UnaryOp {
1867 op,
1868 expr: Box::new(
1869 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1870 ),
1871 })
1872 }
1873 Token::Tilde => Ok(Expr::UnaryOp {
1874 op: UnaryOperator::BitwiseNot,
1875 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1876 }),
1877 tok @ Token::Sharp
1878 | tok @ Token::AtDashAt
1879 | tok @ Token::AtAt
1880 | tok @ Token::QuestionMarkDash
1881 | tok @ Token::QuestionPipe
1882 if self.dialect.supports_geometric_types() =>
1883 {
1884 let op = match tok {
1885 Token::Sharp => UnaryOperator::Hash,
1886 Token::AtDashAt => UnaryOperator::AtDashAt,
1887 Token::AtAt => UnaryOperator::DoubleAt,
1888 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1889 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1890 _ => {
1891 return Err(ParserError::ParserError(format!(
1892 "Unexpected token in unary operator parsing: {tok:?}"
1893 )))
1894 }
1895 };
1896 Ok(Expr::UnaryOp {
1897 op,
1898 expr: Box::new(
1899 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1900 ),
1901 })
1902 }
1903 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1904 {
1905 self.prev_token();
1906 Ok(Expr::Value(self.parse_value()?))
1907 }
1908 Token::UnicodeStringLiteral(_) => {
1909 self.prev_token();
1910 Ok(Expr::Value(self.parse_value()?))
1911 }
1912 Token::Number(_, _)
1913 | Token::SingleQuotedString(_)
1914 | Token::DoubleQuotedString(_)
1915 | Token::TripleSingleQuotedString(_)
1916 | Token::TripleDoubleQuotedString(_)
1917 | Token::DollarQuotedString(_)
1918 | Token::SingleQuotedByteStringLiteral(_)
1919 | Token::DoubleQuotedByteStringLiteral(_)
1920 | Token::TripleSingleQuotedByteStringLiteral(_)
1921 | Token::TripleDoubleQuotedByteStringLiteral(_)
1922 | Token::SingleQuotedRawStringLiteral(_)
1923 | Token::DoubleQuotedRawStringLiteral(_)
1924 | Token::TripleSingleQuotedRawStringLiteral(_)
1925 | Token::TripleDoubleQuotedRawStringLiteral(_)
1926 | Token::NationalStringLiteral(_)
1927 | Token::QuoteDelimitedStringLiteral(_)
1928 | Token::NationalQuoteDelimitedStringLiteral(_)
1929 | Token::HexStringLiteral(_) => {
1930 self.prev_token();
1931 Ok(Expr::Value(self.parse_value()?))
1932 }
1933 Token::LParen => {
1934 let expr =
1935 if let Some(expr) = self.try_parse_expr_sub_query()? {
1936 expr
1937 } else if let Some(lambda) = self.try_parse_lambda()? {
1938 return Ok(lambda);
1939 } else {
1940 let exprs = self.with_state(ParserState::Normal, |p| {
1951 p.parse_comma_separated(Parser::parse_expr)
1952 })?;
1953 match exprs.len() {
1954 0 => return Err(ParserError::ParserError(
1955 "Internal parser error: parse_comma_separated returned empty list"
1956 .to_string(),
1957 )),
1958 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1959 _ => Expr::Tuple(exprs),
1960 }
1961 };
1962 self.expect_token(&Token::RParen)?;
1963 Ok(expr)
1964 }
1965 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1966 self.prev_token();
1967 Ok(Expr::Value(self.parse_value()?))
1968 }
1969 Token::LBrace => {
1970 self.prev_token();
1971 self.parse_lbrace_expr()
1972 }
1973 _ => self.expected_at("an expression", next_token_index),
1974 }?;
1975
1976 Ok(expr)
1977 }
1978
1979 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1980 Ok(Expr::TypedString(TypedString {
1981 data_type: DataType::GeometricType(kind),
1982 value: self.parse_value()?,
1983 uses_odbc_syntax: false,
1984 }))
1985 }
1986
    /// Extend `root` with compound access: dot-separated members (`a.b.c`),
    /// subscripts (`a[1]`), a trailing qualified wildcard (`a.b.*`), or the
    /// Oracle outer-join operator `(+)` — and fold the pieces into the
    /// appropriate `Expr` variant.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `.*` ends the chain. Only PostgreSQL consumes it
                        // here as a qualified wildcard; other dialects rewind
                        // and leave the `.` for the caller.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token(); }

                        break;
                    }
                    // A quoted member name, e.g. `a.'b'`.
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token(); }
                    // A placeholder member, e.g. `a.?`.
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token(); }
                    _ => {
                        // Parse the member as a sub-expression at Period
                        // precedence, accepting only shapes that can follow a
                        // dot; `maybe_parse` rewinds on rejection.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        // Flatten nested compound results into the chain so
                        // `a.b.c` produces one flat access list.
                        match expr {
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Fall back to a plain identifier member.
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // Subscript access, e.g. `a[1][2]` (not on PartiQL).
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` requires every chain element to be an identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `col(+)` requires a pure identifier chain as well.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2116
    /// Combine `root` and its `access_chain` into the most specific `Expr`:
    /// a plain compound identifier, a qualified function call, an
    /// outer-join-decorated identifier, or a generic compound field access.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        // No access at all: the root stands alone.
        if access_chain.is_empty() {
            return Ok(root);
        }

        // All-identifier chain: `a.b.c` becomes a compound identifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // Identifiers ending in a function call: `a.b.f(...)` becomes a
        // function whose name is the qualified path `a.b.f`.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root plus intermediate identifiers to the function name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // A single trailing `(+)` access: fold it into an OuterJoin over the
        // full compound identifier.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Anything else: a generic field-access expression.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2209
2210 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2211 match k {
2212 Keyword::LOCAL => Some(ContextModifier::Local),
2213 Keyword::GLOBAL => Some(ContextModifier::Global),
2214 Keyword::SESSION => Some(ContextModifier::Session),
2215 _ => None,
2216 }
2217 }
2218
2219 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2221 if !matches!(root, Expr::Identifier(_)) {
2222 return false;
2223 }
2224 fields
2225 .iter()
2226 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2227 }
2228
2229 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2231 let mut idents = vec![];
2232 if let Expr::Identifier(root) = root {
2233 idents.push(root);
2234 for x in fields {
2235 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2236 idents.push(ident);
2237 } else {
2238 return parser_err!(
2239 format!("Expected identifier, found: {}", x),
2240 x.span().start
2241 );
2242 }
2243 }
2244 Ok(idents)
2245 } else {
2246 parser_err!(
2247 format!("Expected identifier, found: {}", root),
2248 root.span().start
2249 )
2250 }
2251 }
2252
2253 fn peek_outer_join_operator(&mut self) -> bool {
2255 if !self.dialect.supports_outer_join_operator() {
2256 return false;
2257 }
2258
2259 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2260 Token::LParen == maybe_lparen.token
2261 && Token::Plus == maybe_plus.token
2262 && Token::RParen == maybe_rparen.token
2263 }
2264
2265 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2268 self.dialect.supports_outer_join_operator()
2269 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2270 }
2271
2272 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2274 self.expect_token(&Token::LParen)?;
2275 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2276 self.expect_token(&Token::RParen)?;
2277
2278 Ok(options)
2279 }
2280
2281 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2282 let name = self.parse_identifier()?;
2283
2284 let next_token = self.peek_token_ref();
2285 if next_token == &Token::Comma || next_token == &Token::RParen {
2286 return Ok(UtilityOption { name, arg: None });
2287 }
2288 let arg = self.parse_expr()?;
2289
2290 Ok(UtilityOption {
2291 name,
2292 arg: Some(arg),
2293 })
2294 }
2295
2296 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2297 if !self.peek_sub_query() {
2298 return Ok(None);
2299 }
2300
2301 Ok(Some(Expr::Subquery(self.parse_query()?)))
2302 }
2303
2304 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2305 if !self.dialect.supports_lambda_functions() {
2306 return Ok(None);
2307 }
2308 self.maybe_parse(|p| {
2309 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2310 p.expect_token(&Token::RParen)?;
2311 p.expect_token(&Token::Arrow)?;
2312 let expr = p.parse_expr()?;
2313 Ok(Expr::Lambda(LambdaFunction {
2314 params: OneOrManyWithParens::Many(params),
2315 body: Box::new(expr),
2316 syntax: LambdaSyntax::Arrow,
2317 }))
2318 })
2319 }
2320
2321 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2331 let params = self.parse_lambda_function_parameters()?;
2333 self.expect_token(&Token::Colon)?;
2335 let body = self.parse_expr()?;
2337 Ok(Expr::Lambda(LambdaFunction {
2338 params,
2339 body: Box::new(body),
2340 syntax: LambdaSyntax::LambdaKeyword,
2341 }))
2342 }
2343
2344 fn parse_lambda_function_parameters(
2346 &mut self,
2347 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2348 let params = if self.consume_token(&Token::LParen) {
2350 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2352 self.expect_token(&Token::RParen)?;
2353 OneOrManyWithParens::Many(params)
2354 } else {
2355 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2357 if params.len() == 1 {
2358 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2359 } else {
2360 OneOrManyWithParens::Many(params)
2361 }
2362 };
2363 Ok(params)
2364 }
2365
2366 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2368 let name = self.parse_identifier()?;
2369 let data_type = match &self.peek_token_ref().token {
2370 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2371 _ => None,
2372 };
2373 Ok(LambdaFunctionParameter { name, data_type })
2374 }
2375
2376 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2383 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2385 return Ok(Some(expr));
2386 }
2387 self.maybe_parse_odbc_body_datetime()
2389 }
2390
2391 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2402 self.maybe_parse(|p| {
2403 let token = p.next_token().clone();
2404 let word_string = token.token.to_string();
2405 let data_type = match word_string.as_str() {
2406 "t" => DataType::Time(None, TimezoneInfo::None),
2407 "d" => DataType::Date,
2408 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2409 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2410 };
2411 let value = p.parse_value()?;
2412 Ok(Expr::TypedString(TypedString {
2413 data_type,
2414 value,
2415 uses_odbc_syntax: true,
2416 }))
2417 })
2418 }
2419
2420 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2429 self.maybe_parse(|p| {
2430 p.expect_keyword(Keyword::FN)?;
2431 let fn_name = p.parse_object_name(false)?;
2432 let mut fn_call = p.parse_function_call(fn_name)?;
2433 fn_call.uses_odbc_syntax = true;
2434 Ok(Expr::Function(fn_call))
2435 })
2436 }
2437
2438 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2440 self.parse_function_call(name).map(Expr::Function)
2441 }
2442
    /// Parse a function call for `name`, starting at its opening `(`:
    /// arguments, then the optional WITHIN GROUP, FILTER, RESPECT/IGNORE
    /// NULLS, and OVER clauses, in that order.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Some dialects allow a bare subquery as the sole argument.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style parameterized functions: `f(params)(args)`.
        // The first list becomes the parameters, the second the arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // Optional `WITHIN GROUP (ORDER BY ...)` for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // Optional `FILTER (WHERE ...)` on supporting dialects.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // RESPECT/IGNORE NULLS may follow the call — but only when it was
        // not already given inside the argument list.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // Optional window: `OVER (<spec>)` or `OVER <named window>`.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2530
2531 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2533 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2534 Some(keyword) => {
2535 self.expect_keyword_is(Keyword::NULLS)?;
2536
2537 Ok(match keyword {
2538 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2539 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2540 _ => None,
2541 })
2542 }
2543 None => Ok(None),
2544 }
2545 }
2546
2547 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2549 let args = if self.consume_token(&Token::LParen) {
2550 FunctionArguments::List(self.parse_function_argument_list()?)
2551 } else {
2552 FunctionArguments::None
2553 };
2554 Ok(Expr::Function(Function {
2555 name,
2556 uses_odbc_syntax: false,
2557 parameters: FunctionArguments::None,
2558 args,
2559 filter: None,
2560 over: None,
2561 null_treatment: None,
2562 within_group: vec![],
2563 }))
2564 }
2565
2566 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2568 let next_token = self.next_token();
2569 match &next_token.token {
2570 Token::Word(w) => match w.keyword {
2571 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2572 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2573 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2574 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2575 },
2576 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2577 }
2578 }
2579
2580 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2582 let units = self.parse_window_frame_units()?;
2583 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2584 let start_bound = self.parse_window_frame_bound()?;
2585 self.expect_keyword_is(Keyword::AND)?;
2586 let end_bound = Some(self.parse_window_frame_bound()?);
2587 (start_bound, end_bound)
2588 } else {
2589 (self.parse_window_frame_bound()?, None)
2590 };
2591 Ok(WindowFrame {
2592 units,
2593 start_bound,
2594 end_bound,
2595 })
2596 }
2597
2598 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2600 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2601 Ok(WindowFrameBound::CurrentRow)
2602 } else {
2603 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2604 None
2605 } else {
2606 Some(Box::new(match &self.peek_token_ref().token {
2607 Token::SingleQuotedString(_) => self.parse_interval()?,
2608 _ => self.parse_expr()?,
2609 }))
2610 };
2611 if self.parse_keyword(Keyword::PRECEDING) {
2612 Ok(WindowFrameBound::Preceding(rows))
2613 } else if self.parse_keyword(Keyword::FOLLOWING) {
2614 Ok(WindowFrameBound::Following(rows))
2615 } else {
2616 self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
2617 }
2618 }
2619 }
2620
2621 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2623 if self.dialect.supports_group_by_expr() {
2624 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2625 self.expect_token(&Token::LParen)?;
2626 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2627 self.expect_token(&Token::RParen)?;
2628 Ok(Expr::GroupingSets(result))
2629 } else if self.parse_keyword(Keyword::CUBE) {
2630 self.expect_token(&Token::LParen)?;
2631 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2632 self.expect_token(&Token::RParen)?;
2633 Ok(Expr::Cube(result))
2634 } else if self.parse_keyword(Keyword::ROLLUP) {
2635 self.expect_token(&Token::LParen)?;
2636 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2637 self.expect_token(&Token::RParen)?;
2638 Ok(Expr::Rollup(result))
2639 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2640 Ok(Expr::Tuple(vec![]))
2644 } else {
2645 self.parse_expr()
2646 }
2647 } else {
2648 self.parse_expr()
2650 }
2651 }
2652
2653 fn parse_tuple(
2657 &mut self,
2658 lift_singleton: bool,
2659 allow_empty: bool,
2660 ) -> Result<Vec<Expr>, ParserError> {
2661 if lift_singleton {
2662 if self.consume_token(&Token::LParen) {
2663 let result = if allow_empty && self.consume_token(&Token::RParen) {
2664 vec![]
2665 } else {
2666 let result = self.parse_comma_separated(Parser::parse_expr)?;
2667 self.expect_token(&Token::RParen)?;
2668 result
2669 };
2670 Ok(result)
2671 } else {
2672 Ok(vec![self.parse_expr()?])
2673 }
2674 } else {
2675 self.expect_token(&Token::LParen)?;
2676 let result = if allow_empty && self.consume_token(&Token::RParen) {
2677 vec![]
2678 } else {
2679 let result = self.parse_comma_separated(Parser::parse_expr)?;
2680 self.expect_token(&Token::RParen)?;
2681 result
2682 };
2683 Ok(result)
2684 }
2685 }
2686
2687 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2689 let case_token = AttachedToken(self.get_current_token().clone());
2690 let mut operand = None;
2691 if !self.parse_keyword(Keyword::WHEN) {
2692 operand = Some(Box::new(self.parse_expr()?));
2693 self.expect_keyword_is(Keyword::WHEN)?;
2694 }
2695 let mut conditions = vec![];
2696 loop {
2697 let condition = self.parse_expr()?;
2698 self.expect_keyword_is(Keyword::THEN)?;
2699 let result = self.parse_expr()?;
2700 conditions.push(CaseWhen { condition, result });
2701 if !self.parse_keyword(Keyword::WHEN) {
2702 break;
2703 }
2704 }
2705 let else_result = if self.parse_keyword(Keyword::ELSE) {
2706 Some(Box::new(self.parse_expr()?))
2707 } else {
2708 None
2709 };
2710 let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2711 Ok(Expr::Case {
2712 case_token,
2713 end_token,
2714 operand,
2715 conditions,
2716 else_result,
2717 })
2718 }
2719
2720 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2722 if self.parse_keyword(Keyword::FORMAT) {
2723 let value = self.parse_value()?;
2724 match self.parse_optional_time_zone()? {
2725 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2726 None => Ok(Some(CastFormat::Value(value))),
2727 }
2728 } else {
2729 Ok(None)
2730 }
2731 }
2732
2733 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2735 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2736 self.parse_value().map(Some)
2737 } else {
2738 Ok(None)
2739 }
2740 }
2741
2742 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2744 self.expect_token(&Token::LParen)?;
2745 let data_type = self.parse_data_type()?;
2746 self.expect_token(&Token::Comma)?;
2747 let expr = self.parse_expr()?;
2748 let styles = if self.consume_token(&Token::Comma) {
2749 self.parse_comma_separated(Parser::parse_expr)?
2750 } else {
2751 Default::default()
2752 };
2753 self.expect_token(&Token::RParen)?;
2754 Ok(Expr::Convert {
2755 is_try,
2756 expr: Box::new(expr),
2757 data_type: Some(data_type),
2758 charset: None,
2759 target_before_value: true,
2760 styles,
2761 })
2762 }
2763
2764 pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2769 if self.dialect.convert_type_before_value() {
2770 return self.parse_mssql_convert(is_try);
2771 }
2772 self.expect_token(&Token::LParen)?;
2773 let expr = self.parse_expr()?;
2774 if self.parse_keyword(Keyword::USING) {
2775 let charset = self.parse_object_name(false)?;
2776 self.expect_token(&Token::RParen)?;
2777 return Ok(Expr::Convert {
2778 is_try,
2779 expr: Box::new(expr),
2780 data_type: None,
2781 charset: Some(charset),
2782 target_before_value: false,
2783 styles: vec![],
2784 });
2785 }
2786 self.expect_token(&Token::Comma)?;
2787 let data_type = self.parse_data_type()?;
2788 let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2789 Some(self.parse_object_name(false)?)
2790 } else {
2791 None
2792 };
2793 self.expect_token(&Token::RParen)?;
2794 Ok(Expr::Convert {
2795 is_try,
2796 expr: Box::new(expr),
2797 data_type: Some(data_type),
2798 charset,
2799 target_before_value: false,
2800 styles: vec![],
2801 })
2802 }
2803
2804 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2806 self.expect_token(&Token::LParen)?;
2807 let expr = self.parse_expr()?;
2808 self.expect_keyword_is(Keyword::AS)?;
2809 let data_type = self.parse_data_type()?;
2810 let array = self.parse_keyword(Keyword::ARRAY);
2811 let format = self.parse_optional_cast_format()?;
2812 self.expect_token(&Token::RParen)?;
2813 Ok(Expr::Cast {
2814 kind,
2815 expr: Box::new(expr),
2816 data_type,
2817 array,
2818 format,
2819 })
2820 }
2821
2822 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2824 self.expect_token(&Token::LParen)?;
2825 let exists_node = Expr::Exists {
2826 negated,
2827 subquery: self.parse_query()?,
2828 };
2829 self.expect_token(&Token::RParen)?;
2830 Ok(exists_node)
2831 }
2832
2833 pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2835 self.expect_token(&Token::LParen)?;
2836 let field = self.parse_date_time_field()?;
2837
2838 let syntax = if self.parse_keyword(Keyword::FROM) {
2839 ExtractSyntax::From
2840 } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
2841 {
2842 ExtractSyntax::Comma
2843 } else {
2844 return Err(ParserError::ParserError(
2845 "Expected 'FROM' or ','".to_string(),
2846 ));
2847 };
2848
2849 let expr = self.parse_expr()?;
2850 self.expect_token(&Token::RParen)?;
2851 Ok(Expr::Extract {
2852 field,
2853 expr: Box::new(expr),
2854 syntax,
2855 })
2856 }
2857
2858 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2860 self.expect_token(&Token::LParen)?;
2861 let expr = self.parse_expr()?;
2862 let field = if self.parse_keyword(Keyword::TO) {
2864 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2866 } else if self.consume_token(&Token::Comma) {
2867 let v = self.parse_value()?;
2869 if matches!(v.value, Value::Number(_, _)) {
2870 CeilFloorKind::Scale(v)
2871 } else {
2872 return Err(ParserError::ParserError(
2873 "Scale field can only be of number type".to_string(),
2874 ));
2875 }
2876 } else {
2877 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2878 };
2879 self.expect_token(&Token::RParen)?;
2880 if is_ceil {
2881 Ok(Expr::Ceil {
2882 expr: Box::new(expr),
2883 field,
2884 })
2885 } else {
2886 Ok(Expr::Floor {
2887 expr: Box::new(expr),
2888 field,
2889 })
2890 }
2891 }
2892
2893 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2895 let between_prec = self.dialect.prec_value(Precedence::Between);
2896 let position_expr = self.maybe_parse(|p| {
2897 p.expect_token(&Token::LParen)?;
2899
2900 let expr = p.parse_subexpr(between_prec)?;
2902 p.expect_keyword_is(Keyword::IN)?;
2903 let from = p.parse_expr()?;
2904 p.expect_token(&Token::RParen)?;
2905 Ok(Expr::Position {
2906 expr: Box::new(expr),
2907 r#in: Box::new(from),
2908 })
2909 })?;
2910 match position_expr {
2911 Some(expr) => Ok(expr),
2912 None => self.parse_function(ObjectName::from(vec![ident])),
2915 }
2916 }
2917
2918 pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2920 let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2921 Keyword::SUBSTR => true,
2922 Keyword::SUBSTRING => false,
2923 _ => {
2924 self.prev_token();
2925 return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
2926 }
2927 };
2928 self.expect_token(&Token::LParen)?;
2929 let expr = self.parse_expr()?;
2930 let mut from_expr = None;
2931 let special = self.consume_token(&Token::Comma);
2932 if special || self.parse_keyword(Keyword::FROM) {
2933 from_expr = Some(self.parse_expr()?);
2934 }
2935
2936 let mut to_expr = None;
2937 if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2938 to_expr = Some(self.parse_expr()?);
2939 }
2940 self.expect_token(&Token::RParen)?;
2941
2942 Ok(Expr::Substring {
2943 expr: Box::new(expr),
2944 substring_from: from_expr.map(Box::new),
2945 substring_for: to_expr.map(Box::new),
2946 special,
2947 shorthand,
2948 })
2949 }
2950
2951 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2955 self.expect_token(&Token::LParen)?;
2957 let expr = self.parse_expr()?;
2958 self.expect_keyword_is(Keyword::PLACING)?;
2959 let what_expr = self.parse_expr()?;
2960 self.expect_keyword_is(Keyword::FROM)?;
2961 let from_expr = self.parse_expr()?;
2962 let mut for_expr = None;
2963 if self.parse_keyword(Keyword::FOR) {
2964 for_expr = Some(self.parse_expr()?);
2965 }
2966 self.expect_token(&Token::RParen)?;
2967
2968 Ok(Expr::Overlay {
2969 expr: Box::new(expr),
2970 overlay_what: Box::new(what_expr),
2971 overlay_from: Box::new(from_expr),
2972 overlay_for: for_expr.map(Box::new),
2973 })
2974 }
2975
2976 pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2982 self.expect_token(&Token::LParen)?;
2983 let mut trim_where = None;
2984 if let Token::Word(word) = &self.peek_token_ref().token {
2985 if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2986 trim_where = Some(self.parse_trim_where()?);
2987 }
2988 }
2989 let expr = self.parse_expr()?;
2990 if self.parse_keyword(Keyword::FROM) {
2991 let trim_what = Box::new(expr);
2992 let expr = self.parse_expr()?;
2993 self.expect_token(&Token::RParen)?;
2994 Ok(Expr::Trim {
2995 expr: Box::new(expr),
2996 trim_where,
2997 trim_what: Some(trim_what),
2998 trim_characters: None,
2999 })
3000 } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
3001 {
3002 let characters = self.parse_comma_separated(Parser::parse_expr)?;
3003 self.expect_token(&Token::RParen)?;
3004 Ok(Expr::Trim {
3005 expr: Box::new(expr),
3006 trim_where: None,
3007 trim_what: None,
3008 trim_characters: Some(characters),
3009 })
3010 } else {
3011 self.expect_token(&Token::RParen)?;
3012 Ok(Expr::Trim {
3013 expr: Box::new(expr),
3014 trim_where,
3015 trim_what: None,
3016 trim_characters: None,
3017 })
3018 }
3019 }
3020
3021 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3025 let next_token = self.next_token();
3026 match &next_token.token {
3027 Token::Word(w) => match w.keyword {
3028 Keyword::BOTH => Ok(TrimWhereField::Both),
3029 Keyword::LEADING => Ok(TrimWhereField::Leading),
3030 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3031 _ => self.expected("trim_where field", next_token)?,
3032 },
3033 _ => self.expected("trim_where field", next_token),
3034 }
3035 }
3036
3037 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3040 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3041 self.expect_token(&Token::RBracket)?;
3042 Ok(Expr::Array(Array { elem: exprs, named }))
3043 }
3044
    /// Parses an optional LISTAGG `ON OVERFLOW` clause:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler literal>] WITH|WITHOUT COUNT`.
    ///
    /// Returns `Ok(None)` when `ON OVERFLOW` is not present.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler is optional: when the next token is already
                // WITH/WITHOUT there is no filler; a string-literal token
                // starts a filler expression; anything else is an error.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH/WITHOUT must follow, then COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3083
    /// Parses a date/time field name (YEAR, MONTH, EPOCH, ...), consuming one
    /// token.
    ///
    /// Dialect hooks:
    /// - `allow_extract_custom`: any other word is accepted as a custom field;
    /// - `allow_extract_single_quotes`: a single-quoted string is accepted as
    ///   a custom field.
    /// In both custom cases the token is re-read as an identifier.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // Some dialects allow a parenthesized weekday argument,
                    // e.g. `WEEK(MONDAY)`.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the common misspelling and the correct spelling are
                // distinct keywords/variants.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                _ if self.dialect.allow_extract_custom() => {
                    // Re-read the word as an identifier for a custom field.
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                // Re-read the quoted string as an identifier for a custom field.
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3162
3163 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3167 match &self.peek_token_ref().token {
3168 Token::Word(w) => match w.keyword {
3169 Keyword::EXISTS => {
3170 let negated = true;
3171 let _ = self.parse_keyword(Keyword::EXISTS);
3172 self.parse_exists_expr(negated)
3173 }
3174 _ => Ok(Expr::UnaryOp {
3175 op: UnaryOperator::Not,
3176 expr: Box::new(
3177 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3178 ),
3179 }),
3180 },
3181 _ => Ok(Expr::UnaryOp {
3182 op: UnaryOperator::Not,
3183 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3184 }),
3185 }
3186 }
3187
3188 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3198 let token = self.expect_token(&Token::LBrace)?;
3199
3200 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3201 self.expect_token(&Token::RBrace)?;
3202 return Ok(fn_expr);
3203 }
3204
3205 if self.dialect.supports_dictionary_syntax() {
3206 self.prev_token(); return self.parse_dictionary();
3208 }
3209
3210 self.expected("an expression", token)
3211 }
3212
3213 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
3219 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
3220
3221 self.expect_keyword_is(Keyword::AGAINST)?;
3222
3223 self.expect_token(&Token::LParen)?;
3224
3225 let match_value = self.parse_value()?;
3227
3228 let in_natural_language_mode_keywords = &[
3229 Keyword::IN,
3230 Keyword::NATURAL,
3231 Keyword::LANGUAGE,
3232 Keyword::MODE,
3233 ];
3234
3235 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
3236
3237 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
3238
3239 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
3240 if self.parse_keywords(with_query_expansion_keywords) {
3241 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
3242 } else {
3243 Some(SearchModifier::InNaturalLanguageMode)
3244 }
3245 } else if self.parse_keywords(in_boolean_mode_keywords) {
3246 Some(SearchModifier::InBooleanMode)
3247 } else if self.parse_keywords(with_query_expansion_keywords) {
3248 Some(SearchModifier::WithQueryExpansion)
3249 } else {
3250 None
3251 };
3252
3253 self.expect_token(&Token::RParen)?;
3254
3255 Ok(Expr::MatchAgainst {
3256 columns,
3257 match_value,
3258 opt_search_modifier,
3259 })
3260 }
3261
    /// Parses an INTERVAL expression (the `INTERVAL` keyword has already been
    /// consumed): a value, an optional leading unit, and optional precision
    /// specifiers, e.g. `INTERVAL '1-2' YEAR TO MONTH` or
    /// `INTERVAL '1.5' SECOND (2, 3)`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a qualifier can take a full expression as the
        // value; otherwise only a prefix expression is consumed so that a
        // following unit word is not swallowed.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Leading unit, e.g. the YEAR in `INTERVAL '1' YEAR`. Mandatory for
        // dialects that require an interval qualifier.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        // Precision handling depends on the leading unit:
        // - `SECOND (lp, fsp)` packs both precisions into one parenthesized pair;
        // - otherwise `(lp)` may follow the unit, and a `TO <field>` range may
        //   follow, with `(fsp)` allowed only after `TO SECOND`.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3344
3345 pub fn next_token_is_temporal_unit(&mut self) -> bool {
3348 if let Token::Word(word) = &self.peek_token_ref().token {
3349 matches!(
3350 word.keyword,
3351 Keyword::YEAR
3352 | Keyword::YEARS
3353 | Keyword::MONTH
3354 | Keyword::MONTHS
3355 | Keyword::WEEK
3356 | Keyword::WEEKS
3357 | Keyword::DAY
3358 | Keyword::DAYS
3359 | Keyword::HOUR
3360 | Keyword::HOURS
3361 | Keyword::MINUTE
3362 | Keyword::MINUTES
3363 | Keyword::SECOND
3364 | Keyword::SECONDS
3365 | Keyword::CENTURY
3366 | Keyword::DECADE
3367 | Keyword::DOW
3368 | Keyword::DOY
3369 | Keyword::EPOCH
3370 | Keyword::ISODOW
3371 | Keyword::ISOYEAR
3372 | Keyword::JULIAN
3373 | Keyword::MICROSECOND
3374 | Keyword::MICROSECONDS
3375 | Keyword::MILLENIUM
3376 | Keyword::MILLENNIUM
3377 | Keyword::MILLISECOND
3378 | Keyword::MILLISECONDS
3379 | Keyword::NANOSECOND
3380 | Keyword::NANOSECONDS
3381 | Keyword::QUARTER
3382 | Keyword::TIMEZONE
3383 | Keyword::TIMEZONE_HOUR
3384 | Keyword::TIMEZONE_MINUTE
3385 )
3386 } else {
3387 false
3388 }
3389 }
3390
3391 fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3399 self.prev_token();
3401 let (fields, trailing_bracket) =
3402 self.parse_struct_type_def(Self::parse_struct_field_def)?;
3403 if trailing_bracket.0 {
3404 return parser_err!(
3405 "unmatched > in STRUCT literal",
3406 self.peek_token_ref().span.start
3407 );
3408 }
3409
3410 self.expect_token(&Token::LParen)?;
3412 let values = self
3413 .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3414 self.expect_token(&Token::RParen)?;
3415
3416 Ok(Expr::Struct { values, fields })
3417 }
3418
3419 fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3433 let expr = self.parse_expr()?;
3434 if self.parse_keyword(Keyword::AS) {
3435 if typed_syntax {
3436 return parser_err!("Typed syntax does not allow AS", {
3437 self.prev_token();
3438 self.peek_token_ref().span.start
3439 });
3440 }
3441 let field_name = self.parse_identifier()?;
3442 Ok(Expr::Named {
3443 expr: expr.into(),
3444 name: field_name,
3445 })
3446 } else {
3447 Ok(expr)
3448 }
3449 }
3450
3451 fn parse_struct_type_def<F>(
3464 &mut self,
3465 mut elem_parser: F,
3466 ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3467 where
3468 F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3469 {
3470 self.expect_keyword_is(Keyword::STRUCT)?;
3471
3472 if self.peek_token_ref().token != Token::Lt {
3474 return Ok((Default::default(), false.into()));
3475 }
3476 self.next_token();
3477
3478 let mut field_defs = vec![];
3479 let trailing_bracket = loop {
3480 let (def, trailing_bracket) = elem_parser(self)?;
3481 field_defs.push(def);
3482 if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3484 break trailing_bracket;
3485 }
3486 };
3487
3488 Ok((
3489 field_defs,
3490 self.expect_closing_angle_bracket(trailing_bracket)?,
3491 ))
3492 }
3493
3494 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3496 self.expect_keyword_is(Keyword::STRUCT)?;
3497 self.expect_token(&Token::LParen)?;
3498 let struct_body = self.parse_comma_separated(|parser| {
3499 let field_name = parser.parse_identifier()?;
3500 let field_type = parser.parse_data_type()?;
3501
3502 Ok(StructField {
3503 field_name: Some(field_name),
3504 field_type,
3505 options: None,
3506 })
3507 });
3508 self.expect_token(&Token::RParen)?;
3509 struct_body
3510 }
3511
3512 fn parse_struct_field_def(
3524 &mut self,
3525 ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3526 let is_named_field = matches!(
3529 (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3530 (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
3531 );
3532
3533 let field_name = if is_named_field {
3534 let name = self.parse_identifier()?;
3535 let _ = self.consume_token(&Token::Colon);
3536 Some(name)
3537 } else {
3538 None
3539 };
3540
3541 let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3542
3543 let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3544 Ok((
3545 StructField {
3546 field_name,
3547 field_type,
3548 options,
3549 },
3550 trailing_bracket,
3551 ))
3552 }
3553
3554 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3564 self.expect_keyword_is(Keyword::UNION)?;
3565
3566 self.expect_token(&Token::LParen)?;
3567
3568 let fields = self.parse_comma_separated(|p| {
3569 Ok(UnionField {
3570 field_name: p.parse_identifier()?,
3571 field_type: p.parse_data_type()?,
3572 })
3573 })?;
3574
3575 self.expect_token(&Token::RParen)?;
3576
3577 Ok(fields)
3578 }
3579
3580 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3591 self.expect_token(&Token::LBrace)?;
3592
3593 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3594
3595 self.expect_token(&Token::RBrace)?;
3596
3597 Ok(Expr::Dictionary(fields))
3598 }
3599
3600 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3611 let key = self.parse_identifier()?;
3612
3613 self.expect_token(&Token::Colon)?;
3614
3615 let expr = self.parse_expr()?;
3616
3617 Ok(DictionaryField {
3618 key,
3619 value: Box::new(expr),
3620 })
3621 }
3622
3623 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3633 self.expect_token(&Token::LBrace)?;
3634 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3635 self.expect_token(&Token::RBrace)?;
3636 Ok(Expr::Map(Map { entries: fields }))
3637 }
3638
3639 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3649 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3651
3652 self.expect_token(&Token::Colon)?;
3653
3654 let value = self.parse_expr()?;
3655
3656 Ok(MapEntry {
3657 key: Box::new(key),
3658 value: Box::new(value),
3659 })
3660 }
3661
3662 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3672 self.expect_keyword_is(Keyword::MAP)?;
3673 self.expect_token(&Token::LParen)?;
3674 let key_data_type = self.parse_data_type()?;
3675 self.expect_token(&Token::Comma)?;
3676 let value_data_type = self.parse_data_type()?;
3677 self.expect_token(&Token::RParen)?;
3678
3679 Ok((key_data_type, value_data_type))
3680 }
3681
3682 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3692 self.expect_keyword_is(Keyword::TUPLE)?;
3693 self.expect_token(&Token::LParen)?;
3694 let mut field_defs = vec![];
3695 loop {
3696 let (def, _) = self.parse_struct_field_def()?;
3697 field_defs.push(def);
3698 if !self.consume_token(&Token::Comma) {
3699 break;
3700 }
3701 }
3702 self.expect_token(&Token::RParen)?;
3703
3704 Ok(field_defs)
3705 }
3706
3707 fn expect_closing_angle_bracket(
3712 &mut self,
3713 trailing_bracket: MatchedTrailingBracket,
3714 ) -> Result<MatchedTrailingBracket, ParserError> {
3715 let trailing_bracket = if !trailing_bracket.0 {
3716 match &self.peek_token_ref().token {
3717 Token::Gt => {
3718 self.next_token();
3719 false.into()
3720 }
3721 Token::ShiftRight => {
3722 self.next_token();
3723 true.into()
3724 }
3725 _ => return self.expected_ref(">", self.peek_token_ref()),
3726 }
3727 } else {
3728 false.into()
3729 };
3730
3731 Ok(trailing_bracket)
3732 }
3733
3734 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3736 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3738 return infix;
3739 }
3740
3741 let dialect = self.dialect;
3742
3743 self.advance_token();
3744 let tok = self.get_current_token();
3745 debug!("infix: {tok:?}");
3746 let tok_index = self.get_current_index();
3747 let span = tok.span;
3748 let regular_binary_operator = match &tok.token {
3749 Token::Spaceship => Some(BinaryOperator::Spaceship),
3750 Token::DoubleEq => Some(BinaryOperator::Eq),
3751 Token::Assignment => Some(BinaryOperator::Assignment),
3752 Token::Eq => Some(BinaryOperator::Eq),
3753 Token::Neq => Some(BinaryOperator::NotEq),
3754 Token::Gt => Some(BinaryOperator::Gt),
3755 Token::GtEq => Some(BinaryOperator::GtEq),
3756 Token::Lt => Some(BinaryOperator::Lt),
3757 Token::LtEq => Some(BinaryOperator::LtEq),
3758 Token::Plus => Some(BinaryOperator::Plus),
3759 Token::Minus => Some(BinaryOperator::Minus),
3760 Token::Mul => Some(BinaryOperator::Multiply),
3761 Token::Mod => Some(BinaryOperator::Modulo),
3762 Token::StringConcat => Some(BinaryOperator::StringConcat),
3763 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3764 Token::Caret => {
3765 if dialect_is!(dialect is PostgreSqlDialect) {
3768 Some(BinaryOperator::PGExp)
3769 } else {
3770 Some(BinaryOperator::BitwiseXor)
3771 }
3772 }
3773 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3774 Token::Div => Some(BinaryOperator::Divide),
3775 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3776 Some(BinaryOperator::DuckIntegerDivide)
3777 }
3778 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3779 Some(BinaryOperator::PGBitwiseShiftLeft)
3780 }
3781 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3782 Some(BinaryOperator::PGBitwiseShiftRight)
3783 }
3784 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3785 Some(BinaryOperator::PGBitwiseXor)
3786 }
3787 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3788 Some(BinaryOperator::PGOverlap)
3789 }
3790 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3791 Some(BinaryOperator::PGOverlap)
3792 }
3793 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3794 Some(BinaryOperator::And)
3795 }
3796 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3797 Some(BinaryOperator::PGStartsWith)
3798 }
3799 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3800 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3801 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3802 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3803 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3804 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3805 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3806 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3807 Token::Arrow => Some(BinaryOperator::Arrow),
3808 Token::LongArrow => Some(BinaryOperator::LongArrow),
3809 Token::HashArrow => Some(BinaryOperator::HashArrow),
3810 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3811 Token::AtArrow => Some(BinaryOperator::AtArrow),
3812 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3813 Token::HashMinus => Some(BinaryOperator::HashMinus),
3814 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3815 Token::AtAt => Some(BinaryOperator::AtAt),
3816 Token::Question => Some(BinaryOperator::Question),
3817 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3818 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3819 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3820 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3821 Some(BinaryOperator::DoubleHash)
3822 }
3823
3824 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3825 Some(BinaryOperator::AndLt)
3826 }
3827 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3828 Some(BinaryOperator::AndGt)
3829 }
3830 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3831 Some(BinaryOperator::QuestionDash)
3832 }
3833 Token::AmpersandLeftAngleBracketVerticalBar
3834 if self.dialect.supports_geometric_types() =>
3835 {
3836 Some(BinaryOperator::AndLtPipe)
3837 }
3838 Token::VerticalBarAmpersandRightAngleBracket
3839 if self.dialect.supports_geometric_types() =>
3840 {
3841 Some(BinaryOperator::PipeAndGt)
3842 }
3843 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3844 Some(BinaryOperator::LtDashGt)
3845 }
3846 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3847 Some(BinaryOperator::LtCaret)
3848 }
3849 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3850 Some(BinaryOperator::GtCaret)
3851 }
3852 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3853 Some(BinaryOperator::QuestionHash)
3854 }
3855 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3856 Some(BinaryOperator::QuestionDoublePipe)
3857 }
3858 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3859 Some(BinaryOperator::QuestionDashPipe)
3860 }
3861 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3862 Some(BinaryOperator::TildeEq)
3863 }
3864 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3865 Some(BinaryOperator::LtLtPipe)
3866 }
3867 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3868 Some(BinaryOperator::PipeGtGt)
3869 }
3870 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3871
3872 Token::Word(w) => match w.keyword {
3873 Keyword::AND => Some(BinaryOperator::And),
3874 Keyword::OR => Some(BinaryOperator::Or),
3875 Keyword::XOR => Some(BinaryOperator::Xor),
3876 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3877 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3878 self.expect_token(&Token::LParen)?;
3879 let mut idents = vec![];
3884 loop {
3885 self.advance_token();
3886 idents.push(self.get_current_token().to_string());
3887 if !self.consume_token(&Token::Period) {
3888 break;
3889 }
3890 }
3891 self.expect_token(&Token::RParen)?;
3892 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3893 }
3894 _ => None,
3895 },
3896 _ => None,
3897 };
3898
3899 let tok = self.token_at(tok_index);
3900 if let Some(op) = regular_binary_operator {
3901 if let Some(keyword) =
3902 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3903 {
3904 self.expect_token(&Token::LParen)?;
3905 let right = if self.peek_sub_query() {
3906 self.prev_token(); self.parse_subexpr(precedence)?
3910 } else {
3911 let right = self.parse_subexpr(precedence)?;
3913 self.expect_token(&Token::RParen)?;
3914 right
3915 };
3916
3917 if !matches!(
3918 op,
3919 BinaryOperator::Gt
3920 | BinaryOperator::Lt
3921 | BinaryOperator::GtEq
3922 | BinaryOperator::LtEq
3923 | BinaryOperator::Eq
3924 | BinaryOperator::NotEq
3925 | BinaryOperator::PGRegexMatch
3926 | BinaryOperator::PGRegexIMatch
3927 | BinaryOperator::PGRegexNotMatch
3928 | BinaryOperator::PGRegexNotIMatch
3929 | BinaryOperator::PGLikeMatch
3930 | BinaryOperator::PGILikeMatch
3931 | BinaryOperator::PGNotLikeMatch
3932 | BinaryOperator::PGNotILikeMatch
3933 ) {
3934 return parser_err!(
3935 format!(
3936 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3937 ),
3938 span.start
3939 );
3940 };
3941
3942 Ok(match keyword {
3943 Keyword::ALL => Expr::AllOp {
3944 left: Box::new(expr),
3945 compare_op: op,
3946 right: Box::new(right),
3947 },
3948 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3949 left: Box::new(expr),
3950 compare_op: op,
3951 right: Box::new(right),
3952 is_some: keyword == Keyword::SOME,
3953 },
3954 unexpected_keyword => return Err(ParserError::ParserError(
3955 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3956 )),
3957 })
3958 } else {
3959 Ok(Expr::BinaryOp {
3960 left: Box::new(expr),
3961 op,
3962 right: Box::new(self.parse_subexpr(precedence)?),
3963 })
3964 }
3965 } else if let Token::Word(w) = &tok.token {
3966 match w.keyword {
3967 Keyword::IS => {
3968 if self.parse_keyword(Keyword::NULL) {
3969 Ok(Expr::IsNull(Box::new(expr)))
3970 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3971 Ok(Expr::IsNotNull(Box::new(expr)))
3972 } else if self.parse_keywords(&[Keyword::TRUE]) {
3973 Ok(Expr::IsTrue(Box::new(expr)))
3974 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3975 Ok(Expr::IsNotTrue(Box::new(expr)))
3976 } else if self.parse_keywords(&[Keyword::FALSE]) {
3977 Ok(Expr::IsFalse(Box::new(expr)))
3978 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3979 Ok(Expr::IsNotFalse(Box::new(expr)))
3980 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3981 Ok(Expr::IsUnknown(Box::new(expr)))
3982 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3983 Ok(Expr::IsNotUnknown(Box::new(expr)))
3984 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3985 let expr2 = self.parse_expr()?;
3986 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3987 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3988 {
3989 let expr2 = self.parse_expr()?;
3990 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3991 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3992 Ok(is_normalized)
3993 } else {
3994 self.expected_ref(
3995 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3996 self.peek_token_ref(),
3997 )
3998 }
3999 }
4000 Keyword::AT => {
4001 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4002 Ok(Expr::AtTimeZone {
4003 timestamp: Box::new(expr),
4004 time_zone: Box::new(self.parse_subexpr(precedence)?),
4005 })
4006 }
4007 Keyword::NOT
4008 | Keyword::IN
4009 | Keyword::BETWEEN
4010 | Keyword::LIKE
4011 | Keyword::ILIKE
4012 | Keyword::SIMILAR
4013 | Keyword::REGEXP
4014 | Keyword::RLIKE => {
4015 self.prev_token();
4016 let negated = self.parse_keyword(Keyword::NOT);
4017 let regexp = self.parse_keyword(Keyword::REGEXP);
4018 let rlike = self.parse_keyword(Keyword::RLIKE);
4019 let null = if !self.in_column_definition_state() {
4020 self.parse_keyword(Keyword::NULL)
4021 } else {
4022 false
4023 };
4024 if regexp || rlike {
4025 Ok(Expr::RLike {
4026 negated,
4027 expr: Box::new(expr),
4028 pattern: Box::new(
4029 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4030 ),
4031 regexp,
4032 })
4033 } else if negated && null {
4034 Ok(Expr::IsNotNull(Box::new(expr)))
4035 } else if self.parse_keyword(Keyword::IN) {
4036 self.parse_in(expr, negated)
4037 } else if self.parse_keyword(Keyword::BETWEEN) {
4038 self.parse_between(expr, negated)
4039 } else if self.parse_keyword(Keyword::LIKE) {
4040 Ok(Expr::Like {
4041 negated,
4042 any: self.parse_keyword(Keyword::ANY),
4043 expr: Box::new(expr),
4044 pattern: Box::new(
4045 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4046 ),
4047 escape_char: self.parse_escape_char()?,
4048 })
4049 } else if self.parse_keyword(Keyword::ILIKE) {
4050 Ok(Expr::ILike {
4051 negated,
4052 any: self.parse_keyword(Keyword::ANY),
4053 expr: Box::new(expr),
4054 pattern: Box::new(
4055 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4056 ),
4057 escape_char: self.parse_escape_char()?,
4058 })
4059 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4060 Ok(Expr::SimilarTo {
4061 negated,
4062 expr: Box::new(expr),
4063 pattern: Box::new(
4064 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4065 ),
4066 escape_char: self.parse_escape_char()?,
4067 })
4068 } else {
4069 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4070 }
4071 }
4072 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4073 Ok(Expr::IsNotNull(Box::new(expr)))
4074 }
4075 Keyword::MEMBER => {
4076 if self.parse_keyword(Keyword::OF) {
4077 self.expect_token(&Token::LParen)?;
4078 let array = self.parse_expr()?;
4079 self.expect_token(&Token::RParen)?;
4080 Ok(Expr::MemberOf(MemberOf {
4081 value: Box::new(expr),
4082 array: Box::new(array),
4083 }))
4084 } else {
4085 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4086 }
4087 }
4088 _ => parser_err!(
4090 format!("No infix parser for token {:?}", tok.token),
4091 tok.span.start
4092 ),
4093 }
4094 } else if Token::DoubleColon == *tok {
4095 Ok(Expr::Cast {
4096 kind: CastKind::DoubleColon,
4097 expr: Box::new(expr),
4098 data_type: self.parse_data_type()?,
4099 array: false,
4100 format: None,
4101 })
4102 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4103 Ok(Expr::UnaryOp {
4104 op: UnaryOperator::PGPostfixFactorial,
4105 expr: Box::new(expr),
4106 })
4107 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4108 || (Token::Colon == *tok)
4109 {
4110 self.prev_token();
4111 self.parse_json_access(expr)
4112 } else {
4113 parser_err!(
4115 format!("No infix parser for token {:?}", tok.token),
4116 tok.span.start
4117 )
4118 }
4119 }
4120
4121 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4123 if self.parse_keyword(Keyword::ESCAPE) {
4124 Ok(Some(self.parse_value()?))
4125 } else {
4126 Ok(None)
4127 }
4128 }
4129
    /// Parses the interior of a `[...]` subscript, after the opening `[` has
    /// already been consumed.
    ///
    /// Accepts a plain index (`[expr]`) as well as slice forms with optional
    /// bounds and stride: `[a:b]`, `[:b]`, `[a:]`, `[a:b:c]`, `[:]`, etc.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // Immediate `]`: either a plain index (`[expr]`) or a fully open
        // slice (`[:]`).
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // When a lower bound was parsed, the separating `:` is still pending.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[a:]` — slice with no upper bound.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[a:b]` — slice without a stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // Second `:` introduces the (optional) stride.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride was present, the closing `]` is still unconsumed.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4204
4205 pub fn parse_multi_dim_subscript(
4207 &mut self,
4208 chain: &mut Vec<AccessExpr>,
4209 ) -> Result<(), ParserError> {
4210 while self.consume_token(&Token::LBracket) {
4211 self.parse_subscript(chain)?;
4212 }
4213 Ok(())
4214 }
4215
4216 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4220 let subscript = self.parse_subscript_inner()?;
4221 chain.push(AccessExpr::Subscript(subscript));
4222 Ok(())
4223 }
4224
    /// Parses a single object-key segment of a JSON path (the part after a
    /// `.` or a leading `:`).
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            // A bare or quoted identifier; only `"`/`` ` ``/unquoted styles
            // are accepted, and `quoted` records whether quotes were used.
            Token::Word(Word {
                value,
                quote_style: quote_style @ (Some('"') | Some('`') | None),
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            // A double-quoted string literal is always treated as quoted.
            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
4249
4250 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4251 let path = self.parse_json_path()?;
4252 Ok(Expr::JsonAccess {
4253 value: Box::new(expr),
4254 path,
4255 })
4256 }
4257
    /// Parses a JSON path made of `:[expr]`, `:key`, `.key` and `[expr]`
    /// segments, stopping at (and putting back) the first token that cannot
    /// extend the path.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // Leading `:[expr]` — only valid as the first segment.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // Leading `:key` — only valid as the first segment.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.key` — only valid after the first segment.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `[expr]` — bracketed access, valid anywhere.
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Any other token ends the path; put it back for the caller.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this after seeing a path-introducing token.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4290
    /// Parses the right-hand side of `[NOT] IN` after the `IN` keyword has
    /// been consumed: `UNNEST(expr)`, a parenthesized subquery, or a
    /// parenthesized expression list.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure backtrack and parse a value list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects allow the list to be empty: `x IN ()`.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4325
4326 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4328 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4331 self.expect_keyword_is(Keyword::AND)?;
4332 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4333 Ok(Expr::Between {
4334 expr: Box::new(expr),
4335 negated,
4336 low: Box::new(low),
4337 high: Box::new(high),
4338 })
4339 }
4340
4341 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4343 Ok(Expr::Cast {
4344 kind: CastKind::DoubleColon,
4345 expr: Box::new(expr),
4346 data_type: self.parse_data_type()?,
4347 array: false,
4348 format: None,
4349 })
4350 }
4351
    /// Returns the precedence of the next operator in the token stream,
    /// delegating to the dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4356
    /// Returns the token at absolute index `index`, or the shared EOF token
    /// when the index is past the end of the stream.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4362
    /// Returns a clone of the next significant (non-whitespace) token
    /// without advancing the parser.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4370
    /// Borrows the next significant (non-whitespace) token without
    /// advancing the parser.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4376
    /// Returns the next `N` significant tokens (whitespace skipped) without
    /// advancing; positions past the end are filled with EOF.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
4403
4404 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4409 let mut index = self.index;
4410 core::array::from_fn(|_| loop {
4411 let token = self.tokens.get(index);
4412 index += 1;
4413 if let Some(TokenWithSpan {
4414 token: Token::Whitespace(_),
4415 span: _,
4416 }) = token
4417 {
4418 continue;
4419 }
4420 break token.cloned().unwrap_or(TokenWithSpan {
4421 token: Token::EOF,
4422 span: Span::empty(),
4423 });
4424 })
4425 }
4426
4427 pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4432 let mut index = self.index;
4433 core::array::from_fn(|_| loop {
4434 let token = self.tokens.get(index);
4435 index += 1;
4436 if let Some(TokenWithSpan {
4437 token: Token::Whitespace(_),
4438 span: _,
4439 }) = token
4440 {
4441 continue;
4442 }
4443 break token.unwrap_or(&EOF_TOKEN);
4444 })
4445 }
4446
    /// Returns a clone of the `n`-th significant token ahead of the current
    /// position (0-based), without advancing.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4451
4452 pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4454 let mut index = self.index;
4455 loop {
4456 index += 1;
4457 match self.tokens.get(index - 1) {
4458 Some(TokenWithSpan {
4459 token: Token::Whitespace(_),
4460 span: _,
4461 }) => continue,
4462 non_whitespace => {
4463 if n == 0 {
4464 return non_whitespace.unwrap_or(&EOF_TOKEN);
4465 }
4466 n -= 1;
4467 }
4468 }
4469 }
4470 }
4471
    /// Returns the immediately following token, including whitespace,
    /// without advancing.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4477
4478 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4480 self.tokens
4481 .get(self.index + n)
4482 .cloned()
4483 .unwrap_or(TokenWithSpan {
4484 token: Token::EOF,
4485 span: Span::empty(),
4486 })
4487 }
4488
    /// Borrows the token exactly `n` positions ahead, counting whitespace
    /// (EOF when out of range).
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4493
4494 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4498 let index = self.index;
4499 let matched = self.parse_keywords(expected);
4500 self.index = index;
4501 matched
4502 }
4503
    /// Advances past any whitespace and returns a clone of the token that
    /// became current.
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4512
    /// Returns the index of the most recently consumed token (saturating at
    /// zero before any token has been consumed).
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4520
    /// Advances exactly one position (whitespace included) and returns the
    /// consumed token, or `None` at the end of the stream.
    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
        self.index += 1;
        self.tokens.get(self.index - 1)
    }
4526
4527 pub fn advance_token(&mut self) {
4531 loop {
4532 self.index += 1;
4533 match self.tokens.get(self.index - 1) {
4534 Some(TokenWithSpan {
4535 token: Token::Whitespace(_),
4536 span: _,
4537 }) => continue,
4538 _ => break,
4539 }
4540 }
4541 }
4542
    /// Borrows the most recently consumed token (EOF before any token has
    /// been consumed).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4549
    /// Borrows the token consumed immediately before the current one.
    /// NOTE(review): indexes by raw position, so whitespace is not skipped
    /// here — confirm callers expect that.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4556
    /// Borrows the token immediately after the current one, without
    /// advancing (whitespace is not skipped).
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4563
4564 pub fn prev_token(&mut self) {
4571 loop {
4572 assert!(self.index > 0);
4573 self.index -= 1;
4574 if let Some(TokenWithSpan {
4575 token: Token::Whitespace(_),
4576 span: _,
4577 }) = self.tokens.get(self.index)
4578 {
4579 continue;
4580 }
4581 return;
4582 }
4583 }
4584
    /// Builds an "Expected: ..., found: ..." parse error located at the
    /// found token's span, taking the token by value.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4592
    /// Builds an "Expected: ..., found: ..." parse error located at the
    /// found token's span, borrowing the token.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4600
    /// Builds an "Expected: ..." parse error for the token at the given
    /// absolute token index (EOF when out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4609
4610 #[must_use]
4613 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4614 if self.peek_keyword(expected) {
4615 self.advance_token();
4616 true
4617 } else {
4618 false
4619 }
4620 }
4621
    /// Returns true if the next significant token is the given keyword,
    /// without consuming it.
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4629
    /// Consumes `expected` followed by the exact token sequence `tokens`;
    /// consumes nothing and returns false unless the whole sequence matches.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4640
    /// Returns true if `expected` followed by the exact token sequence
    /// `tokens` appears next, without consuming anything.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4648
4649 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4650 match &self.peek_token_ref().token {
4651 Token::Word(w) if expected == w.keyword => {
4652 for (idx, token) in tokens.iter().enumerate() {
4653 if self.peek_nth_token_ref(idx + 1).token != *token {
4654 return false;
4655 }
4656 }
4657
4658 if consume {
4659 for _ in 0..(tokens.len() + 1) {
4660 self.advance_token();
4661 }
4662 }
4663
4664 true
4665 }
4666 _ => false,
4667 }
4668 }
4669
    /// Consumes the whole keyword sequence if all of it is present; consumes
    /// nothing and returns false otherwise.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4677
4678 #[must_use]
4681 fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
4682 let start_index = self.index;
4683 let mut first_keyword_index = None;
4684 for &keyword in keywords {
4685 if !self.parse_keyword(keyword) {
4686 self.index = start_index;
4687 return None;
4688 }
4689 if first_keyword_index.is_none() {
4690 first_keyword_index = Some(self.index.saturating_sub(1));
4691 }
4692 }
4693 first_keyword_index
4694 }
4695
4696 #[must_use]
4699 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4700 for keyword in keywords {
4701 if self.peek_keyword(*keyword) {
4702 return Some(*keyword);
4703 }
4704 }
4705 None
4706 }
4707
4708 #[must_use]
4712 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4713 match &self.peek_token_ref().token {
4714 Token::Word(w) => {
4715 keywords
4716 .iter()
4717 .find(|keyword| **keyword == w.keyword)
4718 .map(|keyword| {
4719 self.advance_token();
4720 *keyword
4721 })
4722 }
4723 _ => None,
4724 }
4725 }
4726
4727 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4730 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4731 Ok(keyword)
4732 } else {
4733 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4734 self.expected_ref(
4735 &format!("one of {}", keywords.join(" or ")),
4736 self.peek_token_ref(),
4737 )
4738 }
4739 }
4740
4741 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4746 if self.parse_keyword(expected) {
4747 Ok(self.get_current_token().clone())
4748 } else {
4749 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4750 }
4751 }
4752
4753 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4759 if self.parse_keyword(expected) {
4760 Ok(())
4761 } else {
4762 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4763 }
4764 }
4765
4766 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4769 for &kw in expected {
4770 self.expect_keyword_is(kw)?;
4771 }
4772 Ok(())
4773 }
4774
4775 #[must_use]
4779 pub fn consume_token(&mut self, expected: &Token) -> bool {
4780 if self.peek_token_ref() == expected {
4781 self.advance_token();
4782 true
4783 } else {
4784 false
4785 }
4786 }
4787
4788 #[must_use]
4792 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4793 let index = self.index;
4794 for token in tokens {
4795 if !self.consume_token(token) {
4796 self.index = index;
4797 return false;
4798 }
4799 }
4800 true
4801 }
4802
4803 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4805 if self.peek_token_ref() == expected {
4806 Ok(self.next_token())
4807 } else {
4808 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4809 }
4810 }
4811
4812 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4813 where
4814 <T as FromStr>::Err: Display,
4815 {
4816 s.parse::<T>().map_err(|e| {
4817 ParserError::ParserError(format!(
4818 "Could not parse '{s}' as {}: {e}{loc}",
4819 core::any::type_name::<T>()
4820 ))
4821 })
4822 }
4823
4824 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4826 let trailing_commas =
4832 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4833
4834 self.parse_comma_separated_with_trailing_commas(
4835 |p| p.parse_select_item(),
4836 trailing_commas,
4837 Self::is_reserved_for_column_alias,
4838 )
4839 }
4840
    /// Parses the comma-separated list of privileges in a `GRANT`/`REVOKE`
    /// statement, optionally tolerating a trailing comma before `ON` or a
    /// closing delimiter.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // A comma directly before `ON` or a closing delimiter is a
                // trailing comma: end the list instead of expecting another
                // privilege.
                match &self.peek_token_ref().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4864
4865 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4867 let trailing_commas = self.dialect.supports_from_trailing_commas();
4868
4869 self.parse_comma_separated_with_trailing_commas(
4870 Parser::parse_table_and_joins,
4871 trailing_commas,
4872 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4873 )
4874 }
4875
    /// Returns true when a comma-separated list has ended at the current
    /// position: either there is no comma, or (with `trailing_commas`
    /// enabled) the comma is immediately followed by a reserved keyword or a
    /// closing delimiter. The comma is consumed; the token after it is
    /// peeked and put back.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Look at the token after the comma without permanently
            // consuming it.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4908
    /// Like [`Self::is_parse_comma_separated_end_with_trailing_commas`], but
    /// using the parser's configured trailing-comma option and the default
    /// column-alias reservation rule.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4917
    /// Parses one or more items produced by `f`, separated by commas, using
    /// the parser's configured trailing-comma behavior.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4929
    /// Parses one or more comma-separated items produced by `f`.
    ///
    /// `trailing_commas` controls whether a dangling comma before a closing
    /// delimiter is accepted; `is_reserved_keyword` decides which keywords
    /// terminate the list after such a comma.
    fn parse_comma_separated_with_trailing_commas<T, F, R>(
        &mut self,
        mut f: F,
        trailing_commas: bool,
        is_reserved_keyword: R,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        let mut values = vec![];
        loop {
            values.push(f(self)?);
            if self.is_parse_comma_separated_end_with_trailing_commas(
                trailing_commas,
                &is_reserved_keyword,
            ) {
                break;
            }
        }
        Ok(values)
    }
4956
4957 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4959 where
4960 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4961 {
4962 let mut values = vec![];
4963 loop {
4964 values.push(f(self)?);
4965 if !self.consume_token(&Token::Period) {
4966 break;
4967 }
4968 }
4969 Ok(values)
4970 }
4971
4972 pub fn parse_keyword_separated<T, F>(
4974 &mut self,
4975 keyword: Keyword,
4976 mut f: F,
4977 ) -> Result<Vec<T>, ParserError>
4978 where
4979 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4980 {
4981 let mut values = vec![];
4982 loop {
4983 values.push(f(self)?);
4984 if !self.parse_keyword(keyword) {
4985 break;
4986 }
4987 }
4988 Ok(values)
4989 }
4990
4991 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4993 where
4994 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4995 {
4996 self.expect_token(&Token::LParen)?;
4997 let res = f(self)?;
4998 self.expect_token(&Token::RParen)?;
4999 Ok(res)
5000 }
5001
    /// Parses zero or more comma-separated items produced by `f`, where
    /// `end_token` (not consumed) marks an empty list; when trailing commas
    /// are enabled, a lone comma before `end_token` also yields an empty
    /// list.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // e.g. `()` — nothing to parse.
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        // e.g. `(,)` — a single trailing comma in an otherwise empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5023
    /// Parses semicolon-terminated statements until EOF or until one of the
    /// `terminal_keywords` appears as an unquoted word (the terminator is
    /// not consumed).
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // Quoted words are identifiers, never terminators.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5048
    /// Default list-termination rule: a keyword ends a comma-separated list
    /// when the dialect does not accept it as a column alias.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5055
5056 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
5060 where
5061 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5062 {
5063 match self.try_parse(f) {
5064 Ok(t) => Ok(Some(t)),
5065 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
5066 _ => Ok(None),
5067 }
5068 }
5069
5070 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
5072 where
5073 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5074 {
5075 let index = self.index;
5076 match f(self) {
5077 Ok(t) => Ok(t),
5078 Err(e) => {
5079 self.index = index;
5081 Err(e)
5082 }
5083 }
5084 }
5085
    /// Parses an optional `ALL` or `DISTINCT [ON (...)]` quantifier.
    ///
    /// Returns `Ok(None)` when neither keyword is present, and errors when
    /// the two keywords are combined (`ALL DISTINCT` or `DISTINCT ALL`).
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember where the quantifier started for error reporting.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only DISTINCT may carry an `ON (...)` clause.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        // `DISTINCT ON ()` is allowed and yields an empty column list.
        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5124
    /// Parse a `CREATE ...` statement, dispatching to the object-specific
    /// sub-parser based on the object-type keyword.
    ///
    /// Order matters throughout: the optional modifier keywords (`OR REPLACE`,
    /// `OR ALTER`, `LOCAL`/`GLOBAL`, `TRANSIENT`, `TEMP[ORARY]`, `PERSISTENT`)
    /// are consumed first, then the first matching object-type branch wins.
    /// Branches using `peek_*` deliberately leave the keyword in place for the
    /// sub-parser to re-consume; branches using `parse_keyword*` consume it.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Fold the two flags into one option: GLOBAL => Some(true),
        // LOCAL => Some(false), neither => None.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is only recognized for DuckDB (secrets).
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            // `CREATE USER MAPPING ...` vs plain `CREATE USER ...`.
            if self.parse_keyword(Keyword::MAPPING) {
                self.parse_create_user_mapping().map(Into::into)
            } else {
                self.parse_create_user(or_replace).map(Into::into)
            }
        } else if self.parse_keyword(Keyword::AGGREGATE) {
            self.parse_create_aggregate(or_replace).map(Into::into)
        } else if self.peek_keyword(Keyword::TRUSTED)
            || self.peek_keyword(Keyword::PROCEDURAL)
            || self.peek_keyword(Keyword::LANGUAGE)
        {
            // `CREATE [TRUSTED] [PROCEDURAL] LANGUAGE ...` — the modifiers are
            // optional, but LANGUAGE itself is required after them.
            let trusted = self.parse_keyword(Keyword::TRUSTED);
            let procedural = self.parse_keyword(Keyword::PROCEDURAL);
            if self.parse_keyword(Keyword::LANGUAGE) {
                self.parse_create_language(or_replace, trusted, procedural)
                    .map(Into::into)
            } else {
                self.expected_ref(
                    "LANGUAGE after TRUSTED or PROCEDURAL",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keyword(Keyword::TRANSFORM) {
            self.parse_create_transform(or_replace).map(Into::into)
        } else if or_replace {
            // OR REPLACE was given but no object type it applies to followed.
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::CAST) {
            self.parse_create_cast().map(Into::into)
        } else if self.parse_keyword(Keyword::CONVERSION) {
            self.parse_create_conversion(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CONVERSION]) {
            self.parse_create_conversion(true).map(Into::into)
        } else if self.parse_keyword(Keyword::RULE) {
            self.parse_create_rule().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // `CREATE OPERATOR [FAMILY|CLASS] ...`.
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else if self.parse_keyword(Keyword::FOREIGN) {
            // `CREATE FOREIGN DATA WRAPPER ...` or `CREATE FOREIGN TABLE ...`.
            if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
                self.parse_create_foreign_data_wrapper().map(Into::into)
            } else if self.parse_keyword(Keyword::TABLE) {
                self.parse_create_foreign_table().map(Into::into)
            } else {
                self.expected_ref(
                    "DATA WRAPPER or TABLE after CREATE FOREIGN",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
            self.parse_create_text_search()
        } else if self.parse_keyword(Keyword::PUBLICATION) {
            self.parse_create_publication().map(Into::into)
        } else if self.parse_keyword(Keyword::SUBSCRIPTION) {
            self.parse_create_subscription().map(Into::into)
        } else if self.parse_keyword(Keyword::STATISTICS) {
            self.parse_create_statistics().map(Into::into)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::METHOD]) {
            self.parse_create_access_method().map(Into::into)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::TRIGGER]) {
            self.parse_create_event_trigger().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLESPACE) {
            self.parse_create_tablespace().map(Into::into)
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5277
    /// Parse a `CREATE [OR REPLACE] USER [IF NOT EXISTS] <name> [<options>...]
    /// [WITH TAG (<tags>)]` statement body (the `USER` keyword has already been
    /// consumed by the caller).
    fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        // Space-delimited key/value options, terminated by WITH or TAG.
        let options = self
            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
            .options;
        // NOTE(review): if WITH is consumed here but TAG does not follow, the
        // WITH keyword is silently swallowed rather than rejected — confirm
        // this is intended.
        let with_tags = self.parse_keyword(Keyword::WITH);
        let tags = if self.parse_keyword(Keyword::TAG) {
            // Tags are a parenthesized, comma-delimited key/value list.
            self.parse_key_value_options(true, &[])?.options
        } else {
            vec![]
        };
        Ok(CreateUser {
            or_replace,
            if_not_exists,
            name,
            options: KeyValueOptions {
                options,
                delimiter: KeyValueOptionsDelimiter::Space,
            },
            with_tags,
            tags: KeyValueOptions {
                options: tags,
                delimiter: KeyValueOptionsDelimiter::Comma,
            },
        })
    }
5305
    /// Parse a DuckDB-style `CREATE SECRET` statement:
    /// `CREATE [OR REPLACE] [TEMPORARY|PERSISTENT] SECRET [IF NOT EXISTS]
    ///  [<name>] [IN <storage>] ( TYPE <type> [, <key> <value>]... )`.
    ///
    /// `temporary` and `persistent` are mutually exclusive flags already
    /// consumed by the caller; supplying both is an error.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        // Everything before the '(' is an optional name and/or an optional
        // `IN <storage>` clause, in either order-of-appearance handled below.
        if self.peek_token_ref().token != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN <storage>`.
            if storage_specifier.is_none()
                && self.peek_token_ref().token != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        // The parenthesized body must start with `TYPE <identifier>`.
        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        // Optional further `key value` pairs, comma-separated.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY => Some(true), PERSISTENT => Some(false), neither => None;
        // both at once is rejected.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
5364
5365 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5367 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5368 if self.parse_keyword(Keyword::TABLE) {
5369 let table_name = self.parse_object_name(false)?;
5370 if self.peek_token_ref().token != Token::EOF {
5371 if let Token::Word(word) = &self.peek_token_ref().token {
5372 if word.keyword == Keyword::OPTIONS {
5373 options = self.parse_options(Keyword::OPTIONS)?
5374 }
5375 };
5376
5377 if self.peek_token_ref().token != Token::EOF {
5378 let (a, q) = self.parse_as_query()?;
5379 has_as = a;
5380 query = Some(q);
5381 }
5382
5383 Ok(Statement::Cache {
5384 table_flag,
5385 table_name,
5386 has_as,
5387 options,
5388 query,
5389 })
5390 } else {
5391 Ok(Statement::Cache {
5392 table_flag,
5393 table_name,
5394 has_as,
5395 options,
5396 query,
5397 })
5398 }
5399 } else {
5400 table_flag = Some(self.parse_object_name(false)?);
5401 if self.parse_keyword(Keyword::TABLE) {
5402 let table_name = self.parse_object_name(false)?;
5403 if self.peek_token_ref().token != Token::EOF {
5404 if let Token::Word(word) = &self.peek_token_ref().token {
5405 if word.keyword == Keyword::OPTIONS {
5406 options = self.parse_options(Keyword::OPTIONS)?
5407 }
5408 };
5409
5410 if self.peek_token_ref().token != Token::EOF {
5411 let (a, q) = self.parse_as_query()?;
5412 has_as = a;
5413 query = Some(q);
5414 }
5415
5416 Ok(Statement::Cache {
5417 table_flag,
5418 table_name,
5419 has_as,
5420 options,
5421 query,
5422 })
5423 } else {
5424 Ok(Statement::Cache {
5425 table_flag,
5426 table_name,
5427 has_as,
5428 options,
5429 query,
5430 })
5431 }
5432 } else {
5433 if self.peek_token_ref().token == Token::EOF {
5434 self.prev_token();
5435 }
5436 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5437 }
5438 }
5439 }
5440
5441 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5443 match &self.peek_token_ref().token {
5444 Token::Word(word) => match word.keyword {
5445 Keyword::AS => {
5446 self.next_token();
5447 Ok((true, self.parse_query()?))
5448 }
5449 _ => Ok((false, self.parse_query()?)),
5450 },
5451 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5452 }
5453 }
5454
    /// Parse a Spark-style `UNCACHE TABLE [IF EXISTS] <name>` statement
    /// (the `UNCACHE` keyword has already been consumed by the caller).
    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let table_name = self.parse_object_name(false)?;
        Ok(Statement::UNCache {
            table_name,
            if_exists,
        })
    }
5465
    /// Parse a SQLite-style `CREATE VIRTUAL TABLE [IF NOT EXISTS] <name>
    /// USING <module>[(<args>)]` statement (the `VIRTUAL` keyword has already
    /// been consumed by the caller).
    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(false)?;
        self.expect_keyword_is(Keyword::USING)?;
        let module_name = self.parse_identifier()?;
        // Module arguments are parsed as an optional parenthesized column list.
        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
        Ok(Statement::CreateVirtualTable {
            name: table_name,
            if_not_exists,
            module_name,
            module_args,
        })
    }
5485
    /// Parse a `CREATE SCHEMA` statement body (the `SCHEMA` keyword has
    /// already been consumed by the caller), including the optional
    /// `DEFAULT COLLATE`, `WITH (...)`, `OPTIONS (...)` and `CLONE` clauses.
    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let schema_name = self.parse_schema_name()?;

        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let with = if self.peek_keyword(Keyword::WITH) {
            Some(self.parse_options(Keyword::WITH)?)
        } else {
            None
        };

        let options = if self.peek_keyword(Keyword::OPTIONS) {
            Some(self.parse_options(Keyword::OPTIONS)?)
        } else {
            None
        };

        let clone = if self.parse_keyword(Keyword::CLONE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(Statement::CreateSchema {
            schema_name,
            if_not_exists,
            with,
            options,
            default_collate_spec,
            clone,
        })
    }
5525
5526 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5527 if self.parse_keyword(Keyword::AUTHORIZATION) {
5528 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5529 } else {
5530 let name = self.parse_object_name(false)?;
5531
5532 if self.parse_keyword(Keyword::AUTHORIZATION) {
5533 Ok(SchemaName::NamedAuthorization(
5534 name,
5535 self.parse_identifier()?,
5536 ))
5537 } else {
5538 Ok(SchemaName::Simple(name))
5539 }
5540 }
5541 }
5542
    /// Parse a `CREATE DATABASE` statement body (the `DATABASE` keyword has
    /// already been consumed by the caller), covering the Hive-style
    /// `LOCATION`/`MANAGEDLOCATION` clauses, `CLONE`, and MySQL-style
    /// `[DEFAULT] CHARACTER SET|CHARSET` / `COLLATE` clauses.
    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let db_name = self.parse_object_name(false)?;
        let mut location = None;
        let mut managed_location = None;
        // LOCATION and MANAGEDLOCATION may appear in any order; loop until
        // neither keyword is next.
        loop {
            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
                Some(Keyword::MANAGEDLOCATION) => {
                    managed_location = Some(self.parse_literal_string()?)
                }
                _ => break,
            }
        }
        let clone = if self.parse_keyword(Keyword::CLONE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let mut default_charset = None;
        let mut default_collation = None;
        // Accept `[DEFAULT] CHARACTER SET|CHARSET [=] <name>` and
        // `[DEFAULT] COLLATE [=] <name>` in any order.
        loop {
            let has_default = self.parse_keyword(Keyword::DEFAULT);
            // NOTE(review): `&&` binds tighter than `||`, so the
            // `default_charset.is_none()` guard applies only to the
            // `CHARACTER SET` branch and not to `CHARSET` — confirm this
            // asymmetry is intended.
            if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
                || self.parse_keyword(Keyword::CHARSET)
            {
                let _ = self.consume_token(&Token::Eq);
                default_charset = Some(self.parse_identifier()?.value);
            } else if self.parse_keyword(Keyword::COLLATE) {
                let _ = self.consume_token(&Token::Eq);
                default_collation = Some(self.parse_identifier()?.value);
            } else if has_default {
                // DEFAULT was consumed but belongs to something else; put it
                // back before leaving the loop.
                self.prev_token();
                break;
            } else {
                break;
            }
        }

        Ok(Statement::CreateDatabase {
            db_name,
            if_not_exists: ine,
            location,
            managed_location,
            or_replace: false,
            transient: false,
            clone,
            data_retention_time_in_days: None,
            max_data_extension_time_in_days: None,
            external_volume: None,
            catalog: None,
            replace_invalid_characters: None,
            default_ddl_collation: None,
            storage_serialization_policy: None,
            comment: None,
            default_charset,
            default_collation,
            catalog_sync: None,
            catalog_sync_namespace_mode: None,
            catalog_sync_namespace_flatten_delimiter: None,
            with_tags: None,
            with_contacts: None,
        })
    }
5617
5618 pub fn parse_optional_create_function_using(
5620 &mut self,
5621 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5622 if !self.parse_keyword(Keyword::USING) {
5623 return Ok(None);
5624 };
5625 let keyword =
5626 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5627
5628 let uri = self.parse_literal_string()?;
5629
5630 match keyword {
5631 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5632 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5633 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5634 _ => self.expected(
5635 "JAR, FILE or ARCHIVE, got {:?}",
5636 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5637 ),
5638 }
5639 }
5640
    /// Parse the remainder of a `CREATE ... FUNCTION` statement, delegating to
    /// the dialect-specific sub-parser (the `FUNCTION` keyword has already
    /// been consumed by the caller).
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB treats CREATE FUNCTION as a macro definition.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
                .map(Into::into)
        } else {
            // Unsupported dialect: rewind so the error points at FUNCTION.
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5667
    /// Parse a PostgreSQL-style `CREATE FUNCTION` body: the parenthesized
    /// argument list, an optional `RETURNS` clause, and then any number of
    /// function attributes (`AS`, `LANGUAGE`, volatility, null-input
    /// behavior, `PARALLEL`, `SECURITY`, `SET`, `RETURN`) in any order.
    /// Each attribute may be specified at most once.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Accumulator for the order-independent function attributes below.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a duplicate specification of the same attribute.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // `SET <param> FROM CURRENT` or `SET <param> { = | TO }
                // { DEFAULT | <values> }` — SET may repeat, so no
                // `ensure_not_set` here.
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // SQL-standard body form; shares the single-body slot with AS.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                break;
            }
        }

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5822
5823 fn parse_hive_create_function(
5827 &mut self,
5828 or_replace: bool,
5829 temporary: bool,
5830 ) -> Result<CreateFunction, ParserError> {
5831 let name = self.parse_object_name(false)?;
5832 self.expect_keyword_is(Keyword::AS)?;
5833
5834 let body = self.parse_create_function_body_string()?;
5835 let using = self.parse_optional_create_function_using()?;
5836
5837 Ok(CreateFunction {
5838 or_alter: false,
5839 or_replace,
5840 temporary,
5841 name,
5842 function_body: Some(body),
5843 using,
5844 if_not_exists: false,
5845 args: None,
5846 return_type: None,
5847 behavior: None,
5848 called_on_null: None,
5849 parallel: None,
5850 security: None,
5851 set_params: vec![],
5852 language: None,
5853 determinism_specifier: None,
5854 options: None,
5855 remote_connection: None,
5856 })
5857 }
5858
    /// Parse a BigQuery-style `CREATE FUNCTION` body, including the
    /// `[NOT] DETERMINISTIC` specifier, `LANGUAGE`, `REMOTE WITH CONNECTION`,
    /// and an `OPTIONS(...)` list that may appear either before or after the
    /// `AS` body (which of the two happened is recorded in `function_body`).
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS(...) may precede the AS body.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions have no AS body; local functions require one.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS seen yet: it may still follow the body.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5938
    /// Parse a T-SQL-style `CREATE [OR ALTER] FUNCTION` body. The `RETURNS`
    /// clause may be either a plain type, or the table-valued form
    /// `RETURNS @name TABLE (<columns>)`, which is detected with a speculative
    /// parse. The body is either `BEGIN ... END`, `RETURN (<subquery>)`, or
    /// `RETURN SELECT ...`.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Speculatively parse the table-valued form `@name TABLE (<cols>)`;
        // on failure the parser position is restored and we fall back to a
        // plain data type below.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            // Require TABLE to be next, then step back so `parse_data_type`
            // can consume it as part of the TABLE type.
            p.expect_keyword_is(Keyword::TABLE)?;
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // AS before the body is optional in T-SQL.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token_ref().token == Token::LParen {
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                Some(CreateFunctionBody::AsReturnSelect(self.parse_select()?))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
6029
6030 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
6031 if self.parse_keyword(Keyword::SETOF) {
6032 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
6033 } else {
6034 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
6035 }
6036 }
6037
6038 fn parse_create_function_name_and_params(
6039 &mut self,
6040 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
6041 let name = self.parse_object_name(false)?;
6042 let parse_function_param =
6043 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6044 let name = parser.parse_identifier()?;
6045 let data_type = parser.parse_data_type()?;
6046 let default_expr = if parser.consume_token(&Token::Eq) {
6047 Some(parser.parse_expr()?)
6048 } else {
6049 None
6050 };
6051
6052 Ok(OperateFunctionArg {
6053 mode: None,
6054 name: Some(name),
6055 data_type,
6056 default_expr,
6057 })
6058 };
6059 self.expect_token(&Token::LParen)?;
6060 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6061 self.expect_token(&Token::RParen)?;
6062 Ok((name, args))
6063 }
6064
    /// Parse a single PostgreSQL-style function argument:
    /// `[IN|OUT|INOUT|VARIADIC] [<name>] <type> [DEFAULT|= <expr>]`.
    ///
    /// Because the argument name is optional, the first token(s) are
    /// ambiguous: parse a data type first, then speculatively parse a second
    /// one — if that succeeds, the first parse was actually the argument name.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Remember where the first parse ended so we can recover its text as
        // the argument name if it turns out a type follows.
        let data_type_idx = self.get_current_index();

        // Speculative second parse; rejects DEFAULT up front so a bare
        // `<type> DEFAULT <expr>` is not misread as `<name> DEFAULT`.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // The "name" slot must be a single word token.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6125
    /// Parse a single argument of a `CREATE AGGREGATE` signature:
    /// `[IN] [<name>] <type>`. Aggregate signatures allow only the `IN` mode
    /// and no defaults, so `OUT`/`INOUT`/`VARIADIC`, `DEFAULT` and `=` are
    /// rejected explicitly. Uses the same speculative name-vs-type lookahead
    /// as [`Parser::parse_function_arg`].
    fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else {
            // The other argument modes are invalid in an aggregate signature.
            if self
                .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
                .is_some()
            {
                return self.expected_ref(
                    "IN or argument type in aggregate signature",
                    self.peek_token_ref(),
                );
            }
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        // Remember where the first parse ended so we can recover its text as
        // the argument name if a second type follows.
        let data_type_idx = self.get_current_index();

        // Speculative second parse; tokens that can legally follow a complete
        // argument (DEFAULT, ORDER, ',' or ')') must not start a type.
        fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT)
                || parser.peek_keyword(Keyword::ORDER)
                || parser.peek_token_ref().token == Token::Comma
                || parser.peek_token_ref().token == Token::RParen
            {
                parser_err!(
                    "The current token cannot start an aggregate argument type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
            let token = self.token_at(data_type_idx);
            // The "name" slot must be a single word token.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Defaults are not allowed for aggregate arguments.
        if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
            return self.expected_ref(
                "',' or ')' or ORDER BY after aggregate argument type",
                self.peek_token_ref(),
            );
        }

        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr: None,
        })
    }
6188
6189 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6195 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6196 {
6197 self.prev_token();
6198 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6199 }
6200 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6201 let trigger_name = self.parse_object_name(false)?;
6202 let table_name = if self.parse_keyword(Keyword::ON) {
6203 Some(self.parse_object_name(false)?)
6204 } else {
6205 None
6206 };
6207 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6208 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6209 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6210 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6211 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6212 )),
6213 None => None,
6214 };
6215 Ok(DropTrigger {
6216 if_exists,
6217 trigger_name,
6218 table_name,
6219 option,
6220 })
6221 }
6222
6223 pub fn parse_create_trigger(
6225 &mut self,
6226 temporary: bool,
6227 or_alter: bool,
6228 or_replace: bool,
6229 is_constraint: bool,
6230 ) -> Result<CreateTrigger, ParserError> {
6231 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6232 {
6233 self.prev_token();
6234 return self.expected_ref("an object type after CREATE", self.peek_token_ref());
6235 }
6236
6237 let name = self.parse_object_name(false)?;
6238 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
6239
6240 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
6241 self.expect_keyword_is(Keyword::ON)?;
6242 let table_name = self.parse_object_name(false)?;
6243
6244 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
6245 self.parse_object_name(true).ok()
6246 } else {
6247 None
6248 };
6249
6250 let characteristics = self.parse_constraint_characteristics()?;
6251
6252 let mut referencing = vec![];
6253 if self.parse_keyword(Keyword::REFERENCING) {
6254 while let Some(refer) = self.parse_trigger_referencing()? {
6255 referencing.push(refer);
6256 }
6257 }
6258
6259 let trigger_object = if self.parse_keyword(Keyword::FOR) {
6260 let include_each = self.parse_keyword(Keyword::EACH);
6261 let trigger_object =
6262 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
6263 Keyword::ROW => TriggerObject::Row,
6264 Keyword::STATEMENT => TriggerObject::Statement,
6265 unexpected_keyword => return Err(ParserError::ParserError(
6266 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
6267 )),
6268 };
6269
6270 Some(if include_each {
6271 TriggerObjectKind::ForEach(trigger_object)
6272 } else {
6273 TriggerObjectKind::For(trigger_object)
6274 })
6275 } else {
6276 let _ = self.parse_keyword(Keyword::FOR);
6277
6278 None
6279 };
6280
6281 let condition = self
6282 .parse_keyword(Keyword::WHEN)
6283 .then(|| self.parse_expr())
6284 .transpose()?;
6285
6286 let mut exec_body = None;
6287 let mut statements = None;
6288 if self.parse_keyword(Keyword::EXECUTE) {
6289 exec_body = Some(self.parse_trigger_exec_body()?);
6290 } else {
6291 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6292 }
6293
6294 Ok(CreateTrigger {
6295 or_alter,
6296 temporary,
6297 or_replace,
6298 is_constraint,
6299 name,
6300 period,
6301 period_before_table: true,
6302 events,
6303 table_name,
6304 referenced_table_name,
6305 referencing,
6306 trigger_object,
6307 condition,
6308 exec_body,
6309 statements_as: false,
6310 statements,
6311 characteristics,
6312 })
6313 }
6314
6315 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6317 Ok(
6318 match self.expect_one_of_keywords(&[
6319 Keyword::FOR,
6320 Keyword::BEFORE,
6321 Keyword::AFTER,
6322 Keyword::INSTEAD,
6323 ])? {
6324 Keyword::FOR => TriggerPeriod::For,
6325 Keyword::BEFORE => TriggerPeriod::Before,
6326 Keyword::AFTER => TriggerPeriod::After,
6327 Keyword::INSTEAD => self
6328 .expect_keyword_is(Keyword::OF)
6329 .map(|_| TriggerPeriod::InsteadOf)?,
6330 unexpected_keyword => return Err(ParserError::ParserError(
6331 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6332 )),
6333 },
6334 )
6335 }
6336
6337 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6339 Ok(
6340 match self.expect_one_of_keywords(&[
6341 Keyword::INSERT,
6342 Keyword::UPDATE,
6343 Keyword::DELETE,
6344 Keyword::TRUNCATE,
6345 ])? {
6346 Keyword::INSERT => TriggerEvent::Insert,
6347 Keyword::UPDATE => {
6348 if self.parse_keyword(Keyword::OF) {
6349 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6350 TriggerEvent::Update(cols)
6351 } else {
6352 TriggerEvent::Update(vec![])
6353 }
6354 }
6355 Keyword::DELETE => TriggerEvent::Delete,
6356 Keyword::TRUNCATE => TriggerEvent::Truncate,
6357 unexpected_keyword => return Err(ParserError::ParserError(
6358 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6359 )),
6360 },
6361 )
6362 }
6363
6364 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6366 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6367 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6368 TriggerReferencingType::OldTable
6369 }
6370 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6371 TriggerReferencingType::NewTable
6372 }
6373 _ => {
6374 return Ok(None);
6375 }
6376 };
6377
6378 let is_as = self.parse_keyword(Keyword::AS);
6379 let transition_relation_name = self.parse_object_name(false)?;
6380 Ok(Some(TriggerReferencing {
6381 refer_type,
6382 is_as,
6383 transition_relation_name,
6384 }))
6385 }
6386
6387 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6389 Ok(TriggerExecBody {
6390 exec_type: match self
6391 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6392 {
6393 Keyword::FUNCTION => TriggerExecBodyType::Function,
6394 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6395 unexpected_keyword => return Err(ParserError::ParserError(
6396 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
6397 )),
6398 },
6399 func_desc: self.parse_function_desc()?,
6400 })
6401 }
6402
6403 pub fn parse_create_macro(
6405 &mut self,
6406 or_replace: bool,
6407 temporary: bool,
6408 ) -> Result<Statement, ParserError> {
6409 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6410 let name = self.parse_object_name(false)?;
6411 self.expect_token(&Token::LParen)?;
6412 let args = if self.consume_token(&Token::RParen) {
6413 self.prev_token();
6414 None
6415 } else {
6416 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6417 };
6418
6419 self.expect_token(&Token::RParen)?;
6420 self.expect_keyword_is(Keyword::AS)?;
6421
6422 Ok(Statement::CreateMacro {
6423 or_replace,
6424 temporary,
6425 name,
6426 args,
6427 definition: if self.parse_keyword(Keyword::TABLE) {
6428 MacroDefinition::Table(self.parse_query()?)
6429 } else {
6430 MacroDefinition::Expr(self.parse_expr()?)
6431 },
6432 })
6433 } else {
6434 self.prev_token();
6435 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6436 }
6437 }
6438
6439 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6440 let name = self.parse_identifier()?;
6441
6442 let default_expr =
6443 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6444 Some(self.parse_expr()?)
6445 } else {
6446 None
6447 };
6448 Ok(MacroArg { name, default_expr })
6449 }
6450
6451 pub fn parse_create_external_table(
6453 &mut self,
6454 or_replace: bool,
6455 ) -> Result<CreateTable, ParserError> {
6456 self.expect_keyword_is(Keyword::TABLE)?;
6457 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6458 let table_name = self.parse_object_name(false)?;
6459 let (columns, constraints) = self.parse_columns()?;
6460
6461 let hive_distribution = self.parse_hive_distribution()?;
6462 let hive_formats = self.parse_hive_formats()?;
6463
6464 let file_format = if let Some(ref hf) = hive_formats {
6465 if let Some(ref ff) = hf.storage {
6466 match ff {
6467 HiveIOFormat::FileFormat { format } => Some(*format),
6468 _ => None,
6469 }
6470 } else {
6471 None
6472 }
6473 } else {
6474 None
6475 };
6476 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6477 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6478 let table_options = if !table_properties.is_empty() {
6479 CreateTableOptions::TableProperties(table_properties)
6480 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6481 CreateTableOptions::Options(options)
6482 } else {
6483 CreateTableOptions::None
6484 };
6485 Ok(CreateTableBuilder::new(table_name)
6486 .columns(columns)
6487 .constraints(constraints)
6488 .hive_distribution(hive_distribution)
6489 .hive_formats(hive_formats)
6490 .table_options(table_options)
6491 .or_replace(or_replace)
6492 .if_not_exists(if_not_exists)
6493 .external(true)
6494 .file_format(file_format)
6495 .location(location)
6496 .build())
6497 }
6498
6499 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6503 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6504 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6505 let table_name = self.parse_object_name(true)?;
6506
6507 self.expect_keyword_is(Keyword::CLONE)?;
6508 let clone = Some(self.parse_object_name(true)?);
6509
6510 let version =
6511 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6512 {
6513 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6514 } else {
6515 None
6516 };
6517
6518 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6519 CreateTableOptions::Options(options)
6520 } else {
6521 CreateTableOptions::None
6522 };
6523
6524 Ok(CreateTableBuilder::new(table_name)
6525 .snapshot(true)
6526 .if_not_exists(if_not_exists)
6527 .clone_clause(clone)
6528 .version(version)
6529 .table_options(table_options)
6530 .build())
6531 }
6532
6533 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6535 let next_token = self.next_token();
6536 match &next_token.token {
6537 Token::Word(w) => match w.keyword {
6538 Keyword::AVRO => Ok(FileFormat::AVRO),
6539 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6540 Keyword::ORC => Ok(FileFormat::ORC),
6541 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6542 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6543 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6544 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6545 _ => self.expected("fileformat", next_token),
6546 },
6547 _ => self.expected("fileformat", next_token),
6548 }
6549 }
6550
6551 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6552 if self.consume_token(&Token::Eq) {
6553 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6554 } else {
6555 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6556 }
6557 }
6558
6559 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6561 let next_token = self.next_token();
6562 match &next_token.token {
6563 Token::Word(w) => match w.keyword {
6564 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6565 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6566 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6567 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6568 _ => self.expected("fileformat", next_token),
6569 },
6570 _ => self.expected("fileformat", next_token),
6571 }
6572 }
6573
    /// Parses a `CREATE [MATERIALIZED] VIEW` statement body. The caller has
    /// already consumed `CREATE` and any OR ALTER / OR REPLACE / TEMPORARY
    /// modifiers and (for MySQL) the view parameters, passed in here.
    ///
    /// Handles dialect-specific clauses: Snowflake SECURE/COPY GRANTS,
    /// BigQuery OPTIONS, ClickHouse TO, Redshift WITH NO SCHEMA BINDING, and
    /// the materialized-view WITH [NO] DATA suffix.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // IF NOT EXISTS may appear either before or after the view name;
        // remember which position was used so the AST can round-trip it.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        // Optional CLUSTER BY (col, ...).
        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style OPTIONS(...) — overrides any WITH options parsed above.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse `TO <table>` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '<text>'` before AS, where the dialect supports it.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift: WITH NO SCHEMA BINDING after the defining query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        // Materialized views may end with WITH [NO] DATA;
        // Some(true)/Some(false) distinguishes the two, None means absent.
        let with_data = if materialized && self.parse_keyword(Keyword::WITH) {
            if self.parse_keyword(Keyword::NO) {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(false)
            } else {
                self.expect_keyword_is(Keyword::DATA)?;
                Some(true)
            }
        } else {
            None
        };

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
            with_data,
        })
    }
6683
    /// Parses MySQL-style `CREATE VIEW` prefix parameters:
    /// `ALGORITHM = {UNDEFINED | MERGE | TEMPTABLE}`, `DEFINER = <grantee>`,
    /// and `SQL SECURITY {DEFINER | INVOKER}` — each optional, in this order.
    ///
    /// Returns `Ok(None)` when none of the three clauses is present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Defensive arm: expect_one_of_keywords should never yield
                    // anything else; re-read the token for a precise error.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Defensive arm, as above.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only wrap in Some when at least one clause was actually present.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6741
6742 pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
6744 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6745 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6746
6747 let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6750 vec![Keyword::AUTHORIZATION]
6751 } else if dialect_of!(self is PostgreSqlDialect) {
6752 vec![
6753 Keyword::LOGIN,
6754 Keyword::NOLOGIN,
6755 Keyword::INHERIT,
6756 Keyword::NOINHERIT,
6757 Keyword::BYPASSRLS,
6758 Keyword::NOBYPASSRLS,
6759 Keyword::PASSWORD,
6760 Keyword::CREATEDB,
6761 Keyword::NOCREATEDB,
6762 Keyword::CREATEROLE,
6763 Keyword::NOCREATEROLE,
6764 Keyword::SUPERUSER,
6765 Keyword::NOSUPERUSER,
6766 Keyword::REPLICATION,
6767 Keyword::NOREPLICATION,
6768 Keyword::CONNECTION,
6769 Keyword::VALID,
6770 Keyword::IN,
6771 Keyword::ROLE,
6772 Keyword::ADMIN,
6773 Keyword::USER,
6774 ]
6775 } else {
6776 vec![]
6777 };
6778
6779 let mut authorization_owner = None;
6781 let mut login = None;
6783 let mut inherit = None;
6784 let mut bypassrls = None;
6785 let mut password = None;
6786 let mut create_db = None;
6787 let mut create_role = None;
6788 let mut superuser = None;
6789 let mut replication = None;
6790 let mut connection_limit = None;
6791 let mut valid_until = None;
6792 let mut in_role = vec![];
6793 let mut in_group = vec![];
6794 let mut role = vec![];
6795 let mut user = vec![];
6796 let mut admin = vec![];
6797
6798 while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6799 let loc = self
6800 .tokens
6801 .get(self.index - 1)
6802 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6803 match keyword {
6804 Keyword::AUTHORIZATION => {
6805 if authorization_owner.is_some() {
6806 parser_err!("Found multiple AUTHORIZATION", loc)
6807 } else {
6808 authorization_owner = Some(self.parse_object_name(false)?);
6809 Ok(())
6810 }
6811 }
6812 Keyword::LOGIN | Keyword::NOLOGIN => {
6813 if login.is_some() {
6814 parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6815 } else {
6816 login = Some(keyword == Keyword::LOGIN);
6817 Ok(())
6818 }
6819 }
6820 Keyword::INHERIT | Keyword::NOINHERIT => {
6821 if inherit.is_some() {
6822 parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6823 } else {
6824 inherit = Some(keyword == Keyword::INHERIT);
6825 Ok(())
6826 }
6827 }
6828 Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6829 if bypassrls.is_some() {
6830 parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6831 } else {
6832 bypassrls = Some(keyword == Keyword::BYPASSRLS);
6833 Ok(())
6834 }
6835 }
6836 Keyword::CREATEDB | Keyword::NOCREATEDB => {
6837 if create_db.is_some() {
6838 parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6839 } else {
6840 create_db = Some(keyword == Keyword::CREATEDB);
6841 Ok(())
6842 }
6843 }
6844 Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6845 if create_role.is_some() {
6846 parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6847 } else {
6848 create_role = Some(keyword == Keyword::CREATEROLE);
6849 Ok(())
6850 }
6851 }
6852 Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6853 if superuser.is_some() {
6854 parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6855 } else {
6856 superuser = Some(keyword == Keyword::SUPERUSER);
6857 Ok(())
6858 }
6859 }
6860 Keyword::REPLICATION | Keyword::NOREPLICATION => {
6861 if replication.is_some() {
6862 parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6863 } else {
6864 replication = Some(keyword == Keyword::REPLICATION);
6865 Ok(())
6866 }
6867 }
6868 Keyword::PASSWORD => {
6869 if password.is_some() {
6870 parser_err!("Found multiple PASSWORD", loc)
6871 } else {
6872 password = if self.parse_keyword(Keyword::NULL) {
6873 Some(Password::NullPassword)
6874 } else {
6875 Some(Password::Password(Expr::Value(self.parse_value()?)))
6876 };
6877 Ok(())
6878 }
6879 }
6880 Keyword::CONNECTION => {
6881 self.expect_keyword_is(Keyword::LIMIT)?;
6882 if connection_limit.is_some() {
6883 parser_err!("Found multiple CONNECTION LIMIT", loc)
6884 } else {
6885 connection_limit = Some(Expr::Value(self.parse_number_value()?));
6886 Ok(())
6887 }
6888 }
6889 Keyword::VALID => {
6890 self.expect_keyword_is(Keyword::UNTIL)?;
6891 if valid_until.is_some() {
6892 parser_err!("Found multiple VALID UNTIL", loc)
6893 } else {
6894 valid_until = Some(Expr::Value(self.parse_value()?));
6895 Ok(())
6896 }
6897 }
6898 Keyword::IN => {
6899 if self.parse_keyword(Keyword::ROLE) {
6900 if !in_role.is_empty() {
6901 parser_err!("Found multiple IN ROLE", loc)
6902 } else {
6903 in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6904 Ok(())
6905 }
6906 } else if self.parse_keyword(Keyword::GROUP) {
6907 if !in_group.is_empty() {
6908 parser_err!("Found multiple IN GROUP", loc)
6909 } else {
6910 in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6911 Ok(())
6912 }
6913 } else {
6914 self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
6915 }
6916 }
6917 Keyword::ROLE => {
6918 if !role.is_empty() {
6919 parser_err!("Found multiple ROLE", loc)
6920 } else {
6921 role = self.parse_comma_separated(|p| p.parse_identifier())?;
6922 Ok(())
6923 }
6924 }
6925 Keyword::USER => {
6926 if !user.is_empty() {
6927 parser_err!("Found multiple USER", loc)
6928 } else {
6929 user = self.parse_comma_separated(|p| p.parse_identifier())?;
6930 Ok(())
6931 }
6932 }
6933 Keyword::ADMIN => {
6934 if !admin.is_empty() {
6935 parser_err!("Found multiple ADMIN", loc)
6936 } else {
6937 admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6938 Ok(())
6939 }
6940 }
6941 _ => break,
6942 }?
6943 }
6944
6945 Ok(CreateRole {
6946 names,
6947 if_not_exists,
6948 login,
6949 inherit,
6950 bypassrls,
6951 password,
6952 create_db,
6953 create_role,
6954 replication,
6955 superuser,
6956 connection_limit,
6957 valid_until,
6958 in_role,
6959 in_group,
6960 role,
6961 user,
6962 admin,
6963 authorization_owner,
6964 })
6965 }
6966
6967 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6969 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6970 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6971 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6972 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6973 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6974 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6975 )),
6976 None => {
6977 match self.parse_identifier() {
6978 Ok(ident) => Owner::Ident(ident),
6979 Err(e) => {
6980 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6981 }
6982 }
6983 }
6984 };
6985 Ok(owner)
6986 }
6987
6988 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
6990 let name = self.parse_object_name(false)?;
6991 self.expect_keyword_is(Keyword::AS)?;
6992 let data_type = self.parse_data_type()?;
6993 let collation = if self.parse_keyword(Keyword::COLLATE) {
6994 Some(self.parse_identifier()?)
6995 } else {
6996 None
6997 };
6998 let default = if self.parse_keyword(Keyword::DEFAULT) {
6999 Some(self.parse_expr()?)
7000 } else {
7001 None
7002 };
7003 let mut constraints = Vec::new();
7004 while let Some(constraint) = self.parse_optional_table_constraint()? {
7005 constraints.push(constraint);
7006 }
7007
7008 Ok(CreateDomain {
7009 name,
7010 data_type,
7011 collation,
7012 default,
7013 constraints,
7014 })
7015 }
7016
    /// Parses a PostgreSQL `CREATE POLICY` statement body (after
    /// `CREATE POLICY`): name, target table, and the optional AS, FOR, TO,
    /// USING, and WITH CHECK clauses, in that order.
    pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional AS { PERMISSIVE | RESTRICTIVE }.
        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                // Unreachable in practice: expect_one_of_keywords limits the set.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
                )),
            })
        } else {
            None
        };

        // Optional FOR { ALL | SELECT | INSERT | UPDATE | DELETE }.
        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                // Unreachable in practice, as above.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
                )),
            })
        } else {
            None
        };

        // Optional TO <role, ...> (roles the policy applies to).
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // Optional USING ( <expr> ) — row visibility predicate.
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // Optional WITH CHECK ( <expr> ) — predicate for new/updated rows.
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
7101
7102 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7112 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7113 let name = self.parse_identifier()?;
7114
7115 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7116 Some(self.parse_literal_string()?)
7117 } else {
7118 None
7119 };
7120
7121 let url = if self.parse_keyword(Keyword::URL) {
7122 Some(self.parse_literal_string()?)
7123 } else {
7124 None
7125 };
7126
7127 let comment = self.parse_optional_inline_comment()?;
7128
7129 let with_dcproperties =
7130 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7131 properties if !properties.is_empty() => Some(properties),
7132 _ => None,
7133 };
7134
7135 Ok(CreateConnector {
7136 name,
7137 if_not_exists,
7138 connector_type,
7139 url,
7140 comment,
7141 with_dcproperties,
7142 })
7143 }
7144
7145 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7151 let mut parts = vec![];
7152 loop {
7153 parts.push(ObjectNamePart::Identifier(Ident::new(
7154 self.next_token().to_string(),
7155 )));
7156 if !self.consume_token(&Token::Period) {
7157 break;
7158 }
7159 }
7160 Ok(ObjectName(parts))
7161 }
7162
    /// Parses a PostgreSQL `CREATE OPERATOR` statement body: the operator
    /// name followed by a parenthesized, comma-separated option list.
    ///
    /// Each option may be given at most once (enforced by the match guards:
    /// a repeated keyword falls through to the duplicate-keyword error arm).
    /// `FUNCTION` (or its synonym `PROCEDURE`) is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Flag options (no value).
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE are alternatives for the same slot;
                // remember which spelling was used for round-tripping.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or
                // the `OPERATOR(<name>)` wrapper syntax.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                // Selectivity estimator functions.
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a keyword's "not yet seen" guard failed, i.e.
                // the option was specified twice.
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION/PROCEDURE is the only mandatory option.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7284
7285 pub fn parse_create_aggregate(
7289 &mut self,
7290 or_replace: bool,
7291 ) -> Result<CreateAggregate, ParserError> {
7292 let name = self.parse_object_name(false)?;
7293
7294 self.expect_token(&Token::LParen)?;
7296 let args = if self.consume_token(&Token::Mul) {
7297 vec![]
7299 } else if self.consume_token(&Token::RParen) {
7300 self.prev_token();
7301 vec![]
7302 } else {
7303 let parsed = self.parse_comma_separated(|p| p.parse_data_type())?;
7304 parsed
7305 };
7306 self.expect_token(&Token::RParen)?;
7307
7308 self.expect_token(&Token::LParen)?;
7310 let mut options: Vec<CreateAggregateOption> = Vec::new();
7311 loop {
7312 let token = self.next_token();
7313 match &token.token {
7314 Token::RParen => break,
7315 Token::Comma => continue,
7316 Token::Word(word) => {
7317 let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
7318 options.push(option);
7319 }
7320 other => {
7321 return Err(ParserError::ParserError(format!(
7322 "Unexpected token in CREATE AGGREGATE options: {other:?}"
7323 )));
7324 }
7325 }
7326 }
7327
7328 Ok(CreateAggregate {
7329 or_replace,
7330 name,
7331 args,
7332 options,
7333 })
7334 }
7335
    /// Parse a single option inside the parenthesized option list of
    /// `CREATE AGGREGATE`.
    ///
    /// `key` is the option name, already upper-cased by the caller.
    /// Most options take the form `KEY = value`; the bare flags
    /// `FINALFUNC_EXTRA`, `MFINALFUNC_EXTRA`, and `HYPOTHETICAL` take no value.
    /// Options prefixed with `M` configure the moving-aggregate variant.
    /// Returns an error for any unrecognized key.
    fn parse_create_aggregate_option(
        &mut self,
        key: &str,
    ) -> Result<CreateAggregateOption, ParserError> {
        match key {
            // State-transition function name.
            "SFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // State value data type.
            "STYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
            }
            // Approximate average size (bytes) of the state value.
            "SSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Sspace(size))
            }
            "FINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Finalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` part follows.
            "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
            "FINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::FinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            // Function to combine two partial-aggregate states.
            "COMBINEFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Combinefunc(
                    self.parse_object_name(false)?,
                ))
            }
            "SERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Serialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "DESERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Deserialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Initial state value, parsed as a literal value.
            "INITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
            }
            // M-prefixed options: moving-aggregate (window) variant.
            "MSFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Msfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Inverse transition function for the moving-aggregate mode.
            "MINVFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minvfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MSTYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
            }
            "MSSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Msspace(size))
            }
            "MFINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mfinalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` part follows.
            "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
            "MFINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::MfinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "MINITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
            }
            // Sort operator usable with the aggregate (ordered-set support).
            "SORTOP" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sortop(
                    self.parse_object_name(false)?,
                ))
            }
            // PARALLEL = SAFE | RESTRICTED | UNSAFE.
            "PARALLEL" => {
                self.expect_token(&Token::Eq)?;
                let parallel = match self.expect_one_of_keywords(&[
                    Keyword::SAFE,
                    Keyword::RESTRICTED,
                    Keyword::UNSAFE,
                ])? {
                    Keyword::SAFE => FunctionParallel::Safe,
                    Keyword::RESTRICTED => FunctionParallel::Restricted,
                    Keyword::UNSAFE => FunctionParallel::Unsafe,
                    // expect_one_of_keywords already limited us to the three above.
                    _ => unreachable!(),
                };
                Ok(CreateAggregateOption::Parallel(parallel))
            }
            // Bare flag: no `= value` part follows.
            "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
            other => Err(ParserError::ParserError(format!(
                "Unknown CREATE AGGREGATE option: {other}"
            ))),
        }
    }
7455
7456 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7457 let token = self.next_token();
7458 match &token.token {
7459 Token::Word(word) => match word.value.to_uppercase().as_str() {
7460 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7461 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7462 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7463 other => Err(ParserError::ParserError(format!(
7464 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7465 ))),
7466 },
7467 other => Err(ParserError::ParserError(format!(
7468 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7469 ))),
7470 }
7471 }
7472
7473 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7477 let name = self.parse_object_name(false)?;
7478 self.expect_keyword(Keyword::USING)?;
7479 let using = self.parse_identifier()?;
7480
7481 Ok(CreateOperatorFamily { name, using })
7482 }
7483
7484 pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
7488 let name = self.parse_object_name(false)?;
7489 let default = self.parse_keyword(Keyword::DEFAULT);
7490 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
7491 let for_type = self.parse_data_type()?;
7492 self.expect_keyword(Keyword::USING)?;
7493 let using = self.parse_identifier()?;
7494
7495 let family = if self.parse_keyword(Keyword::FAMILY) {
7496 Some(self.parse_object_name(false)?)
7497 } else {
7498 None
7499 };
7500
7501 self.expect_keyword(Keyword::AS)?;
7502
7503 let mut items = vec![];
7504 loop {
7505 if self.parse_keyword(Keyword::OPERATOR) {
7506 let strategy_number = self.parse_literal_uint()?;
7507 let operator_name = self.parse_operator_name()?;
7508
7509 let op_types = if self.consume_token(&Token::LParen) {
7511 let left = self.parse_data_type()?;
7512 self.expect_token(&Token::Comma)?;
7513 let right = self.parse_data_type()?;
7514 self.expect_token(&Token::RParen)?;
7515 Some(OperatorArgTypes { left, right })
7516 } else {
7517 None
7518 };
7519
7520 let purpose = if self.parse_keyword(Keyword::FOR) {
7522 if self.parse_keyword(Keyword::SEARCH) {
7523 Some(OperatorPurpose::ForSearch)
7524 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7525 let sort_family = self.parse_object_name(false)?;
7526 Some(OperatorPurpose::ForOrderBy { sort_family })
7527 } else {
7528 return self
7529 .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
7530 }
7531 } else {
7532 None
7533 };
7534
7535 items.push(OperatorClassItem::Operator {
7536 strategy_number,
7537 operator_name,
7538 op_types,
7539 purpose,
7540 });
7541 } else if self.parse_keyword(Keyword::FUNCTION) {
7542 let support_number = self.parse_literal_uint()?;
7543
7544 let op_types = if self.consume_token(&Token::LParen)
7546 && self.peek_token_ref().token != Token::RParen
7547 {
7548 let mut types = vec![];
7549 loop {
7550 types.push(self.parse_data_type()?);
7551 if !self.consume_token(&Token::Comma) {
7552 break;
7553 }
7554 }
7555 self.expect_token(&Token::RParen)?;
7556 Some(types)
7557 } else if self.consume_token(&Token::LParen) {
7558 self.expect_token(&Token::RParen)?;
7559 Some(vec![])
7560 } else {
7561 None
7562 };
7563
7564 let function_name = self.parse_object_name(false)?;
7565
7566 let argument_types = if self.consume_token(&Token::LParen) {
7568 let mut types = vec![];
7569 loop {
7570 if self.peek_token_ref().token == Token::RParen {
7571 break;
7572 }
7573 types.push(self.parse_data_type()?);
7574 if !self.consume_token(&Token::Comma) {
7575 break;
7576 }
7577 }
7578 self.expect_token(&Token::RParen)?;
7579 types
7580 } else {
7581 vec![]
7582 };
7583
7584 items.push(OperatorClassItem::Function {
7585 support_number,
7586 op_types,
7587 function_name,
7588 argument_types,
7589 });
7590 } else if self.parse_keyword(Keyword::STORAGE) {
7591 let storage_type = self.parse_data_type()?;
7592 items.push(OperatorClassItem::Storage { storage_type });
7593 } else {
7594 break;
7595 }
7596
7597 if !self.consume_token(&Token::Comma) {
7599 break;
7600 }
7601 }
7602
7603 Ok(CreateOperatorClass {
7604 name,
7605 default,
7606 for_type,
7607 using,
7608 family,
7609 items,
7610 })
7611 }
7612
    /// Parse a `DROP` statement, after the `DROP` keyword has been consumed.
    ///
    /// Dispatches on the object-kind keyword. Kinds with their own grammar
    /// (FUNCTION, POLICY, CONNECTOR, DOMAIN, PROCEDURE, SECRET, TRIGGER,
    /// EXTENSION, OPERATOR [FAMILY|CLASS]) delegate to dedicated helpers and
    /// return early; the remaining kinds share the generic tail
    /// `[IF EXISTS] name [, ...] [CASCADE | RESTRICT] [PURGE] [ON table]`.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // `DROP TEMPORARY ...` (MySQL/Generic/DuckDB); DuckDB additionally
        // allows `DROP PERSISTENT ...` (used for secrets below).
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::COLLATION) {
            ObjectType::Collation
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            // Object kinds below have their own grammar; delegate and return.
            return self.parse_drop_function().map(Into::into);
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy().map(Into::into);
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain().map(Into::into);
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            // DuckDB secrets use the TEMPORARY/PERSISTENT flags parsed above.
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger().map(Into::into);
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // `DROP OPERATOR [FAMILY | CLASS] ...`
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected_ref(
                "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token_ref(),
            );
        };
        // Generic tail shared by the simple object kinds.
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Capture the location before the behavior keywords for error reporting.
        let loc = self.peek_token_ref().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        // CASCADE and RESTRICT are mutually exclusive.
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        // DROP ROLE accepts none of the behavior modifiers.
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // Optional `ON table` (e.g. MySQL `DROP INDEX ... ON table`).
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
7712
7713 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7714 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7715 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7716 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7717 _ => None,
7718 }
7719 }
7720
7721 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7726 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7727 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7728 let drop_behavior = self.parse_optional_drop_behavior();
7729 Ok(DropFunction {
7730 if_exists,
7731 func_desc,
7732 drop_behavior,
7733 })
7734 }
7735
7736 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7742 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7743 let name = self.parse_identifier()?;
7744 self.expect_keyword_is(Keyword::ON)?;
7745 let table_name = self.parse_object_name(false)?;
7746 let drop_behavior = self.parse_optional_drop_behavior();
7747 Ok(DropPolicy {
7748 if_exists,
7749 name,
7750 table_name,
7751 drop_behavior,
7752 })
7753 }
7754 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7760 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7761 let name = self.parse_identifier()?;
7762 Ok(Statement::DropConnector { if_exists, name })
7763 }
7764
7765 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7769 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7770 let name = self.parse_object_name(false)?;
7771 let drop_behavior = self.parse_optional_drop_behavior();
7772 Ok(DropDomain {
7773 if_exists,
7774 name,
7775 drop_behavior,
7776 })
7777 }
7778
7779 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7784 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7785 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7786 let drop_behavior = self.parse_optional_drop_behavior();
7787 Ok(Statement::DropProcedure {
7788 if_exists,
7789 proc_desc,
7790 drop_behavior,
7791 })
7792 }
7793
7794 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7795 let name = self.parse_object_name(false)?;
7796
7797 let args = if self.consume_token(&Token::LParen) {
7798 if self.consume_token(&Token::RParen) {
7799 Some(vec![])
7800 } else {
7801 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7802 self.expect_token(&Token::RParen)?;
7803 Some(args)
7804 }
7805 } else {
7806 None
7807 };
7808
7809 Ok(FunctionDesc { name, args })
7810 }
7811
    /// Parse the remainder of a DuckDB `DROP SECRET` statement:
    /// `[IF EXISTS] name [FROM storage]`.
    ///
    /// `temporary`/`persistent` were parsed by the caller (`parse_drop`);
    /// specifying both at once is rejected here.
    fn parse_drop_secret(
        &mut self,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
            // NOTE(review): `.ok()` silently drops a parse error after FROM,
            // yielding `None` instead of failing — confirm this best-effort
            // behavior is intentional.
            self.parse_identifier().ok()
        } else {
            None
        };
        // Encode the flags as Some(true)=temporary, Some(false)=persistent,
        // None=unspecified; both set at once is a syntax error.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::DropSecret {
            if_exists,
            temporary: temp,
            name,
            storage_specifier,
        })
    }
7839
    /// Parse a `DECLARE` statement, after the `DECLARE` keyword has been
    /// consumed.
    ///
    /// BigQuery, Snowflake, and MsSql have their own `DECLARE` grammars and
    /// are dispatched to dedicated parsers; the fallthrough implements the
    /// SQL cursor form:
    /// `name [BINARY] [ASENSITIVE | INSENSITIVE] [[NO] SCROLL]
    ///  CURSOR [WITH | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // BINARY is recorded as Some(present?) rather than an Option flag pair.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        // Some(true) = INSENSITIVE, Some(false) = ASENSITIVE, None = unspecified.
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        // Some(true) = SCROLL, Some(false) = NO SCROLL, None = unspecified.
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // parse_one_of_keywords only returns WITH or WITHOUT here.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7914
7915 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7923 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7924
7925 let data_type = match &self.peek_token_ref().token {
7926 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7927 _ => Some(self.parse_data_type()?),
7928 };
7929
7930 let expr = if data_type.is_some() {
7931 if self.parse_keyword(Keyword::DEFAULT) {
7932 Some(self.parse_expr()?)
7933 } else {
7934 None
7935 }
7936 } else {
7937 self.expect_keyword_is(Keyword::DEFAULT)?;
7940 Some(self.parse_expr()?)
7941 };
7942
7943 Ok(Statement::Declare {
7944 stmts: vec![Declare {
7945 names,
7946 data_type,
7947 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7948 declare_type: None,
7949 binary: None,
7950 sensitive: None,
7951 scroll: None,
7952 hold: None,
7953 for_query: None,
7954 }],
7955 })
7956 }
7957
    /// Parse a Snowflake `DECLARE` block: a semicolon-separated sequence of
    /// declarations, each one of
    /// `name CURSOR FOR {query | expr}`,
    /// `name RESULTSET [:= expr]`,
    /// `name EXCEPTION [(code, 'msg')]`, or
    /// `name [type] [DEFAULT | := expr]`.
    ///
    /// After each `;` the parser peeks ahead: a non-keyword word starts the
    /// next declaration, anything else means the DECLARE block is over (the
    /// `;` is pushed back so the caller sees the statement boundary).
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` binds a query; anything else is
                    // treated as an expression (e.g. a variable holding a query).
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // RESULTSET may carry an optional `:= expr` / `DEFAULT expr`.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // EXCEPTION may carry an optional parenthesized payload.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: try `DEFAULT/:= expr` first; otherwise a
                    // word is taken as the data type, optionally followed by
                    // an assignment.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        // Next token is a non-keyword identifier: another
                        // declaration follows.
                        continue;
                    }
                    _ => {
                        // Not a declaration: push the `;` back for the caller.
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8073
8074 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8086 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8087
8088 Ok(Statement::Declare { stmts })
8089 }
8090
    /// Parse one MsSql declaration:
    /// `@name [AS] type [= expr]` or `name CURSOR [FOR query]`.
    ///
    /// Variable names must start with `@` unless they declare a cursor.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): this reports a TokenizerError for what is a
                // parse-level problem — confirm whether ParserError was
                // intended (callers matching on the variant would observe
                // the difference).
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // CURSOR declarations carry no data type; `AS` before the type is
        // optional and simply skipped.
        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // Either `FOR query` (cursors) or an optional `= expr` assignment.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
8154
8155 pub fn parse_snowflake_variable_declaration_expression(
8163 &mut self,
8164 ) -> Result<Option<DeclareAssignment>, ParserError> {
8165 Ok(match &self.peek_token_ref().token {
8166 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8167 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8169 }
8170 Token::Assignment => {
8171 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8173 self.parse_expr()?,
8174 )))
8175 }
8176 _ => None,
8177 })
8178 }
8179
8180 pub fn parse_mssql_variable_declaration_expression(
8187 &mut self,
8188 ) -> Result<Option<DeclareAssignment>, ParserError> {
8189 Ok(match &self.peek_token_ref().token {
8190 Token::Eq => {
8191 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8193 self.parse_expr()?,
8194 )))
8195 }
8196 _ => None,
8197 })
8198 }
8199
    /// Parse a `FETCH` statement, after the `FETCH` keyword has been consumed:
    /// `FETCH [direction] {FROM | IN} cursor [INTO target]`.
    ///
    /// A bare count (no direction keyword) is parsed as `FetchDirection::Count`.
    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
        let direction = if self.parse_keyword(Keyword::NEXT) {
            FetchDirection::Next
        } else if self.parse_keyword(Keyword::PRIOR) {
            FetchDirection::Prior
        } else if self.parse_keyword(Keyword::FIRST) {
            FetchDirection::First
        } else if self.parse_keyword(Keyword::LAST) {
            FetchDirection::Last
        } else if self.parse_keyword(Keyword::ABSOLUTE) {
            FetchDirection::Absolute {
                limit: self.parse_number_value()?,
            }
        } else if self.parse_keyword(Keyword::RELATIVE) {
            FetchDirection::Relative {
                limit: self.parse_number_value()?,
            }
        } else if self.parse_keyword(Keyword::FORWARD) {
            // `FORWARD ALL` or `FORWARD n`.
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::ForwardAll
            } else {
                FetchDirection::Forward {
                    limit: Some(self.parse_number_value()?),
                }
            }
        } else if self.parse_keyword(Keyword::BACKWARD) {
            // `BACKWARD ALL` or `BACKWARD n`.
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::BackwardAll
            } else {
                FetchDirection::Backward {
                    limit: Some(self.parse_number_value()?),
                }
            }
        } else if self.parse_keyword(Keyword::ALL) {
            FetchDirection::All
        } else {
            // No direction keyword: a bare row count.
            FetchDirection::Count {
                limit: self.parse_number_value()?,
            }
        };

        // FROM and IN are equivalent separators before the cursor name.
        let position = if self.peek_keyword(Keyword::FROM) {
            self.expect_keyword(Keyword::FROM)?;
            FetchPosition::From
        } else if self.peek_keyword(Keyword::IN) {
            self.expect_keyword(Keyword::IN)?;
            FetchPosition::In
        } else {
            return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
        };

        let name = self.parse_identifier()?;

        // Optional `INTO target` (e.g. PL/pgSQL / embedded SQL forms).
        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(Statement::Fetch {
            name,
            direction,
            position,
            into,
        })
    }
8269
8270 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8272 let object_type = if self.parse_keyword(Keyword::ALL) {
8273 DiscardObject::ALL
8274 } else if self.parse_keyword(Keyword::PLANS) {
8275 DiscardObject::PLANS
8276 } else if self.parse_keyword(Keyword::SEQUENCES) {
8277 DiscardObject::SEQUENCES
8278 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8279 DiscardObject::TEMP
8280 } else {
8281 return self.expected_ref(
8282 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8283 self.peek_token_ref(),
8284 );
8285 };
8286 Ok(Statement::Discard { object_type })
8287 }
8288
    /// Parse the body of a `CREATE [UNIQUE] INDEX` statement, after
    /// `CREATE [UNIQUE] INDEX` has been consumed (`unique` records whether
    /// UNIQUE was present).
    ///
    /// Handles the optional index name (`CREATE INDEX ON t (...)` is valid),
    /// `USING` before or after `ON table`, the column list, and the optional
    /// INCLUDE / NULLS [NOT] DISTINCT / WITH (...) / WHERE clauses plus
    /// dialect-specific index options.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional: if the next keyword is ON (and there is
        // no IF NOT EXISTS, which requires a name), the name was omitted.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // `USING method` may appear between the name and ON.
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // `USING method` may also appear after the table name; a value parsed
        // here wins only if none was parsed before ON.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Optional `INCLUDE (col, ...)` — non-key covering columns.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // Optional `NULLS [NOT] DISTINCT`; Some(true)=DISTINCT, Some(false)=NOT DISTINCT.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Optional `WITH (param, ...)` storage parameters, dialect-gated.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional `WHERE predicate` — partial index.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing ALGORITHM/LOCK clauses (MySQL-style) are parsed as alter
        // table operations.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8380
8381 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8383 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8384 let name = self.parse_identifier()?;
8385
8386 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8387 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8388 Some(self.parse_identifier()?)
8389 } else {
8390 None
8391 };
8392
8393 let version = if self.parse_keyword(Keyword::VERSION) {
8394 Some(self.parse_identifier()?)
8395 } else {
8396 None
8397 };
8398
8399 let cascade = self.parse_keyword(Keyword::CASCADE);
8400
8401 (schema, version, cascade)
8402 } else {
8403 (None, None, false)
8404 };
8405
8406 Ok(CreateExtension {
8407 name,
8408 if_not_exists,
8409 schema,
8410 version,
8411 cascade,
8412 })
8413 }
8414
8415 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8417 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8418 let name = self.parse_object_name(false)?;
8419
8420 let definition = if self.parse_keyword(Keyword::FROM) {
8421 CreateCollationDefinition::From(self.parse_object_name(false)?)
8422 } else if self.consume_token(&Token::LParen) {
8423 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8424 self.expect_token(&Token::RParen)?;
8425 CreateCollationDefinition::Options(options)
8426 } else {
8427 return self.expected_ref(
8428 "FROM or parenthesized option list after CREATE COLLATION name",
8429 self.peek_token_ref(),
8430 );
8431 };
8432
8433 Ok(CreateCollation {
8434 if_not_exists,
8435 name,
8436 definition,
8437 })
8438 }
8439
8440 pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
8442 if self.parse_keyword(Keyword::CONFIGURATION) {
8443 let name = self.parse_object_name(false)?;
8444 self.expect_token(&Token::LParen)?;
8445 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8446 self.expect_token(&Token::RParen)?;
8447 Ok(Statement::CreateTextSearchConfiguration(
8448 CreateTextSearchConfiguration { name, options },
8449 ))
8450 } else if self.parse_keyword(Keyword::DICTIONARY) {
8451 let name = self.parse_object_name(false)?;
8452 self.expect_token(&Token::LParen)?;
8453 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8454 self.expect_token(&Token::RParen)?;
8455 Ok(Statement::CreateTextSearchDictionary(
8456 CreateTextSearchDictionary { name, options },
8457 ))
8458 } else if self.parse_keyword(Keyword::PARSER) {
8459 let name = self.parse_object_name(false)?;
8460 self.expect_token(&Token::LParen)?;
8461 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8462 self.expect_token(&Token::RParen)?;
8463 Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
8464 name,
8465 options,
8466 }))
8467 } else if self.parse_keyword(Keyword::TEMPLATE) {
8468 let name = self.parse_object_name(false)?;
8469 self.expect_token(&Token::LParen)?;
8470 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8471 self.expect_token(&Token::RParen)?;
8472 Ok(Statement::CreateTextSearchTemplate(
8473 CreateTextSearchTemplate { name, options },
8474 ))
8475 } else {
8476 self.expected_ref(
8477 "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
8478 self.peek_token_ref(),
8479 )
8480 }
8481 }
8482
8483 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8485 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8486 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8487 let cascade_or_restrict =
8488 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8489 Ok(Statement::DropExtension(DropExtension {
8490 names,
8491 if_exists,
8492 cascade_or_restrict: cascade_or_restrict
8493 .map(|k| match k {
8494 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8495 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8496 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8497 })
8498 .transpose()?,
8499 }))
8500 }
8501
8502 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8505 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8506 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8507 let drop_behavior = self.parse_optional_drop_behavior();
8508 Ok(Statement::DropOperator(DropOperator {
8509 if_exists,
8510 operators,
8511 drop_behavior,
8512 }))
8513 }
8514
8515 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8518 let name = self.parse_operator_name()?;
8519 self.expect_token(&Token::LParen)?;
8520
8521 let left_type = if self.parse_keyword(Keyword::NONE) {
8523 None
8524 } else {
8525 Some(self.parse_data_type()?)
8526 };
8527
8528 self.expect_token(&Token::Comma)?;
8529
8530 let right_type = self.parse_data_type()?;
8532
8533 self.expect_token(&Token::RParen)?;
8534
8535 Ok(DropOperatorSignature {
8536 name,
8537 left_type,
8538 right_type,
8539 })
8540 }
8541
8542 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8546 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8547 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8548 self.expect_keyword(Keyword::USING)?;
8549 let using = self.parse_identifier()?;
8550 let drop_behavior = self.parse_optional_drop_behavior();
8551 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8552 if_exists,
8553 names,
8554 using,
8555 drop_behavior,
8556 }))
8557 }
8558
8559 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8563 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8564 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8565 self.expect_keyword(Keyword::USING)?;
8566 let using = self.parse_identifier()?;
8567 let drop_behavior = self.parse_optional_drop_behavior();
8568 Ok(Statement::DropOperatorClass(DropOperatorClass {
8569 if_exists,
8570 names,
8571 using,
8572 drop_behavior,
8573 }))
8574 }
8575
8576 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8580 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8581 self.expect_token(&Token::LParen)?;
8582 let columns =
8583 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8584 self.expect_token(&Token::RParen)?;
8585 Ok(HiveDistributionStyle::PARTITIONED { columns })
8586 } else {
8587 Ok(HiveDistributionStyle::NONE)
8588 }
8589 }
8590
8591 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8595 let token = self.next_token();
8596 match &token.token {
8597 Token::Word(w) => match w.keyword {
8598 Keyword::AUTO => Ok(DistStyle::Auto),
8599 Keyword::EVEN => Ok(DistStyle::Even),
8600 Keyword::KEY => Ok(DistStyle::Key),
8601 Keyword::ALL => Ok(DistStyle::All),
8602 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8603 },
8604 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8605 }
8606 }
8607
    /// Parses the Hive storage clauses of a `CREATE TABLE` statement —
    /// `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'`, and
    /// `WITH SERDEPROPERTIES (...)` — in any order.
    ///
    /// Returns `None` if none of the clauses are present; otherwise a
    /// `HiveFormat` populated with whichever clauses were found.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    // `ROW FORMAT SERDE '...'` or `ROW FORMAT DELIMITED ...`.
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // `STORED AS INPUTFORMAT '...' OUTPUTFORMAT '...'`.
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // `STORED AS <file format>`, e.g. PARQUET or ORC.
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Put WITH back so the options parser can match the full
                    // `WITH SERDEPROPERTIES (...)` sequence itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // WITH was not followed by SERDEPROPERTIES — it belongs
                        // to a later clause, so stop scanning Hive formats.
                        break;
                    }
                }
                None => break,
                _ => break,
            }
        }

        Ok(hive_format)
    }
8663
    /// Parses a Hive `ROW FORMAT` clause (the `ROW` keyword has already been
    /// consumed): either `FORMAT SERDE '<class>'` or `FORMAT DELIMITED`
    /// followed by any number of delimiter sub-clauses
    /// (`FIELDS TERMINATED BY ...`, `LINES TERMINATED BY ...`, etc.).
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED: collect delimiter sub-clauses until a keyword
                // sequence fails to match.
                //
                // NOTE(review): when a leading keyword matches (e.g. FIELDS)
                // but the rest of its sequence does not, the loop breaks with
                // that keyword already consumed and not restored — confirm
                // this is intended.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                // Optional `ESCAPED BY` rider on the FIELDS clause.
                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8760
8761 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8762 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8763 Ok(Some(self.parse_identifier()?))
8764 } else {
8765 Ok(None)
8766 }
8767 }
8768
    /// Parses the body of a `CREATE TABLE` statement, after `CREATE` and any
    /// leading modifiers (`OR REPLACE`, `TEMPORARY`, `GLOBAL`/`LOCAL`,
    /// `TRANSIENT`) have been consumed; those are passed in as flags.
    ///
    /// The clause order below is significant: each clause is attempted
    /// exactly once, in the sequence the supported dialects emit them.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<CreateTable, ParserError> {
        // BigQuery permits unquoted hyphens in table names (project-id.dataset.table).
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // Postgres: `CREATE TABLE t PARTITION OF parent ...`.
        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // Snowflake: `CREATE TABLE t CLONE src`. A parse failure of the source
        // name is deliberately swallowed (`.ok()`), leaving `clone` as None.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        let (columns, constraints) = self.parse_columns()?;
        // Hive: a table-level `COMMENT '...'` directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // `PARTITION OF` requires a bound spec: `FOR VALUES ...` or `DEFAULT`.
        let for_values = if partition_of.is_some() {
            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
                Some(self.parse_partition_for_values()?)
            } else {
                return self.expected_ref(
                    "FOR VALUES or DEFAULT after PARTITION OF",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        // SQLite: `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive-specific storage clauses.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse: table-level `PRIMARY KEY <expr>`.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // ClickHouse: `ORDER BY` with either a single expression or a
        // (possibly empty) parenthesized list.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token_ref().token != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite: `STRICT` tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // Redshift: `BACKUP { YES | NO }`.
        let backup = if self.parse_keyword(Keyword::BACKUP) {
            let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
            Some(keyword == Keyword::YES)
        } else {
            None
        };

        // Redshift distribution/sort clauses.
        let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
            Some(self.parse_dist_style()?)
        } else {
            None
        };
        let distkey = if self.parse_keyword(Keyword::DISTKEY) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };
        let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        // `AS <query>`; some dialects also allow `CREATE TABLE t SELECT ...`
        // without AS, in which case SELECT is pushed back for parse_query.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .backup(backup)
            .diststyle(diststyle)
            .distkey(distkey)
            .sortkey(sortkey)
            .build())
    }
8953
    /// Parses an optional `LIKE` clause of `CREATE TABLE`: either the
    /// parenthesized form `(LIKE <table> [{INCLUDING | EXCLUDING} DEFAULTS])`
    /// (when the dialect supports it), or plain `LIKE`/`ILIKE <table>`.
    ///
    /// Returns `None` when no LIKE clause is present; a speculatively
    /// consumed `(` is pushed back so the column list can be parsed.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The `(` starts a column list, not a LIKE clause: put it back.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8991
8992 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8993 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8994 Ok(OnCommit::DeleteRows)
8995 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8996 Ok(OnCommit::PreserveRows)
8997 } else if self.parse_keywords(&[Keyword::DROP]) {
8998 Ok(OnCommit::Drop)
8999 } else {
9000 parser_err!(
9001 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
9002 self.peek_token_ref()
9003 )
9004 }
9005 }
9006
    /// Parses the partition-bound specification of a Postgres
    /// `CREATE TABLE ... PARTITION OF` statement: `DEFAULT`,
    /// `FOR VALUES IN (...)`, `FOR VALUES FROM (...) TO (...)`, or
    /// `FOR VALUES WITH (MODULUS m, REMAINDER r)`.
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            // List partitioning: `IN (v1, v2, ...)`.
            self.expect_token(&Token::LParen)?;
            // Reject empty lists up front for a clearer error message.
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            // Range partitioning: `FROM (...) TO (...)`; bounds may be
            // MINVALUE/MAXVALUE or expressions.
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            // Hash partitioning: `WITH (MODULUS m, REMAINDER r)`.
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
        }
    }
9058
9059 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9061 if self.parse_keyword(Keyword::MINVALUE) {
9062 Ok(PartitionBoundValue::MinValue)
9063 } else if self.parse_keyword(Keyword::MAXVALUE) {
9064 Ok(PartitionBoundValue::MaxValue)
9065 } else {
9066 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9067 }
9068 }
9069
    /// Parses the dialect-specific trailing configuration of `CREATE TABLE`:
    /// `INHERITS (...)`, `WITH (...)`, `TBLPROPERTIES (...)`,
    /// `PARTITION BY`, `CLUSTER BY`, BigQuery `OPTIONS (...)`, and plain
    /// (MySQL-style) table options.
    ///
    /// Only a single `CreateTableOptions` variant is kept: each later clause
    /// overwrites `table_options` set by an earlier one (e.g. TBLPROPERTIES
    /// replaces WITH), and plain options are only attempted when nothing
    /// else set `table_options`.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // Postgres: `INHERITS (parent [, ...])`.
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // BigQuery: trailing `OPTIONS (...)` list.
            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain key/value options (MySQL style) only when no
        // other option form was found; Hive never uses this form.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
9134
    /// Parses a single plain (MySQL-style) table option, returning `Ok(None)`
    /// when the next tokens do not start a known option.
    ///
    /// Structured options (COMMENT, ENGINE, TABLESPACE, UNION, and the
    /// `START TRANSACTION` marker) are handled explicitly; everything else is
    /// a `key [=] value` pair built from a long keyword table.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // MySQL's `CREATE TABLE ... START TRANSACTION` marker option.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // `COMMENT [=] '<string>'`; whether `=` was present is preserved for
        // round-tripping.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // `ENGINE [=] <name> [(<params>)]`.
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // `TABLESPACE [=] <name> [STORAGE [=] {DISK | MEMORY}]`.
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // `UNION [=] (tbl [, ...])` for MERGE tables.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Generic `key [=] value` options. Multi-word keys must be tried
        // before their single-word prefixes (e.g. DEFAULT CHARSET before
        // CHARSET).
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option: nothing consumed, nothing returned.
            return Ok(None);
        };

        let _ = self.consume_token(&Token::Eq);

        // The value is a literal when possible, otherwise a bare identifier.
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9327
9328 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9330 let mut options = Vec::new();
9331
9332 while let Some(option) = self.parse_plain_option()? {
9333 options.push(option);
9334 let _ = self.consume_token(&Token::Comma);
9337 }
9338
9339 Ok(options)
9340 }
9341
9342 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9344 let comment = if self.parse_keyword(Keyword::COMMENT) {
9345 let has_eq = self.consume_token(&Token::Eq);
9346 let comment = self.parse_comment_value()?;
9347 Some(if has_eq {
9348 CommentDef::WithEq(comment)
9349 } else {
9350 CommentDef::WithoutEq(comment)
9351 })
9352 } else {
9353 None
9354 };
9355 Ok(comment)
9356 }
9357
9358 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9360 let next_token = self.next_token();
9361 let value = match next_token.token {
9362 Token::SingleQuotedString(str) => str,
9363 Token::DollarQuotedString(str) => str.value,
9364 _ => self.expected("string literal", next_token)?,
9365 };
9366 Ok(value)
9367 }
9368
    /// Parses an optional parenthesized procedure parameter list.
    ///
    /// Returns `Some(vec![])` when there is no `(` at all or the list is an
    /// empty `()`; otherwise the parsed parameters.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        // No `(`, or immediately-closed `()`: nothing to parse.
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // NOTE(review): a parameter is only attempted when the next token
            // is a word; otherwise the loop falls through to the separator
            // checks (tolerating e.g. a stray comma) — confirm intended.
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // A trailing comma before `)` is accepted.
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9394
    /// Parses the parenthesized column/constraint list of a `CREATE TABLE`.
    ///
    /// Returns empty vectors when there is no `(` (e.g. `CREATE TABLE t AS
    /// SELECT ...`) or when the list is an empty `()`.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Column definitions and table-level constraints may be freely
            // interleaved; try a constraint first.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Close the list at `)`; a trailing comma before it is accepted
            // only when the dialect or parser options allow trailing commas.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9435
9436 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9438 let mode = if self.parse_keyword(Keyword::IN) {
9439 Some(ArgMode::In)
9440 } else if self.parse_keyword(Keyword::OUT) {
9441 Some(ArgMode::Out)
9442 } else if self.parse_keyword(Keyword::INOUT) {
9443 Some(ArgMode::InOut)
9444 } else {
9445 None
9446 };
9447 let name = self.parse_identifier()?;
9448 let data_type = self.parse_data_type()?;
9449 let default = if self.consume_token(&Token::Eq) {
9450 Some(self.parse_expr()?)
9451 } else {
9452 None
9453 };
9454
9455 Ok(ProcedureParam {
9456 name,
9457 data_type,
9458 mode,
9459 default,
9460 })
9461 }
9462
    /// Parses a column definition within `CREATE TABLE`, requiring an
    /// explicit data type (see [`Self::parse_column_def_inner`]).
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9467
    /// Parses a column definition: name, data type, and a sequence of column
    /// options.
    ///
    /// When `optional_data_type` is true (used for Hive partition columns) a
    /// missing type is recorded as `DataType::Unspecified`; SQLite
    /// additionally allows untyped columns everywhere
    /// (see [`Self::is_column_type_sqlite_unspecified`]).
    fn parse_column_def_inner(
        &mut self,
        optional_data_type: bool,
    ) -> Result<ColumnDef, ParserError> {
        let col_name = self.parse_identifier()?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else if optional_data_type {
            self.maybe_parse(|parser| parser.parse_data_type())?
                .unwrap_or(DataType::Unspecified)
        } else {
            self.parse_data_type()?
        };
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // Named option: `CONSTRAINT <name> <option>`.
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected_ref(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token_ref(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                break;
            };
        }
        Ok(ColumnDef {
            name: col_name,
            data_type,
            options,
        })
    }
9505
9506 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9507 if dialect_of!(self is SQLiteDialect) {
9508 match &self.peek_token_ref().token {
9509 Token::Word(word) => matches!(
9510 word.keyword,
9511 Keyword::CONSTRAINT
9512 | Keyword::PRIMARY
9513 | Keyword::NOT
9514 | Keyword::UNIQUE
9515 | Keyword::CHECK
9516 | Keyword::DEFAULT
9517 | Keyword::COLLATE
9518 | Keyword::REFERENCES
9519 | Keyword::GENERATED
9520 | Keyword::AS
9521 ),
9522 _ => true, }
9524 } else {
9525 false
9526 }
9527 }
9528
9529 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9531 if let Some(option) = self.dialect.parse_column_option(self)? {
9532 return option;
9533 }
9534
9535 self.with_state(
9536 ColumnDefinition,
9537 |parser| -> Result<Option<ColumnOption>, ParserError> {
9538 parser.parse_optional_column_option_inner()
9539 },
9540 )
9541 }
9542
    /// Parses a single column option using the generic, dialect-independent
    /// rules (the active dialect has already had its chance in
    /// `parse_optional_column_option`).
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not begin any known
    /// column option.
    ///
    /// NOTE(review): several branches below use the pattern
    /// `self.parse_keyword(..) && dialect_of!(..)`. The keyword is consumed
    /// *before* the dialect check, so in a non-matching dialect the token is
    /// eaten even though no option is produced — confirm this is intended
    /// (the ClickHouse/BigQuery/SQLite branches test the dialect first).
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            // CHARACTER SET <name>
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            // COLLATE <name>
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            // ClickHouse/Generic: MATERIALIZED <expr>
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            // ClickHouse/Generic: ALIAS <expr>
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // ClickHouse/Generic: EPHEMERAL [<expr>] — the default expression
            // is optional; a following `,` or `)` means it was omitted.
            if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            // Column-level PRIMARY KEY: no name/columns, only characteristics.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            // Column-level UNIQUE [KEY]; the KEY suffix is dialect-gated.
            let index_type_display =
                if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
                    KeyOrIndexDisplay::Key
                } else {
                    KeyOrIndexDisplay::None
                };
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
            // Bare KEY — produced as a primary-key constraint in dialects
            // that allow it.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            // Column-level foreign key:
            // REFERENCES <table> [(<cols>)] [MATCH ..] [ON DELETE ..] [ON UPDATE ..]
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            // The three clauses may appear in any order, each at most once.
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None, index_name: None, columns: vec![], foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The check expression is parsed in the normal state so that
            // column-definition specific parsing rules do not apply inside it.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;

            // Optional [NOT] ENFORCED; None when neither form is present.
            let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                Some(true)
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                Some(false)
            } else {
                None
            };

            Ok(Some(
                CheckConstraint {
                    name: None, expr: Box::new(expr),
                    enforced,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) // NOTE(review): keyword consumed before dialect check — see doc comment
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // MySQL/Generic: ON UPDATE <expr>
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // BigQuery/Generic: OPTIONS(...) — push OPTIONS back so
            // `parse_options` can consume it itself.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            // Generated-column shorthand: AS (<expr>) [STORED | VIRTUAL]
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // MsSql/Generic: IDENTITY [( <seed>, <increment> )]
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            // SQLite/Generic: ON CONFLICT <resolution>
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            // No recognized column option at the current position.
            Ok(None)
        }
    }
9778
9779 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9780 let name = self.parse_object_name(false)?;
9781 self.expect_token(&Token::Eq)?;
9782 let value = self.parse_literal_string()?;
9783
9784 Ok(Tag::new(name, value))
9785 }
9786
9787 fn parse_optional_column_option_generated(
9788 &mut self,
9789 ) -> Result<Option<ColumnOption>, ParserError> {
9790 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
9791 let mut sequence_options = vec![];
9792 if self.expect_token(&Token::LParen).is_ok() {
9793 sequence_options = self.parse_create_sequence_options()?;
9794 self.expect_token(&Token::RParen)?;
9795 }
9796 Ok(Some(ColumnOption::Generated {
9797 generated_as: GeneratedAs::Always,
9798 sequence_options: Some(sequence_options),
9799 generation_expr: None,
9800 generation_expr_mode: None,
9801 generated_keyword: true,
9802 }))
9803 } else if self.parse_keywords(&[
9804 Keyword::BY,
9805 Keyword::DEFAULT,
9806 Keyword::AS,
9807 Keyword::IDENTITY,
9808 ]) {
9809 let mut sequence_options = vec![];
9810 if self.expect_token(&Token::LParen).is_ok() {
9811 sequence_options = self.parse_create_sequence_options()?;
9812 self.expect_token(&Token::RParen)?;
9813 }
9814 Ok(Some(ColumnOption::Generated {
9815 generated_as: GeneratedAs::ByDefault,
9816 sequence_options: Some(sequence_options),
9817 generation_expr: None,
9818 generation_expr_mode: None,
9819 generated_keyword: true,
9820 }))
9821 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
9822 if self.expect_token(&Token::LParen).is_ok() {
9823 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9824 self.expect_token(&Token::RParen)?;
9825 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9826 Ok((
9827 GeneratedAs::ExpStored,
9828 Some(GeneratedExpressionMode::Stored),
9829 ))
9830 } else if dialect_of!(self is PostgreSqlDialect) {
9831 self.expected_ref("STORED", self.peek_token_ref())
9833 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9834 Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
9835 } else {
9836 Ok((GeneratedAs::Always, None))
9837 }?;
9838
9839 Ok(Some(ColumnOption::Generated {
9840 generated_as: gen_as,
9841 sequence_options: None,
9842 generation_expr: Some(expr),
9843 generation_expr_mode: expr_mode,
9844 generated_keyword: true,
9845 }))
9846 } else {
9847 Ok(None)
9848 }
9849 } else {
9850 Ok(None)
9851 }
9852 }
9853
9854 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9855 self.expect_token(&Token::LParen)?;
9857 let expr = self.parse_expr()?;
9858 self.expect_token(&Token::RParen)?;
9859
9860 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9861 (
9862 GeneratedAs::ExpStored,
9863 Some(GeneratedExpressionMode::Stored),
9864 )
9865 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9866 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9867 } else {
9868 (GeneratedAs::Always, None)
9869 };
9870
9871 Ok(Some(ColumnOption::Generated {
9872 generated_as: gen_as,
9873 sequence_options: None,
9874 generation_expr: Some(expr),
9875 generation_expr_mode: expr_mode,
9876 generated_keyword: false,
9877 }))
9878 }
9879
9880 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9882 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9883 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9884 {
9885 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9886
9887 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9888 self.expect_token(&Token::LParen)?;
9889 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9890 self.expect_token(&Token::RParen)?;
9891 Some(sorted_by_columns)
9892 } else {
9893 None
9894 };
9895
9896 self.expect_keyword_is(Keyword::INTO)?;
9897 let num_buckets = self.parse_number_value()?.value;
9898 self.expect_keyword_is(Keyword::BUCKETS)?;
9899 Some(ClusteredBy {
9900 columns,
9901 sorted_by,
9902 num_buckets,
9903 })
9904 } else {
9905 None
9906 };
9907 Ok(clustered_by)
9908 }
9909
9910 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9914 if self.parse_keyword(Keyword::RESTRICT) {
9915 Ok(ReferentialAction::Restrict)
9916 } else if self.parse_keyword(Keyword::CASCADE) {
9917 Ok(ReferentialAction::Cascade)
9918 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9919 Ok(ReferentialAction::SetNull)
9920 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9921 Ok(ReferentialAction::NoAction)
9922 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9923 Ok(ReferentialAction::SetDefault)
9924 } else {
9925 self.expected_ref(
9926 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9927 self.peek_token_ref(),
9928 )
9929 }
9930 }
9931
9932 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9934 if self.parse_keyword(Keyword::FULL) {
9935 Ok(ConstraintReferenceMatchKind::Full)
9936 } else if self.parse_keyword(Keyword::PARTIAL) {
9937 Ok(ConstraintReferenceMatchKind::Partial)
9938 } else if self.parse_keyword(Keyword::SIMPLE) {
9939 Ok(ConstraintReferenceMatchKind::Simple)
9940 } else {
9941 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9942 }
9943 }
9944
9945 fn parse_constraint_using_index(
9948 &mut self,
9949 name: Option<Ident>,
9950 ) -> Result<ConstraintUsingIndex, ParserError> {
9951 let index_name = self.parse_identifier()?;
9952 let characteristics = self.parse_constraint_characteristics()?;
9953 Ok(ConstraintUsingIndex {
9954 name,
9955 index_name,
9956 characteristics,
9957 })
9958 }
9959
9960 pub fn parse_constraint_characteristics(
9962 &mut self,
9963 ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9964 let mut cc = ConstraintCharacteristics::default();
9965
9966 loop {
9967 if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9968 {
9969 cc.deferrable = Some(false);
9970 } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9971 cc.deferrable = Some(true);
9972 } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9973 if self.parse_keyword(Keyword::DEFERRED) {
9974 cc.initially = Some(DeferrableInitial::Deferred);
9975 } else if self.parse_keyword(Keyword::IMMEDIATE) {
9976 cc.initially = Some(DeferrableInitial::Immediate);
9977 } else {
9978 self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
9979 }
9980 } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
9981 cc.enforced = Some(true);
9982 } else if cc.enforced.is_none()
9983 && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
9984 {
9985 cc.enforced = Some(false);
9986 } else {
9987 break;
9988 }
9989 }
9990
9991 if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
9992 Ok(Some(cc))
9993 } else {
9994 Ok(None)
9995 }
9996 }
9997
    /// Parses an optional table-level constraint:
    ///
    /// `[CONSTRAINT [<name>]] {UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK |
    /// INDEX/KEY | FULLTEXT/SPATIAL | EXCLUDE} ...`
    ///
    /// Returns `Ok(None)` when the next tokens do not start a constraint (the
    /// probe token is pushed back via `prev_token`), and an error when
    /// `CONSTRAINT <name>` was consumed but no valid constraint body follows.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT [<name>]` prefix. Some dialects accept the
        // CONSTRAINT keyword without a name when a constraint body keyword
        // follows directly.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // FULLTEXT/SPATIAL are only handled as constraints for the MySQL and
        // generic dialects; elsewhere bail out (peek only, nothing consumed).
        if name.is_none()
            && self
                .peek_one_of_keywords(&[Keyword::FULLTEXT, Keyword::SPATIAL])
                .is_some()
            && !dialect_of!(self is GenericDialect | MySqlDialect)
        {
            return Ok(None);
        }

        // Probe the next token; the catch-all arm pushes it back if it does
        // not start a constraint.
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // UNIQUE USING INDEX <index_name>: references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // Optional KEY/INDEX display word, only valid for the MySQL
                // and generic dialects.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                // Optional NULLS [NOT] DISTINCT.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // KEY is mandatory after PRIMARY.
                self.expect_keyword_is(Keyword::KEY)?;

                // PRIMARY KEY USING INDEX <index_name>.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order,
                // each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // Optional [NOT] ENFORCED; None when absent.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                // Plain INDEX/KEY definition (no CONSTRAINT name allowed).
                let display_as_key = w.keyword == Keyword::KEY;

                // A following USING introduces the index type, not a name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // A CONSTRAINT name is rejected for FULLTEXT/SPATIAL indexes.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // EXCLUDE [USING <method>] (<expr> WITH <op>, ...)
                //   [INCLUDE (<cols>)] [WHERE (<predicate>)] [<characteristics>]
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements =
                    self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                // Optional INCLUDE (<columns>).
                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                // Optional WHERE (<predicate>) — the parentheses are required.
                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // A CONSTRAINT name without a recognized body is an error.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint: push the probe token back.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10287
10288 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10289 let expr = self.parse_expr()?;
10290 self.expect_keyword_is(Keyword::WITH)?;
10291 let operator_token = self.next_token();
10292 let operator = operator_token.token.to_string();
10293 Ok(ExclusionElement { expr, operator })
10294 }
10295
10296 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10297 Ok(if self.parse_keyword(Keyword::NULLS) {
10298 let not = self.parse_keyword(Keyword::NOT);
10299 self.expect_keyword_is(Keyword::DISTINCT)?;
10300 if not {
10301 NullsDistinctOption::NotDistinct
10302 } else {
10303 NullsDistinctOption::Distinct
10304 }
10305 } else {
10306 NullsDistinctOption::None
10307 })
10308 }
10309
10310 pub fn maybe_parse_options(
10312 &mut self,
10313 keyword: Keyword,
10314 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10315 if let Token::Word(word) = &self.peek_token_ref().token {
10316 if word.keyword == keyword {
10317 return Ok(Some(self.parse_options(keyword)?));
10318 }
10319 };
10320 Ok(None)
10321 }
10322
10323 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10325 if self.parse_keyword(keyword) {
10326 self.expect_token(&Token::LParen)?;
10327 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10328 self.expect_token(&Token::RParen)?;
10329 Ok(options)
10330 } else {
10331 Ok(vec![])
10332 }
10333 }
10334
10335 pub fn parse_options_with_keywords(
10337 &mut self,
10338 keywords: &[Keyword],
10339 ) -> Result<Vec<SqlOption>, ParserError> {
10340 if self.parse_keywords(keywords) {
10341 self.expect_token(&Token::LParen)?;
10342 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10343 self.expect_token(&Token::RParen)?;
10344 Ok(options)
10345 } else {
10346 Ok(vec![])
10347 }
10348 }
10349
10350 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10352 Ok(if self.parse_keyword(Keyword::BTREE) {
10353 IndexType::BTree
10354 } else if self.parse_keyword(Keyword::HASH) {
10355 IndexType::Hash
10356 } else if self.parse_keyword(Keyword::GIN) {
10357 IndexType::GIN
10358 } else if self.parse_keyword(Keyword::GIST) {
10359 IndexType::GiST
10360 } else if self.parse_keyword(Keyword::SPGIST) {
10361 IndexType::SPGiST
10362 } else if self.parse_keyword(Keyword::BRIN) {
10363 IndexType::BRIN
10364 } else if self.parse_keyword(Keyword::BLOOM) {
10365 IndexType::Bloom
10366 } else {
10367 IndexType::Custom(self.parse_identifier()?)
10368 })
10369 }
10370
10371 pub fn parse_optional_using_then_index_type(
10378 &mut self,
10379 ) -> Result<Option<IndexType>, ParserError> {
10380 if self.parse_keyword(Keyword::USING) {
10381 Ok(Some(self.parse_index_type()?))
10382 } else {
10383 Ok(None)
10384 }
10385 }
10386
10387 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10391 self.maybe_parse(|parser| parser.parse_identifier())
10392 }
10393
10394 #[must_use]
10395 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10397 if self.parse_keyword(Keyword::KEY) {
10398 KeyOrIndexDisplay::Key
10399 } else if self.parse_keyword(Keyword::INDEX) {
10400 KeyOrIndexDisplay::Index
10401 } else {
10402 KeyOrIndexDisplay::None
10403 }
10404 }
10405
10406 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10408 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10409 Ok(Some(IndexOption::Using(index_type)))
10410 } else if self.parse_keyword(Keyword::COMMENT) {
10411 let s = self.parse_literal_string()?;
10412 Ok(Some(IndexOption::Comment(s)))
10413 } else {
10414 Ok(None)
10415 }
10416 }
10417
10418 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10420 let mut options = Vec::new();
10421
10422 loop {
10423 match self.parse_optional_index_option()? {
10424 Some(index_option) => options.push(index_option),
10425 None => return Ok(options),
10426 }
10427 }
10428 }
10429
10430 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10432 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10433
10434 match &self.peek_token_ref().token {
10435 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10436 Ok(SqlOption::Ident(self.parse_identifier()?))
10437 }
10438 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10439 self.parse_option_partition()
10440 }
10441 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10442 self.parse_option_clustered()
10443 }
10444 _ => {
10445 let name = self.parse_identifier()?;
10446 self.expect_token(&Token::Eq)?;
10447 let value = self.parse_expr()?;
10448
10449 Ok(SqlOption::KeyValue { key: name, value })
10450 }
10451 }
10452 }
10453
10454 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
10456 if self.parse_keywords(&[
10457 Keyword::CLUSTERED,
10458 Keyword::COLUMNSTORE,
10459 Keyword::INDEX,
10460 Keyword::ORDER,
10461 ]) {
10462 Ok(SqlOption::Clustered(
10463 TableOptionsClustered::ColumnstoreIndexOrder(
10464 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10465 ),
10466 ))
10467 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
10468 Ok(SqlOption::Clustered(
10469 TableOptionsClustered::ColumnstoreIndex,
10470 ))
10471 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
10472 self.expect_token(&Token::LParen)?;
10473
10474 let columns = self.parse_comma_separated(|p| {
10475 let name = p.parse_identifier()?;
10476 let asc = p.parse_asc_desc();
10477
10478 Ok(ClusteredIndex { name, asc })
10479 })?;
10480
10481 self.expect_token(&Token::RParen)?;
10482
10483 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
10484 } else {
10485 Err(ParserError::ParserError(
10486 "invalid CLUSTERED sequence".to_string(),
10487 ))
10488 }
10489 }
10490
10491 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10493 self.expect_keyword_is(Keyword::PARTITION)?;
10494 self.expect_token(&Token::LParen)?;
10495 let column_name = self.parse_identifier()?;
10496
10497 self.expect_keyword_is(Keyword::RANGE)?;
10498 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10499 Some(PartitionRangeDirection::Left)
10500 } else if self.parse_keyword(Keyword::RIGHT) {
10501 Some(PartitionRangeDirection::Right)
10502 } else {
10503 None
10504 };
10505
10506 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10507 self.expect_token(&Token::LParen)?;
10508
10509 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10510
10511 self.expect_token(&Token::RParen)?;
10512 self.expect_token(&Token::RParen)?;
10513
10514 Ok(SqlOption::Partition {
10515 column_name,
10516 range_direction,
10517 for_values,
10518 })
10519 }
10520
10521 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10523 self.expect_token(&Token::LParen)?;
10524 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10525 self.expect_token(&Token::RParen)?;
10526 Ok(Partition::Partitions(partitions))
10527 }
10528
10529 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10531 self.expect_token(&Token::LParen)?;
10532 self.expect_keyword_is(Keyword::SELECT)?;
10533 let projection = self.parse_projection()?;
10534 let group_by = self.parse_optional_group_by()?;
10535 let order_by = self.parse_optional_order_by()?;
10536 self.expect_token(&Token::RParen)?;
10537 Ok(ProjectionSelect {
10538 projection,
10539 group_by,
10540 order_by,
10541 })
10542 }
10543 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10545 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10546 let name = self.parse_identifier()?;
10547 let query = self.parse_projection_select()?;
10548 Ok(AlterTableOperation::AddProjection {
10549 if_not_exists,
10550 name,
10551 select: query,
10552 })
10553 }
10554
10555 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10559 self.expect_keyword_is(Keyword::ALTER)?;
10560 self.expect_keyword_is(Keyword::SORTKEY)?;
10561 self.expect_token(&Token::LParen)?;
10562 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10563 self.expect_token(&Token::RParen)?;
10564 Ok(AlterTableOperation::AlterSortKey { columns })
10565 }
10566
10567 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10569 let operation = if self.parse_keyword(Keyword::ADD) {
10570 if let Some(constraint) = self.parse_optional_table_constraint()? {
10571 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10572 AlterTableOperation::AddConstraint {
10573 constraint,
10574 not_valid,
10575 }
10576 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10577 && self.parse_keyword(Keyword::PROJECTION)
10578 {
10579 return self.parse_alter_table_add_projection();
10580 } else {
10581 let if_not_exists =
10582 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10583 let mut new_partitions = vec![];
10584 loop {
10585 if self.parse_keyword(Keyword::PARTITION) {
10586 new_partitions.push(self.parse_partition()?);
10587 } else {
10588 break;
10589 }
10590 }
10591 if !new_partitions.is_empty() {
10592 AlterTableOperation::AddPartitions {
10593 if_not_exists,
10594 new_partitions,
10595 }
10596 } else {
10597 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10598
10599 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10600 {
10601 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10602 || if_not_exists
10603 } else {
10604 false
10605 };
10606
10607 let column_def = self.parse_column_def()?;
10608
10609 let column_position = self.parse_column_position()?;
10610
10611 AlterTableOperation::AddColumn {
10612 column_keyword,
10613 if_not_exists,
10614 column_def,
10615 column_position,
10616 }
10617 }
10618 }
10619 } else if self.parse_keyword(Keyword::RENAME) {
10620 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10621 let old_name = self.parse_identifier()?;
10622 self.expect_keyword_is(Keyword::TO)?;
10623 let new_name = self.parse_identifier()?;
10624 AlterTableOperation::RenameConstraint { old_name, new_name }
10625 } else if self.parse_keyword(Keyword::TO) {
10626 let table_name = self.parse_object_name(false)?;
10627 AlterTableOperation::RenameTable {
10628 table_name: RenameTableNameKind::To(table_name),
10629 }
10630 } else if self.parse_keyword(Keyword::AS) {
10631 let table_name = self.parse_object_name(false)?;
10632 AlterTableOperation::RenameTable {
10633 table_name: RenameTableNameKind::As(table_name),
10634 }
10635 } else {
10636 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10638 self.expect_keyword_is(Keyword::TO)?;
10639 let new_column_name = self.parse_identifier()?;
10640 AlterTableOperation::RenameColumn {
10641 old_column_name,
10642 new_column_name,
10643 }
10644 }
10645 } else if self.parse_keyword(Keyword::DISABLE) {
10646 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10647 AlterTableOperation::DisableRowLevelSecurity {}
10648 } else if self.parse_keyword(Keyword::RULE) {
10649 let name = self.parse_identifier()?;
10650 AlterTableOperation::DisableRule { name }
10651 } else if self.parse_keyword(Keyword::TRIGGER) {
10652 let name = self.parse_identifier()?;
10653 AlterTableOperation::DisableTrigger { name }
10654 } else {
10655 return self.expected_ref(
10656 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10657 self.peek_token_ref(),
10658 );
10659 }
10660 } else if self.parse_keyword(Keyword::ENABLE) {
10661 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10662 let name = self.parse_identifier()?;
10663 AlterTableOperation::EnableAlwaysRule { name }
10664 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10665 let name = self.parse_identifier()?;
10666 AlterTableOperation::EnableAlwaysTrigger { name }
10667 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10668 AlterTableOperation::EnableRowLevelSecurity {}
10669 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10670 let name = self.parse_identifier()?;
10671 AlterTableOperation::EnableReplicaRule { name }
10672 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10673 let name = self.parse_identifier()?;
10674 AlterTableOperation::EnableReplicaTrigger { name }
10675 } else if self.parse_keyword(Keyword::RULE) {
10676 let name = self.parse_identifier()?;
10677 AlterTableOperation::EnableRule { name }
10678 } else if self.parse_keyword(Keyword::TRIGGER) {
10679 let name = self.parse_identifier()?;
10680 AlterTableOperation::EnableTrigger { name }
10681 } else {
10682 return self.expected_ref(
10683 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10684 self.peek_token_ref(),
10685 );
10686 }
10687 } else if self.parse_keywords(&[
10688 Keyword::FORCE,
10689 Keyword::ROW,
10690 Keyword::LEVEL,
10691 Keyword::SECURITY,
10692 ]) {
10693 AlterTableOperation::ForceRowLevelSecurity
10694 } else if self.parse_keywords(&[
10695 Keyword::NO,
10696 Keyword::FORCE,
10697 Keyword::ROW,
10698 Keyword::LEVEL,
10699 Keyword::SECURITY,
10700 ]) {
10701 AlterTableOperation::NoForceRowLevelSecurity
10702 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10703 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10704 {
10705 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10706 let name = self.parse_identifier()?;
10707 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10708 Some(self.parse_identifier()?)
10709 } else {
10710 None
10711 };
10712 AlterTableOperation::ClearProjection {
10713 if_exists,
10714 name,
10715 partition,
10716 }
10717 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10718 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10719 {
10720 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10721 let name = self.parse_identifier()?;
10722 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10723 Some(self.parse_identifier()?)
10724 } else {
10725 None
10726 };
10727 AlterTableOperation::MaterializeProjection {
10728 if_exists,
10729 name,
10730 partition,
10731 }
10732 } else if self.parse_keyword(Keyword::DROP) {
10733 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10734 self.expect_token(&Token::LParen)?;
10735 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10736 self.expect_token(&Token::RParen)?;
10737 AlterTableOperation::DropPartitions {
10738 partitions,
10739 if_exists: true,
10740 }
10741 } else if self.parse_keyword(Keyword::PARTITION) {
10742 self.expect_token(&Token::LParen)?;
10743 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10744 self.expect_token(&Token::RParen)?;
10745 AlterTableOperation::DropPartitions {
10746 partitions,
10747 if_exists: false,
10748 }
10749 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10750 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10751 let name = self.parse_identifier()?;
10752 let drop_behavior = self.parse_optional_drop_behavior();
10753 AlterTableOperation::DropConstraint {
10754 if_exists,
10755 name,
10756 drop_behavior,
10757 }
10758 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10759 let drop_behavior = self.parse_optional_drop_behavior();
10760 AlterTableOperation::DropPrimaryKey { drop_behavior }
10761 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10762 let name = self.parse_identifier()?;
10763 let drop_behavior = self.parse_optional_drop_behavior();
10764 AlterTableOperation::DropForeignKey {
10765 name,
10766 drop_behavior,
10767 }
10768 } else if self.parse_keyword(Keyword::INDEX) {
10769 let name = self.parse_identifier()?;
10770 AlterTableOperation::DropIndex { name }
10771 } else if self.parse_keyword(Keyword::PROJECTION)
10772 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10773 {
10774 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10775 let name = self.parse_identifier()?;
10776 AlterTableOperation::DropProjection { if_exists, name }
10777 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10778 AlterTableOperation::DropClusteringKey
10779 } else {
10780 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10782 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10783 self.parse_comma_separated(Parser::parse_identifier)?
10784 } else {
10785 vec![self.parse_identifier()?]
10786 };
10787 let drop_behavior = self.parse_optional_drop_behavior();
10788 AlterTableOperation::DropColumn {
10789 has_column_keyword,
10790 column_names,
10791 if_exists,
10792 drop_behavior,
10793 }
10794 }
10795 } else if self.parse_keyword(Keyword::PARTITION) {
10796 self.expect_token(&Token::LParen)?;
10797 let before = self.parse_comma_separated(Parser::parse_expr)?;
10798 self.expect_token(&Token::RParen)?;
10799 self.expect_keyword_is(Keyword::RENAME)?;
10800 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10801 self.expect_token(&Token::LParen)?;
10802 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10803 self.expect_token(&Token::RParen)?;
10804 AlterTableOperation::RenamePartitions {
10805 old_partitions: before,
10806 new_partitions: renames,
10807 }
10808 } else if self.parse_keyword(Keyword::CHANGE) {
10809 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10811 let new_name = self.parse_identifier()?;
10812 let data_type = self.parse_data_type()?;
10813 let mut options = vec![];
10814 while let Some(option) = self.parse_optional_column_option()? {
10815 options.push(option);
10816 }
10817
10818 let column_position = self.parse_column_position()?;
10819
10820 AlterTableOperation::ChangeColumn {
10821 old_name,
10822 new_name,
10823 data_type,
10824 options,
10825 column_position,
10826 }
10827 } else if self.parse_keyword(Keyword::MODIFY) {
10828 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10830 let data_type = self.parse_data_type()?;
10831 let mut options = vec![];
10832 while let Some(option) = self.parse_optional_column_option()? {
10833 options.push(option);
10834 }
10835
10836 let column_position = self.parse_column_position()?;
10837
10838 AlterTableOperation::ModifyColumn {
10839 col_name,
10840 data_type,
10841 options,
10842 column_position,
10843 }
10844 } else if self.parse_keyword(Keyword::ALTER) {
10845 if self.peek_keyword(Keyword::SORTKEY) {
10846 self.prev_token();
10847 return self.parse_alter_sort_key();
10848 }
10849
10850 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10852 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10853
10854 let op: AlterColumnOperation = if self.parse_keywords(&[
10855 Keyword::SET,
10856 Keyword::NOT,
10857 Keyword::NULL,
10858 ]) {
10859 AlterColumnOperation::SetNotNull {}
10860 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10861 AlterColumnOperation::DropNotNull {}
10862 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10863 AlterColumnOperation::SetDefault {
10864 value: self.parse_expr()?,
10865 }
10866 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10867 AlterColumnOperation::DropDefault {}
10868 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10869 self.parse_set_data_type(true)?
10870 } else if self.parse_keyword(Keyword::TYPE) {
10871 self.parse_set_data_type(false)?
10872 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10873 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10874 Some(GeneratedAs::Always)
10875 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10876 Some(GeneratedAs::ByDefault)
10877 } else {
10878 None
10879 };
10880
10881 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10882
10883 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10884
10885 if self.peek_token_ref().token == Token::LParen {
10886 self.expect_token(&Token::LParen)?;
10887 sequence_options = Some(self.parse_create_sequence_options()?);
10888 self.expect_token(&Token::RParen)?;
10889 }
10890
10891 AlterColumnOperation::AddGenerated {
10892 generated_as,
10893 sequence_options,
10894 }
10895 } else {
10896 let message = if is_postgresql {
10897 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10898 } else {
10899 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10900 };
10901
10902 return self.expected_ref(message, self.peek_token_ref());
10903 };
10904 AlterTableOperation::AlterColumn { column_name, op }
10905 } else if self.parse_keyword(Keyword::SWAP) {
10906 self.expect_keyword_is(Keyword::WITH)?;
10907 let table_name = self.parse_object_name(false)?;
10908 AlterTableOperation::SwapWith { table_name }
10909 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10910 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10911 {
10912 let new_owner = self.parse_owner()?;
10913 AlterTableOperation::OwnerTo { new_owner }
10914 } else if dialect_of!(self is PostgreSqlDialect)
10915 && self.parse_keywords(&[Keyword::ATTACH, Keyword::PARTITION])
10916 {
10917 let partition_name = self.parse_object_name(false)?;
10918 let partition_bound = self.parse_partition_for_values()?;
10919 AlterTableOperation::AttachPartitionOf {
10920 partition_name,
10921 partition_bound,
10922 }
10923 } else if dialect_of!(self is PostgreSqlDialect)
10924 && self.parse_keywords(&[Keyword::DETACH, Keyword::PARTITION])
10925 {
10926 let partition_name = self.parse_object_name(false)?;
10927 let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
10928 let finalize = self.parse_keyword(Keyword::FINALIZE);
10929 AlterTableOperation::DetachPartitionOf {
10930 partition_name,
10931 concurrently,
10932 finalize,
10933 }
10934 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10935 && self.parse_keyword(Keyword::ATTACH)
10936 {
10937 AlterTableOperation::AttachPartition {
10938 partition: self.parse_part_or_partition()?,
10939 }
10940 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10941 && self.parse_keyword(Keyword::DETACH)
10942 {
10943 AlterTableOperation::DetachPartition {
10944 partition: self.parse_part_or_partition()?,
10945 }
10946 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10947 && self.parse_keyword(Keyword::FREEZE)
10948 {
10949 let partition = self.parse_part_or_partition()?;
10950 let with_name = if self.parse_keyword(Keyword::WITH) {
10951 self.expect_keyword_is(Keyword::NAME)?;
10952 Some(self.parse_identifier()?)
10953 } else {
10954 None
10955 };
10956 AlterTableOperation::FreezePartition {
10957 partition,
10958 with_name,
10959 }
10960 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10961 && self.parse_keyword(Keyword::UNFREEZE)
10962 {
10963 let partition = self.parse_part_or_partition()?;
10964 let with_name = if self.parse_keyword(Keyword::WITH) {
10965 self.expect_keyword_is(Keyword::NAME)?;
10966 Some(self.parse_identifier()?)
10967 } else {
10968 None
10969 };
10970 AlterTableOperation::UnfreezePartition {
10971 partition,
10972 with_name,
10973 }
10974 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10975 self.expect_token(&Token::LParen)?;
10976 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
10977 self.expect_token(&Token::RParen)?;
10978 AlterTableOperation::ClusterBy { exprs }
10979 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
10980 AlterTableOperation::SuspendRecluster
10981 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
10982 AlterTableOperation::ResumeRecluster
10983 } else if self.parse_keyword(Keyword::LOCK) {
10984 let equals = self.consume_token(&Token::Eq);
10985 let lock = match self.parse_one_of_keywords(&[
10986 Keyword::DEFAULT,
10987 Keyword::EXCLUSIVE,
10988 Keyword::NONE,
10989 Keyword::SHARED,
10990 ]) {
10991 Some(Keyword::DEFAULT) => AlterTableLock::Default,
10992 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
10993 Some(Keyword::NONE) => AlterTableLock::None,
10994 Some(Keyword::SHARED) => AlterTableLock::Shared,
10995 _ => self.expected_ref(
10996 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
10997 self.peek_token_ref(),
10998 )?,
10999 };
11000 AlterTableOperation::Lock { equals, lock }
11001 } else if self.parse_keyword(Keyword::ALGORITHM) {
11002 let equals = self.consume_token(&Token::Eq);
11003 let algorithm = match self.parse_one_of_keywords(&[
11004 Keyword::DEFAULT,
11005 Keyword::INSTANT,
11006 Keyword::INPLACE,
11007 Keyword::COPY,
11008 ]) {
11009 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
11010 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
11011 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
11012 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
11013 _ => self.expected_ref(
11014 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
11015 self.peek_token_ref(),
11016 )?,
11017 };
11018 AlterTableOperation::Algorithm { equals, algorithm }
11019 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
11020 let equals = self.consume_token(&Token::Eq);
11021 let value = self.parse_number_value()?;
11022 AlterTableOperation::AutoIncrement { equals, value }
11023 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
11024 let identity = if self.parse_keyword(Keyword::NOTHING) {
11025 ReplicaIdentity::Nothing
11026 } else if self.parse_keyword(Keyword::FULL) {
11027 ReplicaIdentity::Full
11028 } else if self.parse_keyword(Keyword::DEFAULT) {
11029 ReplicaIdentity::Default
11030 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
11031 ReplicaIdentity::Index(self.parse_identifier()?)
11032 } else {
11033 return self.expected_ref(
11034 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
11035 self.peek_token_ref(),
11036 );
11037 };
11038
11039 AlterTableOperation::ReplicaIdentity { identity }
11040 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
11041 let name = self.parse_identifier()?;
11042 AlterTableOperation::ValidateConstraint { name }
11043 } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
11044 let tablespace_name = self.parse_identifier()?;
11045 AlterTableOperation::SetTablespace { tablespace_name }
11046 } else {
11047 let mut options =
11048 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
11049 if !options.is_empty() {
11050 AlterTableOperation::SetTblProperties {
11051 table_properties: options,
11052 }
11053 } else {
11054 options = self.parse_options(Keyword::SET)?;
11055 if !options.is_empty() {
11056 AlterTableOperation::SetOptionsParens { options }
11057 } else {
11058 return self.expected_ref(
11059 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
11060 self.peek_token_ref(),
11061 );
11062 }
11063 }
11064 };
11065 Ok(operation)
11066 }
11067
11068 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
11069 let data_type = self.parse_data_type()?;
11070 let using = if self.dialect.supports_alter_column_type_using()
11071 && self.parse_keyword(Keyword::USING)
11072 {
11073 Some(self.parse_expr()?)
11074 } else {
11075 None
11076 };
11077 Ok(AlterColumnOperation::SetDataType {
11078 data_type,
11079 using,
11080 had_set,
11081 })
11082 }
11083
11084 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
11085 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
11086 match keyword {
11087 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
11088 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
11089 unexpected_keyword => Err(ParserError::ParserError(
11091 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
11092 )),
11093 }
11094 }
11095
    /// Parses an `ALTER ...` statement, dispatching on the object-type keyword
    /// that follows `ALTER` (TABLE, VIEW, INDEX, FUNCTION, ...).
    ///
    /// The `ALTER` keyword itself has already been consumed by the caller.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
            Keyword::DOMAIN,
            Keyword::TRIGGER,
            Keyword::EXTENSION,
            Keyword::PROCEDURE,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // Back up two tokens so `parse_alter_schema` can reparse the
                // statement from earlier in the token stream itself.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // Snowflake `ALTER ICEBERG TABLE ...`: same grammar as ALTER
                // TABLE but flagged as an iceberg table.
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::INDEX => {
                let index_name = self.parse_object_name(false)?;
                // Only `RENAME TO <name>` and `SET TABLESPACE <name>` are
                // supported after ALTER INDEX.
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
                    let tablespace_name = self.parse_identifier()?;
                    AlterIndexOperation::SetTablespace { tablespace_name }
                } else {
                    return self.expected_ref(
                        "RENAME or SET TABLESPACE after ALTER INDEX",
                        self.peek_token_ref(),
                    );
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            // FUNCTION/AGGREGATE/PROCEDURE share one parser, distinguished by kind.
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::PROCEDURE => self.parse_alter_function(AlterFunctionKind::Procedure),
            Keyword::OPERATOR => {
                // OPERATOR FAMILY / OPERATOR CLASS / bare OPERATOR each have
                // their own sub-parser.
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            Keyword::DOMAIN => self.parse_alter_domain(),
            Keyword::TRIGGER => self.parse_alter_trigger(),
            Keyword::EXTENSION => self.parse_alter_extension(),
            // Unreachable in practice: expect_one_of_keywords only returns
            // keywords from the list above.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR, DOMAIN, TRIGGER, EXTENSION, PROCEDURE}}, got {unexpected_keyword:?}"),
            )),
        }
    }
11181
11182 fn parse_alter_aggregate_signature(
11183 &mut self,
11184 ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
11185 let name = self.parse_object_name(false)?;
11186 self.expect_token(&Token::LParen)?;
11187
11188 if self.consume_token(&Token::Mul) {
11189 self.expect_token(&Token::RParen)?;
11190 return Ok((
11191 FunctionDesc {
11192 name,
11193 args: Some(vec![]),
11194 },
11195 true,
11196 None,
11197 ));
11198 }
11199
11200 let args =
11201 if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
11202 vec![]
11203 } else {
11204 self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
11205 };
11206
11207 let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11208 Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
11209 } else {
11210 None
11211 };
11212
11213 self.expect_token(&Token::RParen)?;
11214 Ok((
11215 FunctionDesc {
11216 name,
11217 args: Some(args),
11218 },
11219 false,
11220 aggregate_order_by,
11221 ))
11222 }
11223
    /// Parses a single `ALTER FUNCTION`/`ALTER PROCEDURE` attribute action
    /// (behavior, strictness, leakproofness, security, parallel mode, cost,
    /// rows, support function, or a `SET`/`RESET` of a configuration
    /// parameter).
    ///
    /// Returns `Ok(None)` when the lookahead does not start any known action.
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        // Branch order matters: e.g. `EXTERNAL SECURITY ...` must be tried
        // before bare `SECURITY ...`, and `NOT LEAKPROOF` before `LEAKPROOF`,
        // because each check consumes its keywords on a match.
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // `NOT LEAKPROOF`
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // `EXTERNAL SECURITY { DEFINER | INVOKER }`
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            // `SECURITY { DEFINER | INVOKER }` (without EXTERNAL)
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            // `PARALLEL { UNSAFE | RESTRICTED | SAFE }`
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // `SET name { FROM CURRENT | { = | TO } { DEFAULT | value, ... } }`
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // Either `=` or `TO` must separate the name from its value.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // `RESET { ALL | configuration_parameter }`
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            None
        };

        Ok(action)
    }
11333
11334 fn parse_alter_function_actions(
11335 &mut self,
11336 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11337 let mut actions = vec![];
11338 while let Some(action) = self.parse_alter_function_action()? {
11339 actions.push(action);
11340 }
11341 if actions.is_empty() {
11342 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11343 }
11344 let restrict = self.parse_keyword(Keyword::RESTRICT);
11345 Ok((actions, restrict))
11346 }
11347
    /// Parses the body of an `ALTER FUNCTION`, `ALTER PROCEDURE`, or
    /// `ALTER AGGREGATE` statement; `kind` records which of the three the
    /// caller saw.
    pub fn parse_alter_function(
        &mut self,
        kind: AlterFunctionKind,
    ) -> Result<Statement, ParserError> {
        // Aggregates use a dedicated signature form that may include `(*)`
        // and an ORDER BY argument list; functions/procedures do not.
        let (function, aggregate_star, aggregate_order_by) = match kind {
            AlterFunctionKind::Function | AlterFunctionKind::Procedure => {
                (self.parse_function_desc()?, false, None)
            }
            AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
        };

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterFunctionOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterFunctionOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterFunctionOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure)
            && self.parse_keyword(Keyword::NO)
        {
            // `[NO] DEPENDS ON EXTENSION ...` is only accepted for functions
            // and procedures, not aggregates.
            if !self.parse_keyword(Keyword::DEPENDS) {
                return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
            }
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: true,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure)
            && self.parse_keyword(Keyword::DEPENDS)
        {
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: false,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure) {
            // Fall back to a list of attribute actions (IMMUTABLE, COST,
            // SET ..., etc.); at least one action is required.
            let (actions, restrict) = self.parse_alter_function_actions()?;
            AlterFunctionOperation::Actions { actions, restrict }
        } else {
            // Aggregates only support the rename/owner/schema operations
            // handled above.
            return self.expected_ref(
                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterFunction(AlterFunction {
            kind,
            function,
            aggregate_order_by,
            aggregate_star,
            operation,
        }))
    }
11406
    /// Parses an `ALTER DOMAIN <name> <operation>` statement. The
    /// `ALTER DOMAIN` keywords have already been consumed by the caller.
    pub fn parse_alter_domain(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        // Each branch consumes its keywords only when the whole prefix
        // matches, so check order matters (e.g. DROP CONSTRAINT before
        // DROP DEFAULT, RENAME CONSTRAINT before RENAME TO).
        let operation = if self.parse_keyword(Keyword::ADD) {
            // `ADD <constraint> [NOT VALID]`
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterDomainOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else {
                return self.expected_ref("constraint after ADD", self.peek_token_ref());
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::CONSTRAINT]) {
            // `DROP CONSTRAINT [IF EXISTS] <name> [CASCADE | RESTRICT]`
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterDomainOperation::DropConstraint {
                if_exists,
                name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
            AlterDomainOperation::DropDefault
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::CONSTRAINT]) {
            let old_name = self.parse_identifier()?;
            self.expect_keyword_is(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameConstraint { old_name, new_name }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterDomainOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterDomainOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
            AlterDomainOperation::SetDefault {
                default: self.parse_expr()?,
            }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterDomainOperation::ValidateConstraint { name }
        } else {
            return self.expected_ref(
                "ADD, DROP, RENAME, OWNER TO, SET, VALIDATE after ALTER DOMAIN",
                self.peek_token_ref(),
            );
        };

        Ok(AlterDomain { name, operation }.into())
    }
11462
11463 pub fn parse_alter_trigger(&mut self) -> Result<Statement, ParserError> {
11465 let name = self.parse_identifier()?;
11466 self.expect_keyword_is(Keyword::ON)?;
11467 let table_name = self.parse_object_name(false)?;
11468
11469 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11470 let new_name = self.parse_identifier()?;
11471 AlterTriggerOperation::RenameTo { new_name }
11472 } else {
11473 return self.expected_ref("RENAME TO after ALTER TRIGGER ... ON ...", self.peek_token_ref());
11474 };
11475
11476 Ok(AlterTrigger {
11477 name,
11478 table_name,
11479 operation,
11480 }
11481 .into())
11482 }
11483
11484 pub fn parse_alter_extension(&mut self) -> Result<Statement, ParserError> {
11486 let name = self.parse_identifier()?;
11487
11488 let operation = if self.parse_keyword(Keyword::UPDATE) {
11489 let version = if self.parse_keyword(Keyword::TO) {
11490 Some(self.parse_identifier()?)
11491 } else {
11492 None
11493 };
11494 AlterExtensionOperation::UpdateTo { version }
11495 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11496 AlterExtensionOperation::SetSchema {
11497 schema_name: self.parse_object_name(false)?,
11498 }
11499 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11500 AlterExtensionOperation::OwnerTo(self.parse_owner()?)
11501 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11502 let new_name = self.parse_identifier()?;
11503 AlterExtensionOperation::RenameTo { new_name }
11504 } else {
11505 return self.expected_ref(
11506 "UPDATE, SET SCHEMA, OWNER TO, or RENAME TO after ALTER EXTENSION",
11507 self.peek_token_ref(),
11508 );
11509 };
11510
11511 Ok(AlterExtension { name, operation }.into())
11512 }
11513
11514 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11516 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11517 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11519 let on_cluster = self.parse_optional_on_cluster()?;
11520 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11521
11522 let mut location = None;
11523 if self.parse_keyword(Keyword::LOCATION) {
11524 location = Some(HiveSetLocation {
11525 has_set: false,
11526 location: self.parse_identifier()?,
11527 });
11528 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11529 location = Some(HiveSetLocation {
11530 has_set: true,
11531 location: self.parse_identifier()?,
11532 });
11533 }
11534
11535 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11536 self.peek_token_ref().clone()
11537 } else {
11538 self.get_current_token().clone()
11539 };
11540
11541 Ok(AlterTable {
11542 name: table_name,
11543 if_exists,
11544 only,
11545 operations,
11546 location,
11547 on_cluster,
11548 table_type: if iceberg {
11549 Some(AlterTableType::Iceberg)
11550 } else {
11551 None
11552 },
11553 end_token: AttachedToken(end_token),
11554 }
11555 .into())
11556 }
11557
11558 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11560 let name = self.parse_object_name(false)?;
11561 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11562
11563 let with_options = self.parse_options(Keyword::WITH)?;
11564
11565 self.expect_keyword_is(Keyword::AS)?;
11566 let query = self.parse_query()?;
11567
11568 Ok(Statement::AlterView {
11569 name,
11570 columns,
11571 query,
11572 with_options,
11573 })
11574 }
11575
11576 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
11578 let name = self.parse_object_name(false)?;
11579
11580 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11581 let new_name = self.parse_identifier()?;
11582 Ok(Statement::AlterType(AlterType {
11583 name,
11584 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
11585 }))
11586 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
11587 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
11588 let new_enum_value = self.parse_identifier()?;
11589 let position = if self.parse_keyword(Keyword::BEFORE) {
11590 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
11591 } else if self.parse_keyword(Keyword::AFTER) {
11592 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
11593 } else {
11594 None
11595 };
11596
11597 Ok(Statement::AlterType(AlterType {
11598 name,
11599 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
11600 if_not_exists,
11601 value: new_enum_value,
11602 position,
11603 }),
11604 }))
11605 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
11606 let existing_enum_value = self.parse_identifier()?;
11607 self.expect_keyword(Keyword::TO)?;
11608 let new_enum_value = self.parse_identifier()?;
11609
11610 Ok(Statement::AlterType(AlterType {
11611 name,
11612 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
11613 from: existing_enum_value,
11614 to: new_enum_value,
11615 }),
11616 }))
11617 } else {
11618 self.expected_ref(
11619 "{RENAME TO | { RENAME | ADD } VALUE}",
11620 self.peek_token_ref(),
11621 )
11622 }
11623 }
11624
11625 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11629 let name = self.parse_object_name(false)?;
11630 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11631 AlterCollationOperation::RenameTo {
11632 new_name: self.parse_identifier()?,
11633 }
11634 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11635 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11636 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11637 AlterCollationOperation::SetSchema {
11638 schema_name: self.parse_object_name(false)?,
11639 }
11640 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11641 AlterCollationOperation::RefreshVersion
11642 } else {
11643 return self.expected_ref(
11644 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11645 self.peek_token_ref(),
11646 );
11647 };
11648
11649 Ok(AlterCollation { name, operation })
11650 }
11651
11652 pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
11656 let name = self.parse_operator_name()?;
11657
11658 self.expect_token(&Token::LParen)?;
11660
11661 let left_type = if self.parse_keyword(Keyword::NONE) {
11662 None
11663 } else {
11664 Some(self.parse_data_type()?)
11665 };
11666
11667 self.expect_token(&Token::Comma)?;
11668 let right_type = self.parse_data_type()?;
11669 self.expect_token(&Token::RParen)?;
11670
11671 let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11673 let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
11674 Owner::CurrentRole
11675 } else if self.parse_keyword(Keyword::CURRENT_USER) {
11676 Owner::CurrentUser
11677 } else if self.parse_keyword(Keyword::SESSION_USER) {
11678 Owner::SessionUser
11679 } else {
11680 Owner::Ident(self.parse_identifier()?)
11681 };
11682 AlterOperatorOperation::OwnerTo(owner)
11683 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11684 let schema_name = self.parse_object_name(false)?;
11685 AlterOperatorOperation::SetSchema { schema_name }
11686 } else if self.parse_keyword(Keyword::SET) {
11687 self.expect_token(&Token::LParen)?;
11688
11689 let mut options = Vec::new();
11690 loop {
11691 let keyword = self.expect_one_of_keywords(&[
11692 Keyword::RESTRICT,
11693 Keyword::JOIN,
11694 Keyword::COMMUTATOR,
11695 Keyword::NEGATOR,
11696 Keyword::HASHES,
11697 Keyword::MERGES,
11698 ])?;
11699
11700 match keyword {
11701 Keyword::RESTRICT => {
11702 self.expect_token(&Token::Eq)?;
11703 let proc_name = if self.parse_keyword(Keyword::NONE) {
11704 None
11705 } else {
11706 Some(self.parse_object_name(false)?)
11707 };
11708 options.push(OperatorOption::Restrict(proc_name));
11709 }
11710 Keyword::JOIN => {
11711 self.expect_token(&Token::Eq)?;
11712 let proc_name = if self.parse_keyword(Keyword::NONE) {
11713 None
11714 } else {
11715 Some(self.parse_object_name(false)?)
11716 };
11717 options.push(OperatorOption::Join(proc_name));
11718 }
11719 Keyword::COMMUTATOR => {
11720 self.expect_token(&Token::Eq)?;
11721 let op_name = self.parse_operator_name()?;
11722 options.push(OperatorOption::Commutator(op_name));
11723 }
11724 Keyword::NEGATOR => {
11725 self.expect_token(&Token::Eq)?;
11726 let op_name = self.parse_operator_name()?;
11727 options.push(OperatorOption::Negator(op_name));
11728 }
11729 Keyword::HASHES => {
11730 options.push(OperatorOption::Hashes);
11731 }
11732 Keyword::MERGES => {
11733 options.push(OperatorOption::Merges);
11734 }
11735 unexpected_keyword => return Err(ParserError::ParserError(
11736 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
11737 )),
11738 }
11739
11740 if !self.consume_token(&Token::Comma) {
11741 break;
11742 }
11743 }
11744
11745 self.expect_token(&Token::RParen)?;
11746 AlterOperatorOperation::Set { options }
11747 } else {
11748 return self.expected_ref(
11749 "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
11750 self.peek_token_ref(),
11751 );
11752 };
11753
11754 Ok(AlterOperator {
11755 name,
11756 left_type,
11757 right_type,
11758 operation,
11759 })
11760 }
11761
11762 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11764 let strategy_number = self.parse_literal_uint()?;
11765 let operator_name = self.parse_operator_name()?;
11766
11767 self.expect_token(&Token::LParen)?;
11769 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11770 self.expect_token(&Token::RParen)?;
11771
11772 let purpose = if self.parse_keyword(Keyword::FOR) {
11774 if self.parse_keyword(Keyword::SEARCH) {
11775 Some(OperatorPurpose::ForSearch)
11776 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11777 let sort_family = self.parse_object_name(false)?;
11778 Some(OperatorPurpose::ForOrderBy { sort_family })
11779 } else {
11780 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11781 }
11782 } else {
11783 None
11784 };
11785
11786 Ok(OperatorFamilyItem::Operator {
11787 strategy_number,
11788 operator_name,
11789 op_types,
11790 purpose,
11791 })
11792 }
11793
11794 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11796 let support_number = self.parse_literal_uint()?;
11797
11798 let op_types =
11800 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
11801 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11802 self.expect_token(&Token::RParen)?;
11803 Some(types)
11804 } else if self.consume_token(&Token::LParen) {
11805 self.expect_token(&Token::RParen)?;
11806 Some(vec![])
11807 } else {
11808 None
11809 };
11810
11811 let function_name = self.parse_object_name(false)?;
11812
11813 let argument_types = if self.consume_token(&Token::LParen) {
11815 if self.peek_token_ref().token == Token::RParen {
11816 self.expect_token(&Token::RParen)?;
11817 vec![]
11818 } else {
11819 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11820 self.expect_token(&Token::RParen)?;
11821 types
11822 }
11823 } else {
11824 vec![]
11825 };
11826
11827 Ok(OperatorFamilyItem::Function {
11828 support_number,
11829 op_types,
11830 function_name,
11831 argument_types,
11832 })
11833 }
11834
11835 fn parse_operator_family_drop_operator(
11837 &mut self,
11838 ) -> Result<OperatorFamilyDropItem, ParserError> {
11839 let strategy_number = self.parse_literal_uint()?;
11840
11841 self.expect_token(&Token::LParen)?;
11843 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11844 self.expect_token(&Token::RParen)?;
11845
11846 Ok(OperatorFamilyDropItem::Operator {
11847 strategy_number,
11848 op_types,
11849 })
11850 }
11851
11852 fn parse_operator_family_drop_function(
11854 &mut self,
11855 ) -> Result<OperatorFamilyDropItem, ParserError> {
11856 let support_number = self.parse_literal_uint()?;
11857
11858 self.expect_token(&Token::LParen)?;
11860 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11861 self.expect_token(&Token::RParen)?;
11862
11863 Ok(OperatorFamilyDropItem::Function {
11864 support_number,
11865 op_types,
11866 })
11867 }
11868
11869 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11871 if self.parse_keyword(Keyword::OPERATOR) {
11872 self.parse_operator_family_add_operator()
11873 } else if self.parse_keyword(Keyword::FUNCTION) {
11874 self.parse_operator_family_add_function()
11875 } else {
11876 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11877 }
11878 }
11879
11880 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
11882 if self.parse_keyword(Keyword::OPERATOR) {
11883 self.parse_operator_family_drop_operator()
11884 } else if self.parse_keyword(Keyword::FUNCTION) {
11885 self.parse_operator_family_drop_function()
11886 } else {
11887 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11888 }
11889 }
11890
11891 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
11894 let name = self.parse_object_name(false)?;
11895 self.expect_keyword(Keyword::USING)?;
11896 let using = self.parse_identifier()?;
11897
11898 let operation = if self.parse_keyword(Keyword::ADD) {
11899 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
11900 AlterOperatorFamilyOperation::Add { items }
11901 } else if self.parse_keyword(Keyword::DROP) {
11902 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
11903 AlterOperatorFamilyOperation::Drop { items }
11904 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11905 let new_name = self.parse_object_name(false)?;
11906 AlterOperatorFamilyOperation::RenameTo { new_name }
11907 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11908 let owner = self.parse_owner()?;
11909 AlterOperatorFamilyOperation::OwnerTo(owner)
11910 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11911 let schema_name = self.parse_object_name(false)?;
11912 AlterOperatorFamilyOperation::SetSchema { schema_name }
11913 } else {
11914 return self.expected_ref(
11915 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
11916 self.peek_token_ref(),
11917 );
11918 };
11919
11920 Ok(AlterOperatorFamily {
11921 name,
11922 using,
11923 operation,
11924 })
11925 }
11926
11927 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
11931 let name = self.parse_object_name(false)?;
11932 self.expect_keyword(Keyword::USING)?;
11933 let using = self.parse_identifier()?;
11934
11935 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11936 let new_name = self.parse_object_name(false)?;
11937 AlterOperatorClassOperation::RenameTo { new_name }
11938 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11939 let owner = self.parse_owner()?;
11940 AlterOperatorClassOperation::OwnerTo(owner)
11941 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11942 let schema_name = self.parse_object_name(false)?;
11943 AlterOperatorClassOperation::SetSchema { schema_name }
11944 } else {
11945 return self.expected_ref(
11946 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
11947 self.peek_token_ref(),
11948 );
11949 };
11950
11951 Ok(AlterOperatorClass {
11952 name,
11953 using,
11954 operation,
11955 })
11956 }
11957
11958 pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
11962 self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
11963 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11964 let name = self.parse_object_name(false)?;
11965 let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
11966 self.prev_token();
11967 let options = self.parse_options(Keyword::OPTIONS)?;
11968 AlterSchemaOperation::SetOptionsParens { options }
11969 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
11970 let collate = self.parse_expr()?;
11971 AlterSchemaOperation::SetDefaultCollate { collate }
11972 } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
11973 let replica = self.parse_identifier()?;
11974 let options = if self.peek_keyword(Keyword::OPTIONS) {
11975 Some(self.parse_options(Keyword::OPTIONS)?)
11976 } else {
11977 None
11978 };
11979 AlterSchemaOperation::AddReplica { replica, options }
11980 } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
11981 let replica = self.parse_identifier()?;
11982 AlterSchemaOperation::DropReplica { replica }
11983 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11984 let new_name = self.parse_object_name(false)?;
11985 AlterSchemaOperation::Rename { name: new_name }
11986 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11987 let owner = self.parse_owner()?;
11988 AlterSchemaOperation::OwnerTo { owner }
11989 } else {
11990 return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
11991 };
11992 Ok(Statement::AlterSchema(AlterSchema {
11993 name,
11994 if_exists,
11995 operations: vec![operation],
11996 }))
11997 }
11998
11999 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
12002 let object_name = self.parse_object_name(false)?;
12003 if self.peek_token_ref().token == Token::LParen {
12004 match self.parse_function(object_name)? {
12005 Expr::Function(f) => Ok(Statement::Call(f)),
12006 other => parser_err!(
12007 format!("Expected a simple procedure call but found: {other}"),
12008 self.peek_token_ref().span.start
12009 ),
12010 }
12011 } else {
12012 Ok(Statement::Call(Function {
12013 name: object_name,
12014 uses_odbc_syntax: false,
12015 parameters: FunctionArguments::None,
12016 args: FunctionArguments::None,
12017 over: None,
12018 filter: None,
12019 null_treatment: None,
12020 within_group: vec![],
12021 }))
12022 }
12023 }
12024
12025 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
12027 let source;
12028 if self.consume_token(&Token::LParen) {
12029 source = CopySource::Query(self.parse_query()?);
12030 self.expect_token(&Token::RParen)?;
12031 } else {
12032 let table_name = self.parse_object_name(false)?;
12033 let columns = self.parse_parenthesized_column_list(Optional, false)?;
12034 source = CopySource::Table {
12035 table_name,
12036 columns,
12037 };
12038 }
12039 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
12040 Some(Keyword::FROM) => false,
12041 Some(Keyword::TO) => true,
12042 _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
12043 };
12044 if !to {
12045 if let CopySource::Query(_) = source {
12048 return Err(ParserError::ParserError(
12049 "COPY ... FROM does not support query as a source".to_string(),
12050 ));
12051 }
12052 }
12053 let target = if self.parse_keyword(Keyword::STDIN) {
12054 CopyTarget::Stdin
12055 } else if self.parse_keyword(Keyword::STDOUT) {
12056 CopyTarget::Stdout
12057 } else if self.parse_keyword(Keyword::PROGRAM) {
12058 CopyTarget::Program {
12059 command: self.parse_literal_string()?,
12060 }
12061 } else {
12062 CopyTarget::File {
12063 filename: self.parse_literal_string()?,
12064 }
12065 };
12066 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
12068 if self.consume_token(&Token::LParen) {
12069 options = self.parse_comma_separated(Parser::parse_copy_option)?;
12070 self.expect_token(&Token::RParen)?;
12071 }
12072 let mut legacy_options = vec![];
12073 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
12074 legacy_options.push(opt);
12075 }
12076 let values =
12077 if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
12078 self.expect_token(&Token::SemiColon)?;
12079 self.parse_tsv()
12080 } else {
12081 vec![]
12082 };
12083 Ok(Statement::Copy {
12084 source,
12085 to,
12086 target,
12087 options,
12088 legacy_options,
12089 values,
12090 })
12091 }
12092
12093 fn parse_open(&mut self) -> Result<Statement, ParserError> {
12095 self.expect_keyword(Keyword::OPEN)?;
12096 Ok(Statement::Open(OpenStatement {
12097 cursor_name: self.parse_identifier()?,
12098 }))
12099 }
12100
12101 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
12103 let cursor = if self.parse_keyword(Keyword::ALL) {
12104 CloseCursor::All
12105 } else {
12106 let name = self.parse_identifier()?;
12107
12108 CloseCursor::Specific { name }
12109 };
12110
12111 Ok(Statement::Close { cursor })
12112 }
12113
12114 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
12115 let ret = match self.parse_one_of_keywords(&[
12116 Keyword::FORMAT,
12117 Keyword::FREEZE,
12118 Keyword::DELIMITER,
12119 Keyword::NULL,
12120 Keyword::HEADER,
12121 Keyword::QUOTE,
12122 Keyword::ESCAPE,
12123 Keyword::FORCE_QUOTE,
12124 Keyword::FORCE_NOT_NULL,
12125 Keyword::FORCE_NULL,
12126 Keyword::ENCODING,
12127 ]) {
12128 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
12129 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
12130 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
12131 Some(Keyword::FALSE)
12132 )),
12133 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
12134 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
12135 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
12136 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
12137 Some(Keyword::FALSE)
12138 )),
12139 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
12140 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
12141 Some(Keyword::FORCE_QUOTE) => {
12142 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
12143 }
12144 Some(Keyword::FORCE_NOT_NULL) => {
12145 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
12146 }
12147 Some(Keyword::FORCE_NULL) => {
12148 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
12149 }
12150 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
12151 _ => self.expected_ref("option", self.peek_token_ref())?,
12152 };
12153 Ok(ret)
12154 }
12155
12156 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
12157 if self.parse_keyword(Keyword::FORMAT) {
12159 let _ = self.parse_keyword(Keyword::AS);
12160 }
12161
12162 let ret = match self.parse_one_of_keywords(&[
12163 Keyword::ACCEPTANYDATE,
12164 Keyword::ACCEPTINVCHARS,
12165 Keyword::ADDQUOTES,
12166 Keyword::ALLOWOVERWRITE,
12167 Keyword::BINARY,
12168 Keyword::BLANKSASNULL,
12169 Keyword::BZIP2,
12170 Keyword::CLEANPATH,
12171 Keyword::COMPUPDATE,
12172 Keyword::CREDENTIALS,
12173 Keyword::CSV,
12174 Keyword::DATEFORMAT,
12175 Keyword::DELIMITER,
12176 Keyword::EMPTYASNULL,
12177 Keyword::ENCRYPTED,
12178 Keyword::ESCAPE,
12179 Keyword::EXTENSION,
12180 Keyword::FIXEDWIDTH,
12181 Keyword::GZIP,
12182 Keyword::HEADER,
12183 Keyword::IAM_ROLE,
12184 Keyword::IGNOREHEADER,
12185 Keyword::JSON,
12186 Keyword::MANIFEST,
12187 Keyword::MAXFILESIZE,
12188 Keyword::NULL,
12189 Keyword::PARALLEL,
12190 Keyword::PARQUET,
12191 Keyword::PARTITION,
12192 Keyword::REGION,
12193 Keyword::REMOVEQUOTES,
12194 Keyword::ROWGROUPSIZE,
12195 Keyword::STATUPDATE,
12196 Keyword::TIMEFORMAT,
12197 Keyword::TRUNCATECOLUMNS,
12198 Keyword::ZSTD,
12199 ]) {
12200 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
12201 Some(Keyword::ACCEPTINVCHARS) => {
12202 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12204 Some(self.parse_literal_string()?)
12205 } else {
12206 None
12207 };
12208 CopyLegacyOption::AcceptInvChars(ch)
12209 }
12210 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
12211 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
12212 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
12213 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
12214 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
12215 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
12216 Some(Keyword::COMPUPDATE) => {
12217 let preset = self.parse_keyword(Keyword::PRESET);
12218 let enabled = match self.parse_one_of_keywords(&[
12219 Keyword::TRUE,
12220 Keyword::FALSE,
12221 Keyword::ON,
12222 Keyword::OFF,
12223 ]) {
12224 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12225 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12226 _ => None,
12227 };
12228 CopyLegacyOption::CompUpdate { preset, enabled }
12229 }
12230 Some(Keyword::CREDENTIALS) => {
12231 CopyLegacyOption::Credentials(self.parse_literal_string()?)
12232 }
12233 Some(Keyword::CSV) => CopyLegacyOption::Csv({
12234 let mut opts = vec![];
12235 while let Some(opt) =
12236 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
12237 {
12238 opts.push(opt);
12239 }
12240 opts
12241 }),
12242 Some(Keyword::DATEFORMAT) => {
12243 let _ = self.parse_keyword(Keyword::AS);
12244 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12245 Some(self.parse_literal_string()?)
12246 } else {
12247 None
12248 };
12249 CopyLegacyOption::DateFormat(fmt)
12250 }
12251 Some(Keyword::DELIMITER) => {
12252 let _ = self.parse_keyword(Keyword::AS);
12253 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
12254 }
12255 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
12256 Some(Keyword::ENCRYPTED) => {
12257 let auto = self.parse_keyword(Keyword::AUTO);
12258 CopyLegacyOption::Encrypted { auto }
12259 }
12260 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
12261 Some(Keyword::EXTENSION) => {
12262 let ext = self.parse_literal_string()?;
12263 CopyLegacyOption::Extension(ext)
12264 }
12265 Some(Keyword::FIXEDWIDTH) => {
12266 let spec = self.parse_literal_string()?;
12267 CopyLegacyOption::FixedWidth(spec)
12268 }
12269 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
12270 Some(Keyword::HEADER) => CopyLegacyOption::Header,
12271 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
12272 Some(Keyword::IGNOREHEADER) => {
12273 let _ = self.parse_keyword(Keyword::AS);
12274 let num_rows = self.parse_literal_uint()?;
12275 CopyLegacyOption::IgnoreHeader(num_rows)
12276 }
12277 Some(Keyword::JSON) => {
12278 let _ = self.parse_keyword(Keyword::AS);
12279 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12280 Some(self.parse_literal_string()?)
12281 } else {
12282 None
12283 };
12284 CopyLegacyOption::Json(fmt)
12285 }
12286 Some(Keyword::MANIFEST) => {
12287 let verbose = self.parse_keyword(Keyword::VERBOSE);
12288 CopyLegacyOption::Manifest { verbose }
12289 }
12290 Some(Keyword::MAXFILESIZE) => {
12291 let _ = self.parse_keyword(Keyword::AS);
12292 let size = self.parse_number_value()?;
12293 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12294 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12295 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12296 _ => None,
12297 };
12298 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
12299 }
12300 Some(Keyword::NULL) => {
12301 let _ = self.parse_keyword(Keyword::AS);
12302 CopyLegacyOption::Null(self.parse_literal_string()?)
12303 }
12304 Some(Keyword::PARALLEL) => {
12305 let enabled = match self.parse_one_of_keywords(&[
12306 Keyword::TRUE,
12307 Keyword::FALSE,
12308 Keyword::ON,
12309 Keyword::OFF,
12310 ]) {
12311 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12312 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12313 _ => None,
12314 };
12315 CopyLegacyOption::Parallel(enabled)
12316 }
12317 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
12318 Some(Keyword::PARTITION) => {
12319 self.expect_keyword(Keyword::BY)?;
12320 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
12321 let include = self.parse_keyword(Keyword::INCLUDE);
12322 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
12323 }
12324 Some(Keyword::REGION) => {
12325 let _ = self.parse_keyword(Keyword::AS);
12326 let region = self.parse_literal_string()?;
12327 CopyLegacyOption::Region(region)
12328 }
12329 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
12330 Some(Keyword::ROWGROUPSIZE) => {
12331 let _ = self.parse_keyword(Keyword::AS);
12332 let file_size = self.parse_file_size()?;
12333 CopyLegacyOption::RowGroupSize(file_size)
12334 }
12335 Some(Keyword::STATUPDATE) => {
12336 let enabled = match self.parse_one_of_keywords(&[
12337 Keyword::TRUE,
12338 Keyword::FALSE,
12339 Keyword::ON,
12340 Keyword::OFF,
12341 ]) {
12342 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12343 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12344 _ => None,
12345 };
12346 CopyLegacyOption::StatUpdate(enabled)
12347 }
12348 Some(Keyword::TIMEFORMAT) => {
12349 let _ = self.parse_keyword(Keyword::AS);
12350 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12351 Some(self.parse_literal_string()?)
12352 } else {
12353 None
12354 };
12355 CopyLegacyOption::TimeFormat(fmt)
12356 }
12357 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
12358 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
12359 _ => self.expected_ref("option", self.peek_token_ref())?,
12360 };
12361 Ok(ret)
12362 }
12363
12364 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12365 let size = self.parse_number_value()?;
12366 let unit = self.maybe_parse_file_size_unit();
12367 Ok(FileSize { size, unit })
12368 }
12369
12370 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12371 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12372 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12373 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12374 _ => None,
12375 }
12376 }
12377
12378 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12379 if self.parse_keyword(Keyword::DEFAULT) {
12380 Ok(IamRoleKind::Default)
12381 } else {
12382 let arn = self.parse_literal_string()?;
12383 Ok(IamRoleKind::Arn(arn))
12384 }
12385 }
12386
12387 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
12388 let ret = match self.parse_one_of_keywords(&[
12389 Keyword::HEADER,
12390 Keyword::QUOTE,
12391 Keyword::ESCAPE,
12392 Keyword::FORCE,
12393 ]) {
12394 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
12395 Some(Keyword::QUOTE) => {
12396 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
12398 }
12399 Some(Keyword::ESCAPE) => {
12400 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
12402 }
12403 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
12404 CopyLegacyCsvOption::ForceNotNull(
12405 self.parse_comma_separated(|p| p.parse_identifier())?,
12406 )
12407 }
12408 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
12409 CopyLegacyCsvOption::ForceQuote(
12410 self.parse_comma_separated(|p| p.parse_identifier())?,
12411 )
12412 }
12413 _ => self.expected_ref("csv option", self.peek_token_ref())?,
12414 };
12415 Ok(ret)
12416 }
12417
12418 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12419 let s = self.parse_literal_string()?;
12420 if s.len() != 1 {
12421 let loc = self
12422 .tokens
12423 .get(self.index - 1)
12424 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12425 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12426 }
12427 Ok(s.chars().next().unwrap())
12428 }
12429
    /// Parses tab-separated values following a statement such as
    /// `COPY ... FROM STDIN`. Thin alias for [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
12435
12436 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
12438 let mut values = vec![];
12439 let mut content = String::new();
12440 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
12441 match t {
12442 Token::Whitespace(Whitespace::Tab) => {
12443 values.push(Some(core::mem::take(&mut content)));
12444 }
12445 Token::Whitespace(Whitespace::Newline) => {
12446 values.push(Some(core::mem::take(&mut content)));
12447 }
12448 Token::Backslash => {
12449 if self.consume_token(&Token::Period) {
12450 return values;
12451 }
12452 if let Token::Word(w) = self.next_token().token {
12453 if w.value == "N" {
12454 values.push(None);
12455 }
12456 }
12457 }
12458 _ => {
12459 content.push_str(&t.to_string());
12460 }
12461 }
12462 }
12463 values
12464 }
12465
    /// Parse a literal value (number, string, boolean, NULL, placeholder, ...)
    /// together with the source span of the token(s) it came from.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper that attaches the consumed token's span to a parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are literals only in dialects with boolean literals.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a quoted string literal.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            // Numeric literal; any parse failure is anchored at the span start.
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Quoted strings may be concatenated with adjacent literals,
            // depending on the dialect (see maybe_concat_string_literal).
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            // Byte-string and raw-string literal tokens map 1:1 to value variants.
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the sigil must be immediately
            // followed (no whitespace skipping) by a word or plain number.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The resulting span covers both the sigil and the identifier.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12583
    /// Concatenate adjacent string literals onto `str` when the dialect allows it.
    ///
    /// Two dialect behaviors are handled:
    /// * plain adjacency: any directly following quoted string is appended;
    /// * newline-separated: a following quoted string is appended only when at
    ///   least one newline separates it from the previous literal.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // Tracks whether a newline has been seen since the last literal.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    Token::Whitespace(_) => {
                        // Other whitespace is skipped but does not satisfy the
                        // newline requirement.
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        // Require another newline before any further literal.
                        after_newline = false;
                    }
                    _ => break,
                }
            }
        }

        str
    }
12622
12623 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12625 let value_wrapper = self.parse_value()?;
12626 match &value_wrapper.value {
12627 Value::Number(_, _) => Ok(value_wrapper),
12628 Value::Placeholder(_) => Ok(value_wrapper),
12629 _ => {
12630 self.prev_token();
12631 self.expected_ref("literal number", self.peek_token_ref())
12632 }
12633 }
12634 }
12635
12636 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12639 let next_token = self.next_token();
12640 match next_token.token {
12641 Token::Plus => Ok(Expr::UnaryOp {
12642 op: UnaryOperator::Plus,
12643 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12644 }),
12645 Token::Minus => Ok(Expr::UnaryOp {
12646 op: UnaryOperator::Minus,
12647 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12648 }),
12649 _ => {
12650 self.prev_token();
12651 Ok(Expr::Value(self.parse_number_value()?))
12652 }
12653 }
12654 }
12655
12656 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12657 let next_token = self.next_token();
12658 let span = next_token.span;
12659 match next_token.token {
12660 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12661 Value::SingleQuotedString(s.to_string()).with_span(span),
12662 )),
12663 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12664 Value::DoubleQuotedString(s.to_string()).with_span(span),
12665 )),
12666 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12667 Value::HexStringLiteral(s.to_string()).with_span(span),
12668 )),
12669 unexpected => self.expected(
12670 "a string value",
12671 TokenWithSpan {
12672 token: unexpected,
12673 span,
12674 },
12675 ),
12676 }
12677 }
12678
12679 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12681 let next_token = self.next_token();
12682 match next_token.token {
12683 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12684 _ => self.expected("literal int", next_token),
12685 }
12686 }
12687
12688 fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
12691 let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
12692 let peek_token = parser.peek_token();
12693 let span = peek_token.span;
12694 match peek_token.token {
12695 Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
12696 {
12697 parser.next_token();
12698 Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
12699 }
12700 _ => Ok(Expr::Value(
12701 Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
12702 )),
12703 }
12704 };
12705
12706 Ok(CreateFunctionBody::AsBeforeOptions {
12707 body: parse_string_expr(self)?,
12708 link_symbol: if self.consume_token(&Token::Comma) {
12709 Some(parse_string_expr(self)?)
12710 } else {
12711 None
12712 },
12713 })
12714 }
12715
12716 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
12718 let next_token = self.next_token();
12719 match next_token.token {
12720 Token::Word(Word {
12721 value,
12722 keyword: Keyword::NoKeyword,
12723 ..
12724 }) => Ok(value),
12725 Token::SingleQuotedString(s) => Ok(s),
12726 Token::DoubleQuotedString(s) => Ok(s),
12727 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
12728 Ok(s)
12729 }
12730 Token::UnicodeStringLiteral(s) => Ok(s),
12731 _ => self.expected("literal string", next_token),
12732 }
12733 }
12734
12735 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12737 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12738 Some(Keyword::TRUE) => Ok(true),
12739 Some(Keyword::FALSE) => Ok(false),
12740 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12741 }
12742 }
12743
    /// Parse the tail of the Unicode normalization predicate
    /// `<expr> IS [NOT] [NFC|NFD|NFKC|NFKD] NORMALIZED`, where `IS` has
    /// already been consumed by the caller.
    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        let neg = self.parse_keyword(Keyword::NOT);
        // The normalization form is optional; maybe_parse rewinds the token
        // stream when the next keyword is not one of the four forms.
        let normalized_form = self.maybe_parse(|parser| {
            match parser.parse_one_of_keywords(&[
                Keyword::NFC,
                Keyword::NFD,
                Keyword::NFKC,
                Keyword::NFKD,
            ]) {
                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
                _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
            }
        })?;
        // The NORMALIZED keyword itself is mandatory.
        if self.parse_keyword(Keyword::NORMALIZED) {
            return Ok(Expr::IsNormalized {
                expr: Box::new(expr),
                form: normalized_form,
                negated: neg,
            });
        }
        self.expected_ref("unicode normalization form", self.peek_token_ref())
    }
12770
12771 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12773 self.expect_token(&Token::LParen)?;
12774 let values = self.parse_comma_separated(|parser| {
12775 let name = parser.parse_literal_string()?;
12776 let e = if parser.consume_token(&Token::Eq) {
12777 let value = parser.parse_number()?;
12778 EnumMember::NamedValue(name, value)
12779 } else {
12780 EnumMember::Name(name)
12781 };
12782 Ok(e)
12783 })?;
12784 self.expect_token(&Token::RParen)?;
12785
12786 Ok(values)
12787 }
12788
12789 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
12791 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
12792 if trailing_bracket.0 {
12793 return parser_err!(
12794 format!("unmatched > after parsing data type {ty}"),
12795 self.peek_token_ref()
12796 );
12797 }
12798
12799 Ok(ty)
12800 }
12801
    /// Core data-type parser: dispatch on the next keyword token, falling back
    /// to a custom (user-defined) type name.
    ///
    /// Returns the parsed type plus a flag indicating whether a trailing `>`
    /// was matched while parsing a nested angle-bracket type (ARRAY/STRUCT),
    /// so the caller can detect unbalanced brackets.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        // Remembered so the fallback error can point at the type-name token.
        let next_token_index = self.get_current_index();

        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                // Floating-point types, with optional precision/scale and an
                // optional UNSIGNED suffix.
                Keyword::FLOAT => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::FloatUnsigned(precision))
                    } else {
                        Ok(DataType::Float(precision))
                    }
                }
                Keyword::REAL => {
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::RealUnsigned)
                    } else {
                        Ok(DataType::Real)
                    }
                }
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    // `DOUBLE PRECISION` is distinct from plain `DOUBLE(p, s)`.
                    if self.parse_keyword(Keyword::PRECISION) {
                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoublePrecisionUnsigned)
                        } else {
                            Ok(DataType::DoublePrecision)
                        }
                    } else {
                        let precision = self.parse_exact_number_optional_precision_scale()?;

                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoubleUnsigned(precision))
                        } else {
                            Ok(DataType::Double(precision))
                        }
                    }
                }
                // Integer types: optional display width, then an optional
                // UNSIGNED suffix; some dialects also allow a no-op SIGNED.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // Character/string types with optional length specifiers.
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                // Binary types.
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                // Date/time types; TIME/TIMESTAMP take an optional precision
                // and an optional WITH/WITHOUT TIME ZONE clause.
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // DATETIME64 has its own argument syntax; rewind so the
                    // dedicated parser sees the keyword again.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => {
                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
                }
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => {
                    // Interval field/precision options are dialect-gated.
                    if self.dialect.supports_interval_options() {
                        let fields = self.maybe_parse_optional_interval_fields()?;
                        let precision = self.parse_optional_precision()?;
                        Ok(DataType::Interval { fields, precision })
                    } else {
                        Ok(DataType::Interval {
                            fields: None,
                            precision: None,
                        })
                    }
                }
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    // FixedString requires an explicit parenthesized length.
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // Exact numeric types with optional precision/scale.
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecimalUnsigned(precision))
                    } else {
                        Ok(DataType::Decimal(precision))
                    }
                }
                Keyword::DEC => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecUnsigned(precision))
                    } else {
                        Ok(DataType::Dec(precision))
                    }
                }
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                Keyword::ARRAY => {
                    // Three dialect-dependent forms: bare ARRAY, ClickHouse
                    // Array(T), and angle-bracket ARRAY<T>.
                    if self.dialect.supports_array_typedef_without_element_type() {
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        // Propagate whether the inner parse already consumed
                        // the matching `>` (e.g. from a `>>` token).
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
                {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                Keyword::SETOF => {
                    // SETOF wraps another full data type.
                    let inner = self.parse_data_type()?;
                    Ok(DataType::SetOf(Box::new(inner)))
                }
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                Keyword::TABLE => {
                    // TABLE may carry an optional parenthesized column list.
                    if self.peek_token_ref().token == Token::LParen {
                        let columns = self.parse_returns_table_columns()?;
                        Ok(DataType::Table(Some(columns)))
                    } else {
                        Ok(DataType::Table(None))
                    }
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsVector)
                }
                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsQuery)
                }
                _ => {
                    // Unrecognized keyword: rewind and parse a (possibly
                    // qualified) custom type name with optional modifiers.
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Postgres-style `T[]` / `T[n]` suffixes, possibly repeated for
        // multi-dimensional arrays.
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                // The size is optional: `[]` and `[10]` are both accepted.
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
13230
13231 fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
13232 self.parse_column_def()
13233 }
13234
13235 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13236 self.expect_token(&Token::LParen)?;
13237 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13238 self.expect_token(&Token::RParen)?;
13239 Ok(columns)
13240 }
13241
13242 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13244 self.expect_token(&Token::LParen)?;
13245 let mut values = Vec::new();
13246 loop {
13247 let next_token = self.next_token();
13248 match next_token.token {
13249 Token::SingleQuotedString(value) => values.push(value),
13250 _ => self.expected("a string", next_token)?,
13251 }
13252 let next_token = self.next_token();
13253 match next_token.token {
13254 Token::Comma => (),
13255 Token::RParen => break,
13256 _ => self.expected(", or }", next_token)?,
13257 }
13258 }
13259 Ok(values)
13260 }
13261
13262 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13264 let ident = self.parse_identifier()?;
13265 self.expect_keyword_is(Keyword::AS)?;
13266 let alias = self.parse_identifier()?;
13267 Ok(IdentWithAlias { ident, alias })
13268 }
13269
13270 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13272 let ident = self.parse_identifier()?;
13273 let _after_as = self.parse_keyword(Keyword::AS);
13274 let alias = self.parse_identifier()?;
13275 Ok(IdentWithAlias { ident, alias })
13276 }
13277
13278 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13280 self.parse_comma_separated(|parser| {
13281 parser.expect_token(&Token::LParen)?;
13282 let query = parser.parse_query()?;
13283 parser.expect_token(&Token::RParen)?;
13284 Ok(*query)
13285 })
13286 }
13287
13288 fn parse_distinct_required_set_quantifier(
13290 &mut self,
13291 operator_name: &str,
13292 ) -> Result<SetQuantifier, ParserError> {
13293 let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
13294 match quantifier {
13295 SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
13296 _ => Err(ParserError::ParserError(format!(
13297 "{operator_name} pipe operator requires DISTINCT modifier",
13298 ))),
13299 }
13300 }
13301
13302 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13304 if self.parse_keyword(Keyword::AS) {
13305 Ok(Some(self.parse_identifier()?))
13306 } else {
13307 self.maybe_parse(|parser| parser.parse_identifier())
13309 }
13310 }
13311
13312 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13314 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13315 parser.dialect.is_select_item_alias(explicit, kw, parser)
13316 }
13317 self.parse_optional_alias_inner(None, validator)
13318 }
13319
13320 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
13324 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13325 parser.dialect.is_table_factor_alias(explicit, kw, parser)
13326 }
13327 let explicit = self.peek_keyword(Keyword::AS);
13328 match self.parse_optional_alias_inner(None, validator)? {
13329 Some(name) => {
13330 let columns = self.parse_table_alias_column_defs()?;
13331 Ok(Some(TableAlias {
13332 explicit,
13333 name,
13334 columns,
13335 }))
13336 }
13337 None => Ok(None),
13338 }
13339 }
13340
13341 fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
13342 let mut hints = vec![];
13343 while let Some(hint_type) =
13344 self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
13345 {
13346 let hint_type = match hint_type {
13347 Keyword::USE => TableIndexHintType::Use,
13348 Keyword::IGNORE => TableIndexHintType::Ignore,
13349 Keyword::FORCE => TableIndexHintType::Force,
13350 _ => {
13351 return self.expected_ref(
13352 "expected to match USE/IGNORE/FORCE keyword",
13353 self.peek_token_ref(),
13354 )
13355 }
13356 };
13357 let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
13358 Some(Keyword::INDEX) => TableIndexType::Index,
13359 Some(Keyword::KEY) => TableIndexType::Key,
13360 _ => {
13361 return self
13362 .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
13363 }
13364 };
13365 let for_clause = if self.parse_keyword(Keyword::FOR) {
13366 let clause = if self.parse_keyword(Keyword::JOIN) {
13367 TableIndexHintForClause::Join
13368 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13369 TableIndexHintForClause::OrderBy
13370 } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
13371 TableIndexHintForClause::GroupBy
13372 } else {
13373 return self.expected_ref(
13374 "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
13375 self.peek_token_ref(),
13376 );
13377 };
13378 Some(clause)
13379 } else {
13380 None
13381 };
13382
13383 self.expect_token(&Token::LParen)?;
13384 let index_names = if self.peek_token_ref().token != Token::RParen {
13385 self.parse_comma_separated(Parser::parse_identifier)?
13386 } else {
13387 vec![]
13388 };
13389 self.expect_token(&Token::RParen)?;
13390 hints.push(TableIndexHints {
13391 hint_type,
13392 index_type,
13393 for_clause,
13394 index_names,
13395 });
13396 }
13397 Ok(hints)
13398 }
13399
13400 pub fn parse_optional_alias(
13404 &mut self,
13405 reserved_kwds: &[Keyword],
13406 ) -> Result<Option<Ident>, ParserError> {
13407 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13408 false
13409 }
13410 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13411 }
13412
13413 fn parse_optional_alias_inner<F>(
13420 &mut self,
13421 reserved_kwds: Option<&[Keyword]>,
13422 validator: F,
13423 ) -> Result<Option<Ident>, ParserError>
13424 where
13425 F: Fn(bool, &Keyword, &mut Parser) -> bool,
13426 {
13427 let after_as = self.parse_keyword(Keyword::AS);
13428
13429 let next_token = self.next_token();
13430 match next_token.token {
13431 Token::Word(w)
13434 if reserved_kwds.is_some()
13435 && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
13436 {
13437 Ok(Some(w.into_ident(next_token.span)))
13438 }
13439 Token::Word(w) if validator(after_as, &w.keyword, self) => {
13443 Ok(Some(w.into_ident(next_token.span)))
13444 }
13445 Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
13447 Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
13448 _ => {
13449 if after_as {
13450 return self.expected("an identifier after AS", next_token);
13451 }
13452 self.prev_token();
13453 Ok(None) }
13455 }
13456 }
13457
    /// Parses an optional `GROUP BY` clause, returning `None` when the next
    /// tokens are not `GROUP BY`.
    ///
    /// Handles `GROUP BY ALL` (expression list is `None`), dialect-gated
    /// `WITH ROLLUP|CUBE|TOTALS` modifiers, and `GROUPING SETS (...)`.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` carries no expression list; otherwise parse the
            // comma-separated grouping expressions.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Accept any number of `WITH <modifier>` suffixes.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords only yields
                        // keywords from its input list.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            // `GROUPING SETS ( (a, b), c, ... )`: each element is either a
            // parenthesized tuple or a single expression. Note this is not
            // gated on supports_group_by_with_modifier().
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            // None expression list means GROUP BY ALL.
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13514
13515 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
13517 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13518 let order_by =
13519 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
13520 let order_by_options = self.parse_order_by_options()?;
13521 OrderBy {
13522 kind: OrderByKind::All(order_by_options),
13523 interpolate: None,
13524 }
13525 } else {
13526 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13527 let interpolate = if self.dialect.supports_interpolate() {
13528 self.parse_interpolations()?
13529 } else {
13530 None
13531 };
13532 OrderBy {
13533 kind: OrderByKind::Expressions(exprs),
13534 interpolate,
13535 }
13536 };
13537 Ok(Some(order_by))
13538 } else {
13539 Ok(None)
13540 }
13541 }
13542
    /// Parses an optional LIMIT/OFFSET clause in any accepted order:
    /// `OFFSET ... LIMIT ...`, `LIMIT ... OFFSET ...`, the comma form
    /// `LIMIT <offset>, <limit>`, and `LIMIT ... BY ...` (all dialect-gated
    /// where applicable). Returns `None` when no such clause is present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may legally precede LIMIT.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            // `limit` ends up as Option<Option<Expr>>: the outer layer means
            // "LIMIT keyword seen", the inner is the expression, which
            // parse_limit may leave absent — TODO confirm which inputs
            // yield None in parse_limit.
            let expr = self.parse_limit()?;

            // Comma form `LIMIT <offset>, <limit>`; only valid when no
            // OFFSET was parsed earlier and an expression followed LIMIT.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // `LIMIT n BY expr, ...` for dialects that support it.
            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT — but only one OFFSET in total.
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit != Some(None)` filters out a LIMIT keyword whose expression
        // parsed to None; by itself that contributes no clause content.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
13594
13595 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13598 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13599 let fn_name = self.parse_object_name(false)?;
13600 self.parse_function_call(fn_name)
13601 .map(TableObject::TableFunction)
13602 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13603 self.parse_parenthesized(|p| p.parse_query())
13604 .map(TableObject::TableQuery)
13605 } else {
13606 self.parse_object_name(false).map(TableObject::TableName)
13607 }
13608 }
13609
13610 pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
13617 self.parse_object_name_inner(in_table_clause, false)
13618 }
13619
    /// Parses a period-separated object name, with several dialect quirks:
    ///
    /// * BigQuery table names in a table clause may contain hyphens
    ///   (handled by `parse_unquoted_hyphenated_identifier`);
    /// * `allow_wildcards` permits `*` as a name part (e.g. `db.schema.*`);
    /// * dialects with "double dot" notation accept `db..table`, which gets
    ///   an empty identifier inserted as the middle part;
    /// * some identifiers start a function-style part `name(args)` when the
    ///   dialect says so;
    /// * for BigQuery, parts whose text contains `.` are split into
    ///   separate parts afterwards.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table clause: hyphenated identifiers; a part ending
            // in a period (end_with_period) also continues the name.
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // A `*` part, stored as an identifier with the literal
                    // text of the Mul token.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    // NOTE(review): this branch mirrors the dedicated loop
                    // above and looks unreachable from here, since that loop
                    // handles the same condition — confirm.
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // `db..table`: insert an empty middle identifier; the
                    // period itself is consumed at the bottom of the loop.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Dialect hook: some identifiers in name position start
                    // a function-style part with parenthesized arguments.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery: a single (possibly quoted) part may embed periods, e.g.
        // `"a.b.c"`; split such parts into separate identifiers, each
        // keeping the original quote style and span.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13716
13717 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
13719 let mut idents = vec![];
13720 loop {
13721 let token = self.peek_token_ref();
13722 match &token.token {
13723 Token::Word(w) => {
13724 idents.push(w.to_ident(token.span));
13725 }
13726 Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
13727 break
13728 }
13729 _ => {}
13730 }
13731 self.advance_token();
13732 }
13733 Ok(idents)
13734 }
13735
13736 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
13776 let mut idents = vec![];
13777
13778 let next_token = self.next_token();
13780 match next_token.token {
13781 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13782 Token::EOF => {
13783 return Err(ParserError::ParserError(
13784 "Empty input when parsing identifier".to_string(),
13785 ))?
13786 }
13787 token => {
13788 return Err(ParserError::ParserError(format!(
13789 "Unexpected token in identifier: {token}"
13790 )))?
13791 }
13792 };
13793
13794 loop {
13796 match self.next_token().token {
13797 Token::Period => {
13799 let next_token = self.next_token();
13800 match next_token.token {
13801 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13802 Token::EOF => {
13803 return Err(ParserError::ParserError(
13804 "Trailing period in identifier".to_string(),
13805 ))?
13806 }
13807 token => {
13808 return Err(ParserError::ParserError(format!(
13809 "Unexpected token following period in identifier: {token}"
13810 )))?
13811 }
13812 }
13813 }
13814 Token::EOF => break,
13815 token => {
13816 return Err(ParserError::ParserError(format!(
13817 "Unexpected token in identifier: {token}"
13818 )))?;
13819 }
13820 }
13821 }
13822
13823 Ok(idents)
13824 }
13825
13826 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
13828 let next_token = self.next_token();
13829 match next_token.token {
13830 Token::Word(w) => Ok(w.into_ident(next_token.span)),
13831 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
13832 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
13833 _ => self.expected("identifier", next_token),
13834 }
13835 }
13836
    /// Parses an identifier that may contain hyphens (BigQuery-style, e.g.
    /// `my-project`). Only unquoted words participate in hyphen joining;
    /// quoted words and non-word tokens fall back to `parse_identifier`.
    ///
    /// Returns the identifier plus a flag: `true` means a numeric segment
    /// ended with a period (the tokenizer fused the separating `.` into the
    /// number), so the caller should treat the name as continuing.
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                if quote_style_is_none {
                    // Absorb `-<segment>` pairs while a minus immediately
                    // follows; _no_skip variants keep whitespace visible so
                    // `a - b` is not joined.
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // `123.` means the tokenizer swallowed the
                                // separating period; keep only the digit
                                // prefix and report end_with_period = true.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A numeric tail must be followed by
                                // whitespace — unless a period comes next.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    if requires_whitespace {
                        // Enforce the whitespace requirement recorded above.
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
13914
13915 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
13917 if self.consume_token(&Token::LParen) {
13918 if self.peek_token_ref().token == Token::RParen {
13919 self.next_token();
13920 Ok(vec![])
13921 } else {
13922 let cols = self.parse_comma_separated_with_trailing_commas(
13923 Parser::parse_view_column,
13924 self.dialect.supports_column_definition_trailing_commas(),
13925 Self::is_reserved_for_column_alias,
13926 )?;
13927 self.expect_token(&Token::RParen)?;
13928 Ok(cols)
13929 }
13930 } else {
13931 Ok(vec![])
13932 }
13933 }
13934
13935 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
13937 let name = self.parse_identifier()?;
13938 let options = self.parse_view_column_options()?;
13939 let data_type = if dialect_of!(self is ClickHouseDialect) {
13940 Some(self.parse_data_type()?)
13941 } else {
13942 None
13943 };
13944 Ok(ViewColumnDef {
13945 name,
13946 data_type,
13947 options,
13948 })
13949 }
13950
13951 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
13952 let mut options = Vec::new();
13953 loop {
13954 let option = self.parse_optional_column_option()?;
13955 if let Some(option) = option {
13956 options.push(option);
13957 } else {
13958 break;
13959 }
13960 }
13961 if options.is_empty() {
13962 Ok(None)
13963 } else if self.dialect.supports_space_separated_column_options() {
13964 Ok(Some(ColumnOptions::SpaceSeparated(options)))
13965 } else {
13966 Ok(Some(ColumnOptions::CommaSeparated(options)))
13967 }
13968 }
13969
13970 pub fn parse_parenthesized_column_list(
13973 &mut self,
13974 optional: IsOptional,
13975 allow_empty: bool,
13976 ) -> Result<Vec<Ident>, ParserError> {
13977 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
13978 }
13979
13980 pub fn parse_parenthesized_compound_identifier_list(
13982 &mut self,
13983 optional: IsOptional,
13984 allow_empty: bool,
13985 ) -> Result<Vec<Expr>, ParserError> {
13986 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13987 Ok(Expr::CompoundIdentifier(
13988 p.parse_period_separated(|p| p.parse_identifier())?,
13989 ))
13990 })
13991 }
13992
13993 fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
13996 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
13997 p.parse_create_index_expr()
13998 })
13999 }
14000
14001 pub fn parse_parenthesized_qualified_column_list(
14004 &mut self,
14005 optional: IsOptional,
14006 allow_empty: bool,
14007 ) -> Result<Vec<ObjectName>, ParserError> {
14008 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
14009 p.parse_object_name(true)
14010 })
14011 }
14012
14013 fn parse_parenthesized_column_list_inner<F, T>(
14016 &mut self,
14017 optional: IsOptional,
14018 allow_empty: bool,
14019 mut f: F,
14020 ) -> Result<Vec<T>, ParserError>
14021 where
14022 F: FnMut(&mut Parser) -> Result<T, ParserError>,
14023 {
14024 if self.consume_token(&Token::LParen) {
14025 if allow_empty && self.peek_token_ref().token == Token::RParen {
14026 self.next_token();
14027 Ok(vec![])
14028 } else {
14029 let cols = self.parse_comma_separated(|p| f(p))?;
14030 self.expect_token(&Token::RParen)?;
14031 Ok(cols)
14032 }
14033 } else if optional == Optional {
14034 Ok(vec![])
14035 } else {
14036 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
14037 }
14038 }
14039
14040 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
14042 if self.consume_token(&Token::LParen) {
14043 let cols = self.parse_comma_separated(|p| {
14044 let name = p.parse_identifier()?;
14045 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
14046 Ok(TableAliasColumnDef { name, data_type })
14047 })?;
14048 self.expect_token(&Token::RParen)?;
14049 Ok(cols)
14050 } else {
14051 Ok(vec![])
14052 }
14053 }
14054
14055 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
14057 self.expect_token(&Token::LParen)?;
14058 let n = self.parse_literal_uint()?;
14059 self.expect_token(&Token::RParen)?;
14060 Ok(n)
14061 }
14062
14063 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
14065 if self.consume_token(&Token::LParen) {
14066 let n = self.parse_literal_uint()?;
14067 self.expect_token(&Token::RParen)?;
14068 Ok(Some(n))
14069 } else {
14070 Ok(None)
14071 }
14072 }
14073
    /// Parses the optional field qualifier of an INTERVAL type, e.g.
    /// `YEAR`, `DAY TO SECOND`, `MINUTE TO SECOND`.
    ///
    /// Returns `Ok(None)` when no interval-field keyword follows, so
    /// callers can treat the qualifier as absent.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            // YEAR or YEAR TO MONTH
            Some(Keyword::YEAR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            // DAY or DAY TO HOUR/MINUTE/SECOND
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        // Defensive: expect_one_of_keywords only yields the
                        // listed keywords; back up before reporting.
                        _ => {
                            self.prev_token();
                            self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            // HOUR or HOUR TO MINUTE/SECOND
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        // Defensive: should be unreachable (see above).
                        _ => {
                            self.prev_token();
                            self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            // MINUTE or MINUTE TO SECOND
            Some(Keyword::MINUTE) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            // MONTH and SECOND take no TO-range.
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            // Defensive: parse_one_of_keywords only returns listed keywords.
            Some(_) => {
                self.prev_token();
                self.expected_ref(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token_ref(),
                )
            }
            None => Ok(None),
        }
    }
14152
14153 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
14161 self.expect_keyword_is(Keyword::DATETIME64)?;
14162 self.expect_token(&Token::LParen)?;
14163 let precision = self.parse_literal_uint()?;
14164 let time_zone = if self.consume_token(&Token::Comma) {
14165 Some(self.parse_literal_string()?)
14166 } else {
14167 None
14168 };
14169 self.expect_token(&Token::RParen)?;
14170 Ok((precision, time_zone))
14171 }
14172
14173 pub fn parse_optional_character_length(
14175 &mut self,
14176 ) -> Result<Option<CharacterLength>, ParserError> {
14177 if self.consume_token(&Token::LParen) {
14178 let character_length = self.parse_character_length()?;
14179 self.expect_token(&Token::RParen)?;
14180 Ok(Some(character_length))
14181 } else {
14182 Ok(None)
14183 }
14184 }
14185
14186 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
14188 if self.consume_token(&Token::LParen) {
14189 let binary_length = self.parse_binary_length()?;
14190 self.expect_token(&Token::RParen)?;
14191 Ok(Some(binary_length))
14192 } else {
14193 Ok(None)
14194 }
14195 }
14196
14197 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
14199 if self.parse_keyword(Keyword::MAX) {
14200 return Ok(CharacterLength::Max);
14201 }
14202 let length = self.parse_literal_uint()?;
14203 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
14204 Some(CharLengthUnits::Characters)
14205 } else if self.parse_keyword(Keyword::OCTETS) {
14206 Some(CharLengthUnits::Octets)
14207 } else {
14208 None
14209 };
14210 Ok(CharacterLength::IntegerLength { length, unit })
14211 }
14212
14213 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
14215 if self.parse_keyword(Keyword::MAX) {
14216 return Ok(BinaryLength::Max);
14217 }
14218 let length = self.parse_literal_uint()?;
14219 Ok(BinaryLength::IntegerLength { length })
14220 }
14221
14222 pub fn parse_optional_precision_scale(
14224 &mut self,
14225 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14226 if self.consume_token(&Token::LParen) {
14227 let n = self.parse_literal_uint()?;
14228 let scale = if self.consume_token(&Token::Comma) {
14229 Some(self.parse_literal_uint()?)
14230 } else {
14231 None
14232 };
14233 self.expect_token(&Token::RParen)?;
14234 Ok((Some(n), scale))
14235 } else {
14236 Ok((None, None))
14237 }
14238 }
14239
14240 pub fn parse_exact_number_optional_precision_scale(
14242 &mut self,
14243 ) -> Result<ExactNumberInfo, ParserError> {
14244 if self.consume_token(&Token::LParen) {
14245 let precision = self.parse_literal_uint()?;
14246 let scale = if self.consume_token(&Token::Comma) {
14247 Some(self.parse_signed_integer()?)
14248 } else {
14249 None
14250 };
14251
14252 self.expect_token(&Token::RParen)?;
14253
14254 match scale {
14255 None => Ok(ExactNumberInfo::Precision(precision)),
14256 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14257 }
14258 } else {
14259 Ok(ExactNumberInfo::None)
14260 }
14261 }
14262
14263 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14265 let is_negative = self.consume_token(&Token::Minus);
14266
14267 if !is_negative {
14268 let _ = self.consume_token(&Token::Plus);
14269 }
14270
14271 let current_token = self.peek_token_ref();
14272 match ¤t_token.token {
14273 Token::Number(s, _) => {
14274 let s = s.clone();
14275 let span_start = current_token.span.start;
14276 self.advance_token();
14277 let value = Self::parse::<i64>(s, span_start)?;
14278 Ok(if is_negative { -value } else { value })
14279 }
14280 _ => self.expected_ref("number", current_token),
14281 }
14282 }
14283
14284 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
14286 if self.consume_token(&Token::LParen) {
14287 let mut modifiers = Vec::new();
14288 loop {
14289 let next_token = self.next_token();
14290 match next_token.token {
14291 Token::Word(w) => modifiers.push(w.to_string()),
14292 Token::Number(n, _) => modifiers.push(n),
14293 Token::SingleQuotedString(s) => modifiers.push(s),
14294
14295 Token::Comma => {
14296 continue;
14297 }
14298 Token::RParen => {
14299 break;
14300 }
14301 _ => self.expected("type modifiers", next_token)?,
14302 }
14303 }
14304
14305 Ok(Some(modifiers))
14306 } else {
14307 Ok(None)
14308 }
14309 }
14310
14311 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14313 where
14314 F: FnOnce(Box<DataType>) -> DataType,
14315 {
14316 self.expect_token(&Token::LParen)?;
14317 let inside_type = self.parse_data_type()?;
14318 self.expect_token(&Token::RParen)?;
14319 Ok(parent_type(inside_type.into()))
14320 }
14321
14322 fn parse_delete_setexpr_boxed(
14326 &mut self,
14327 delete_token: TokenWithSpan,
14328 ) -> Result<Box<SetExpr>, ParserError> {
14329 Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
14330 }
14331
    /// Parses a DELETE statement; the DELETE keyword has already been
    /// consumed and its token is passed in for span tracking.
    ///
    /// Handles the multi-table form `DELETE t1, t2 FROM ...`, dialects
    /// where FROM may be omitted, plus OUTPUT, USING, WHERE, RETURNING,
    /// ORDER BY, and LIMIT clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Determine which tables (if any) precede FROM, and whether the
        // FROM keyword is present at all.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                // These dialects allow omitting FROM entirely.
                (vec![], false)
            } else {
                // Multi-table form: `DELETE t1, t2 FROM ...`.
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        // Optional OUTPUT clause, if the dialect produces one.
        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // Trailing ORDER BY / LIMIT (accepted here for dialects that allow
        // them on DELETE).
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            // Record whether the FROM keyword appeared, so the AST can be
            // rendered back faithfully.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
14396
14397 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
14400 let modifier_keyword =
14401 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
14402
14403 let id = self.parse_literal_uint()?;
14404
14405 let modifier = match modifier_keyword {
14406 Some(Keyword::CONNECTION) => Some(KillType::Connection),
14407 Some(Keyword::QUERY) => Some(KillType::Query),
14408 Some(Keyword::MUTATION) => {
14409 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
14410 Some(KillType::Mutation)
14411 } else {
14412 self.expected_ref(
14413 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
14414 self.peek_token_ref(),
14415 )?
14416 }
14417 }
14418 _ => None,
14419 };
14420
14421 Ok(Statement::Kill { modifier, id })
14422 }
14423
    /// Parses an EXPLAIN/DESCRIBE statement (the alias keyword has already
    /// been consumed and is passed in as `describe_alias`).
    ///
    /// If a statement follows, produces `Statement::Explain`; otherwise
    /// falls back to describing a table (`Statement::ExplainTable`).
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options (`EXPLAIN (opt, ...)`) take
        // precedence when the dialect supports them; otherwise try
        // QUERY PLAN, ESTIMATE, or the ANALYZE/VERBOSE/FORMAT modifiers.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // Nested EXPLAIN is rejected: it must be the root statement.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // No parsable statement: treat the input as a DESCRIBE of a
                // table, with optional Hive EXTENDED/FORMATTED and a
                // dialect-dependent TABLE keyword.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14494
    /// Parses a complete query: an optional WITH clause, the query body,
    /// and trailing clauses (ORDER BY, LIMIT/OFFSET, SETTINGS, FETCH,
    /// FOR/locks, FORMAT, pipe operators).
    ///
    /// INSERT/UPDATE/DELETE/MERGE are also accepted as the body so they can
    /// carry a leading WITH clause; in that case no trailing clauses are
    /// parsed. Recursion is bounded by the parser's recursion counter (and
    /// the optional `recursive-protection` feature).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Consume one unit of recursion budget; restored when _guard drops.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // Data-modifying bodies: each builds a Query with all trailing
        // clauses defaulted.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            // Ordinary query body (SELECT, set operations, etc.).
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // Each FOR introduces either a single for-clause (which ends
            // the loop) or one lock specification; locks may repeat.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // Trailing `FORMAT <ident> | NULL` for dialects supporting it.
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14626
    /// Parse a sequence of pipe operators, each introduced by the `|>` token
    /// (`Token::VerticalBarRightAngleBracket`), stopping at the first token
    /// that is not `|>`.
    ///
    /// Each operator must begin with one of the keywords listed below;
    /// `expect_one_of_keywords` rejects anything else with a parse error.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    // `|> LIMIT <expr> [OFFSET <expr>]`
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // Aggregate expressions before GROUP BY apply to the full
                    // table; if GROUP follows immediately, there are none.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::INTERSECT => {
                    // INTERSECT/EXCEPT pipe operators require an explicit
                    // DISTINCT (or ALL) quantifier.
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    // `|> CALL fn(args) [alias]` — the parsed expression must
                    // actually be a function call.
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // `|> PIVOT (aggs FOR col IN (values)) [alias]`
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // `|> UNPIVOT (value FOR name IN (cols)) [alias]`
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Rewind so the join keyword is re-read by the shared
                    // join parser; exactly one join is allowed per operator.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                // Unreachable in practice: the keyword list above and this
                // match must stay in sync.
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
14845
14846 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
14847 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
14848 {
14849 let key_values = self.parse_comma_separated(|p| {
14850 let key = p.parse_identifier()?;
14851 p.expect_token(&Token::Eq)?;
14852 let value = p.parse_expr()?;
14853 Ok(Setting { key, value })
14854 })?;
14855 Some(key_values)
14856 } else {
14857 None
14858 };
14859 Ok(settings)
14860 }
14861
14862 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
14864 if self.parse_keyword(Keyword::XML) {
14865 Ok(Some(self.parse_for_xml()?))
14866 } else if self.parse_keyword(Keyword::JSON) {
14867 Ok(Some(self.parse_for_json()?))
14868 } else if self.parse_keyword(Keyword::BROWSE) {
14869 Ok(Some(ForClause::Browse))
14870 } else {
14871 Ok(None)
14872 }
14873 }
14874
14875 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
14877 let for_xml = if self.parse_keyword(Keyword::RAW) {
14878 let mut element_name = None;
14879 if self.peek_token_ref().token == Token::LParen {
14880 self.expect_token(&Token::LParen)?;
14881 element_name = Some(self.parse_literal_string()?);
14882 self.expect_token(&Token::RParen)?;
14883 }
14884 ForXml::Raw(element_name)
14885 } else if self.parse_keyword(Keyword::AUTO) {
14886 ForXml::Auto
14887 } else if self.parse_keyword(Keyword::EXPLICIT) {
14888 ForXml::Explicit
14889 } else if self.parse_keyword(Keyword::PATH) {
14890 let mut element_name = None;
14891 if self.peek_token_ref().token == Token::LParen {
14892 self.expect_token(&Token::LParen)?;
14893 element_name = Some(self.parse_literal_string()?);
14894 self.expect_token(&Token::RParen)?;
14895 }
14896 ForXml::Path(element_name)
14897 } else {
14898 return Err(ParserError::ParserError(
14899 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
14900 ));
14901 };
14902 let mut elements = false;
14903 let mut binary_base64 = false;
14904 let mut root = None;
14905 let mut r#type = false;
14906 while self.peek_token_ref().token == Token::Comma {
14907 self.next_token();
14908 if self.parse_keyword(Keyword::ELEMENTS) {
14909 elements = true;
14910 } else if self.parse_keyword(Keyword::BINARY) {
14911 self.expect_keyword_is(Keyword::BASE64)?;
14912 binary_base64 = true;
14913 } else if self.parse_keyword(Keyword::ROOT) {
14914 self.expect_token(&Token::LParen)?;
14915 root = Some(self.parse_literal_string()?);
14916 self.expect_token(&Token::RParen)?;
14917 } else if self.parse_keyword(Keyword::TYPE) {
14918 r#type = true;
14919 }
14920 }
14921 Ok(ForClause::Xml {
14922 for_xml,
14923 elements,
14924 binary_base64,
14925 root,
14926 r#type,
14927 })
14928 }
14929
14930 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
14932 let for_json = if self.parse_keyword(Keyword::AUTO) {
14933 ForJson::Auto
14934 } else if self.parse_keyword(Keyword::PATH) {
14935 ForJson::Path
14936 } else {
14937 return Err(ParserError::ParserError(
14938 "Expected FOR JSON [AUTO | PATH ]".to_string(),
14939 ));
14940 };
14941 let mut root = None;
14942 let mut include_null_values = false;
14943 let mut without_array_wrapper = false;
14944 while self.peek_token_ref().token == Token::Comma {
14945 self.next_token();
14946 if self.parse_keyword(Keyword::ROOT) {
14947 self.expect_token(&Token::LParen)?;
14948 root = Some(self.parse_literal_string()?);
14949 self.expect_token(&Token::RParen)?;
14950 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
14951 include_null_values = true;
14952 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
14953 without_array_wrapper = true;
14954 }
14955 }
14956 Ok(ForClause::Json {
14957 for_json,
14958 root,
14959 include_null_values,
14960 without_array_wrapper,
14961 })
14962 }
14963
    /// Parse a single common table expression after `WITH`:
    /// `<name> [(columns)] AS [MATERIALIZED | NOT MATERIALIZED] (<query>) [FROM <ident>]`,
    /// including the dialect-specific form without `AS`.
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // Dialects that allow omitting AS: speculatively try `<name> (<query>)`.
        // maybe_parse backtracks if the parenthesized body is not a query.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Optional column list before AS. When AS is optional in the dialect,
        // a missing AS after the column list is tolerated.
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // Postgres-only: [NOT] MATERIALIZED between AS and the query.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        // Trailing `FROM <ident>` is only recognized for FROM-first dialects
        // on this code path (the no-AS path above accepts it unconditionally).
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
15038
15039 pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
15048 let expr = if self.peek_keyword(Keyword::SELECT)
15051 || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
15052 {
15053 SetExpr::Select(self.parse_select().map(Box::new)?)
15054 } else if self.consume_token(&Token::LParen) {
15055 let subquery = self.parse_query()?;
15057 self.expect_token(&Token::RParen)?;
15058 SetExpr::Query(subquery)
15059 } else if self.parse_keyword(Keyword::VALUES) {
15060 let is_mysql = dialect_of!(self is MySqlDialect);
15061 SetExpr::Values(self.parse_values(is_mysql, false)?)
15062 } else if self.parse_keyword(Keyword::VALUE) {
15063 let is_mysql = dialect_of!(self is MySqlDialect);
15064 SetExpr::Values(self.parse_values(is_mysql, true)?)
15065 } else if self.parse_keyword(Keyword::TABLE) {
15066 SetExpr::Table(Box::new(self.parse_as_table()?))
15067 } else {
15068 return self.expected_ref(
15069 "SELECT, VALUES, or a subquery in the query body",
15070 self.peek_token_ref(),
15071 );
15072 };
15073
15074 self.parse_remaining_set_exprs(expr, precedence)
15075 }
15076
    /// Given an already-parsed leftmost operand, keep folding in
    /// `UNION` / `EXCEPT` / `INTERSECT` / `MINUS` right-hand sides while the
    /// next operator binds tighter than `precedence` (precedence climbing).
    fn parse_remaining_set_exprs(
        &mut self,
        mut expr: SetExpr,
        precedence: u8,
    ) -> Result<Box<SetExpr>, ParserError> {
        loop {
            // Peek the next token; None means no set operator follows.
            let op = self.parse_set_operator(&self.peek_token().token);
            // INTERSECT (20) binds tighter than UNION/EXCEPT/MINUS (10).
            let next_precedence = match op {
                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
                    10
                }
                Some(SetOperator::Intersect) => 20,
                None => break,
            };
            if precedence >= next_precedence {
                break;
            }
            // Consume the operator keyword, then any ALL/DISTINCT/BY NAME quantifier.
            self.next_token();
            let set_quantifier = self.parse_set_quantifier(&op);
            expr = SetExpr::SetOperation {
                left: Box::new(expr),
                op: op.unwrap(),
                set_quantifier,
                right: self.parse_query_body(next_precedence)?,
            };
        }

        Ok(expr.into())
    }
15113
15114 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
15116 match token {
15117 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
15118 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
15119 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
15120 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
15121 _ => None,
15122 }
15123 }
15124
15125 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
15127 match op {
15128 Some(
15129 SetOperator::Except
15130 | SetOperator::Intersect
15131 | SetOperator::Union
15132 | SetOperator::Minus,
15133 ) => {
15134 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
15135 SetQuantifier::DistinctByName
15136 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15137 SetQuantifier::ByName
15138 } else if self.parse_keyword(Keyword::ALL) {
15139 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15140 SetQuantifier::AllByName
15141 } else {
15142 SetQuantifier::All
15143 }
15144 } else if self.parse_keyword(Keyword::DISTINCT) {
15145 SetQuantifier::Distinct
15146 } else {
15147 SetQuantifier::None
15148 }
15149 }
15150 _ => SetQuantifier::None,
15151 }
15152 }
15153
    /// Parse a `SELECT` statement body (everything up to, but not including,
    /// `ORDER BY` / `LIMIT`), including dialect-specific pieces: FROM-first
    /// selects, MySQL select modifiers, `PREWHERE`, lateral views, `QUALIFY`,
    /// named windows and BigQuery value-table modes.
    ///
    /// Clause parsing order below mirrors the grammar and must not be
    /// reordered.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        // For FROM-first dialects, the FROM clause may precede — or entirely
        // replace — the SELECT keyword.
        let mut from_first = None;

        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                // `FROM t` with no SELECT at all: a select with an empty
                // projection, anchored to the FROM token.
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        // Comment-embedded optimizer hints directly after SELECT.
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // BigQuery `AS VALUE` / `AS STRUCT` (None for other dialects).
        let value_table_mode = self.parse_value_table_mode()?;

        // MySQL-style modifiers may themselves include ALL/DISTINCT.
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // Some dialects put TOP before DISTINCT, others after; track which.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // Prefer distinctness already consumed by the modifier parser.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM t` with no projection at all.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Either reuse the FROM parsed before SELECT (FROM-first form) or
        // parse one here; the bool records which flavor this select is.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive-style `LATERAL VIEW [OUTER] expr name AS aliases`, repeated.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ]) })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; remember which came
        // first so the statement can be round-tripped faithfully.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15381
    /// Collect optimizer hints embedded in comments (e.g. `/*+ ... */`)
    /// immediately following `SELECT`, for dialects that support them.
    ///
    /// Scans raw (non-skipping) tokens: comments whose text matches the
    /// `<prefix>+<text>` hint shape are collected, plain whitespace is
    /// consumed, and the scan stops at the first non-whitespace token.
    fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
        let supports_hints = self.dialect.supports_comment_optimizer_hint();
        if !supports_hints {
            return Ok(vec![]);
        }
        let mut hints = vec![];
        loop {
            // Peek without skipping whitespace — comments are whitespace
            // tokens to the tokenizer.
            let t = self.peek_nth_token_no_skip_ref(0);
            let Token::Whitespace(ws) = &t.token else {
                break;
            };
            match ws {
                Whitespace::SingleLineComment { comment, prefix } => {
                    // Non-hint comments are silently consumed.
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::SingleLine {
                                prefix: prefix.clone(),
                            },
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::MultiLineComment(comment) => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::MultiLine,
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
                    self.next_token_no_skip();
                }
            }
        }
        Ok(hints)
    }
15431
15432 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15435 let (before_plus, text) = comment.split_once('+')?;
15436 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15437 Some((before_plus.to_string(), text.to_string()))
15438 } else {
15439 None
15440 }
15441 }
15442
15443 fn parse_select_modifiers(
15450 &mut self,
15451 ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
15452 let mut modifiers = SelectModifiers::default();
15453 let mut distinct = None;
15454
15455 let keywords = &[
15456 Keyword::ALL,
15457 Keyword::DISTINCT,
15458 Keyword::DISTINCTROW,
15459 Keyword::HIGH_PRIORITY,
15460 Keyword::STRAIGHT_JOIN,
15461 Keyword::SQL_SMALL_RESULT,
15462 Keyword::SQL_BIG_RESULT,
15463 Keyword::SQL_BUFFER_RESULT,
15464 Keyword::SQL_NO_CACHE,
15465 Keyword::SQL_CALC_FOUND_ROWS,
15466 ];
15467
15468 while let Some(keyword) = self.parse_one_of_keywords(keywords) {
15469 match keyword {
15470 Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
15471 self.prev_token();
15472 distinct = self.parse_all_or_distinct()?;
15473 }
15474 Keyword::DISTINCTROW if distinct.is_none() => {
15476 distinct = Some(Distinct::Distinct);
15477 }
15478 Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
15479 Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
15480 Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
15481 Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
15482 Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
15483 Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
15484 Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
15485 _ => {
15486 self.prev_token();
15487 return self.expected_ref(
15488 "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
15489 self.peek_token_ref(),
15490 );
15491 }
15492 }
15493 }
15494
15495 let select_modifiers = if modifiers.is_any_set() {
15498 Some(modifiers)
15499 } else {
15500 None
15501 };
15502 Ok((select_modifiers, distinct))
15503 }
15504
15505 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
15506 if !dialect_of!(self is BigQueryDialect) {
15507 return Ok(None);
15508 }
15509
15510 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
15511 Some(ValueTableMode::DistinctAsValue)
15512 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
15513 Some(ValueTableMode::DistinctAsStruct)
15514 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
15515 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
15516 {
15517 Some(ValueTableMode::AsValue)
15518 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
15519 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
15520 {
15521 Some(ValueTableMode::AsStruct)
15522 } else if self.parse_keyword(Keyword::AS) {
15523 self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
15524 } else {
15525 None
15526 };
15527
15528 Ok(mode)
15529 }
15530
15531 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15535 where
15536 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15537 {
15538 let current_state = self.state;
15539 self.state = state;
15540 let res = f(self);
15541 self.state = current_state;
15542 res
15543 }
15544
15545 pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
15547 let mut clauses = Vec::with_capacity(2);
15548 loop {
15549 if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
15550 clauses.push(ConnectByKind::StartWith {
15551 start_token: self.token_at(idx).clone().into(),
15552 condition: self.parse_expr()?.into(),
15553 });
15554 } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
15555 {
15556 clauses.push(ConnectByKind::ConnectBy {
15557 connect_token: self.token_at(idx).clone().into(),
15558 nocycle: self.parse_keyword(Keyword::NOCYCLE),
15559 relationships: self.with_state(ParserState::ConnectBy, |parser| {
15560 parser.parse_comma_separated(Parser::parse_expr)
15561 })?,
15562 });
15563 } else {
15564 break;
15565 }
15566 }
15567 Ok(clauses)
15568 }
15569
    /// Parse the operand of a `TABLE` query body: either `schema.table` or a
    /// bare `table` name.
    ///
    /// NOTE(review): three tokens are always consumed up front. In the
    /// bare-name branch the second and third tokens are not put back, so
    /// whatever followed the table name has already been consumed — confirm
    /// this is intentional for the callers of `TABLE <name>`.
    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
        let token1 = self.next_token();
        let token2 = self.next_token();
        let token3 = self.next_token();

        let table_name;
        let schema_name;
        // A period as the second token means the qualified form `schema.table`.
        if token2 == Token::Period {
            match token1.token {
                Token::Word(w) => {
                    schema_name = w.value;
                }
                _ => {
                    return self.expected("Schema name", token1);
                }
            }
            match token3.token {
                Token::Word(w) => {
                    table_name = w.value;
                }
                _ => {
                    return self.expected("Table name", token3);
                }
            }
            Ok(Table {
                table_name: Some(table_name),
                schema_name: Some(schema_name),
            })
        } else {
            // Unqualified form: only the first token carries the table name.
            match token1.token {
                Token::Word(w) => {
                    table_name = w.value;
                }
                _ => {
                    return self.expected("Table name", token1);
                }
            }
            Ok(Table {
                table_name: Some(table_name),
                schema_name: None,
            })
        }
    }
15614
15615 fn parse_set_role(
15617 &mut self,
15618 modifier: Option<ContextModifier>,
15619 ) -> Result<Statement, ParserError> {
15620 self.expect_keyword_is(Keyword::ROLE)?;
15621
15622 let role_name = if self.parse_keyword(Keyword::NONE) {
15623 None
15624 } else {
15625 Some(self.parse_identifier()?)
15626 };
15627 Ok(Statement::Set(Set::SetRole {
15628 context_modifier: modifier,
15629 role_name,
15630 }))
15631 }
15632
15633 fn parse_set_values(
15634 &mut self,
15635 parenthesized_assignment: bool,
15636 ) -> Result<Vec<Expr>, ParserError> {
15637 let mut values = vec![];
15638
15639 if parenthesized_assignment {
15640 self.expect_token(&Token::LParen)?;
15641 }
15642
15643 loop {
15644 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15645 expr
15646 } else if let Ok(expr) = self.parse_expr() {
15647 expr
15648 } else {
15649 self.expected_ref("variable value", self.peek_token_ref())?
15650 };
15651
15652 values.push(value);
15653 if self.consume_token(&Token::Comma) {
15654 continue;
15655 }
15656
15657 if parenthesized_assignment {
15658 self.expect_token(&Token::RParen)?;
15659 }
15660 return Ok(values);
15661 }
15662 }
15663
15664 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15665 let modifier =
15666 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15667
15668 Self::keyword_to_modifier(modifier)
15669 }
15670
15671 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15673 let scope = self.parse_context_modifier();
15674
15675 let name = if self.dialect.supports_parenthesized_set_variables()
15676 && self.consume_token(&Token::LParen)
15677 {
15678 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15682 } else {
15683 self.parse_object_name(false)?
15684 };
15685
15686 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15687 return self.expected_ref("assignment operator", self.peek_token_ref());
15688 }
15689
15690 let value = self.parse_expr()?;
15691
15692 Ok(SetAssignment { scope, name, value })
15693 }
15694
    /// Parses the body of a `SET` statement (the `SET` keyword itself has
    /// already been consumed). Dispatches between the many dialect-specific
    /// forms: `SET ROLE`, `SET TIME ZONE`, `SET NAMES`, `SET TRANSACTION`,
    /// `SET ... AUTHORIZATION`, comma-separated assignment lists,
    /// parenthesized multi-variable assignments, and plain `var = value`.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive's `SET HIVEVAR:name = value` form.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // Optional SESSION/LOCAL/GLOBAL scope modifier; not parsed when
        // HIVEVAR was present.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET ROLE ...` — tried speculatively so we can fall through on failure.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`.
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // With an operator, treat it as an ordinary assignment to a
                // variable literally named "TIMEZONE".
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // Operator-less `SET TIME ZONE <expr>` (Postgres style).
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // MySQL-style `SET NAMES {DEFAULT | <charset> [COLLATE <collation>]}`.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET [SESSION] CHARACTERISTICS AS TRANSACTION <modes>`.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT '<id>'` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET {SESSION | LOCAL} AUTHORIZATION {DEFAULT | <user>}` — a
            // scope modifier is mandatory for this form.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            // Each assignment re-parses its own scope modifier (see
            // `parse_set_assignment`), so rewind the one consumed above.
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten to SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a parenthesized variable list `(a, b, c)` or a single
        // (possibly qualified) variable name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // Some dialects allow `SET <param> <value>` with no `=`/`TO`; rewind
        // the just-parsed name token and retry as a session parameter.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
15864
    /// Parses session-parameter `SET` statements (MSSQL-style), e.g.
    /// `SET STATISTICS IO ON`, `SET IDENTITY_INSERT t ON`,
    /// `SET OFFSETS kw, ... ON`, or the generic `SET <names> <expr>` form.
    /// Called with the parser positioned at the parameter name.
    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::STATISTICS) {
            // `SET STATISTICS {IO | PROFILE | TIME | XML} {ON | OFF}`.
            let topic = match self.parse_one_of_keywords(&[
                Keyword::IO,
                Keyword::PROFILE,
                Keyword::TIME,
                Keyword::XML,
            ]) {
                Some(Keyword::IO) => SessionParamStatsTopic::IO,
                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
                _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
            };
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
                    topic,
                    value,
                }))
                .into(),
            )
        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
            // `SET IDENTITY_INSERT <table> {ON | OFF}`.
            let obj = self.parse_object_name(false)?;
            let value = self.parse_session_param_value()?;
            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
                SetSessionParamIdentityInsert { obj, value },
            ))
            .into())
        } else if self.parse_keyword(Keyword::OFFSETS) {
            // `SET OFFSETS <keyword list> {ON | OFF}` — the keywords are kept
            // as their raw string spellings.
            let keywords = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match &next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("SQL keyword", next_token),
                }
            })?;
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
                    keywords,
                    value,
                }))
                .into(),
            )
        } else {
            // Fallback: one or more parameter names followed by an arbitrary
            // expression value, stored as the expression's string rendering.
            let names = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("Session param name", next_token),
                }
            })?;
            let value = self.parse_expr()?.to_string();
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
                    names,
                    value,
                }))
                .into(),
            )
        }
    }
15929
15930 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
15931 if self.parse_keyword(Keyword::ON) {
15932 Ok(SessionParamValue::On)
15933 } else if self.parse_keyword(Keyword::OFF) {
15934 Ok(SessionParamValue::Off)
15935 } else {
15936 self.expected_ref("ON or OFF", self.peek_token_ref())
15937 }
15938 }
15939
    /// Parses a `SHOW` statement (the `SHOW` keyword has already been
    /// consumed), dispatching on the following keyword(s). Leading modifiers
    /// (TERSE/EXTENDED/FULL/SESSION/GLOBAL/EXTERNAL) are consumed up front
    /// and forwarded to the variants that accept them.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // Only the variants above accept EXTENDED/FULL.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <identifiers>` for an arbitrary variable.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
16003
16004 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
16005 Ok(Statement::ShowCharset(ShowCharset {
16007 is_shorthand,
16008 filter: self.parse_show_statement_filter()?,
16009 }))
16010 }
16011
16012 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
16013 let history = self.parse_keyword(Keyword::HISTORY);
16014 let show_options = self.parse_show_stmt_options()?;
16015 Ok(Statement::ShowCatalogs {
16016 terse,
16017 history,
16018 show_options,
16019 })
16020 }
16021
16022 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
16023 let history = self.parse_keyword(Keyword::HISTORY);
16024 let show_options = self.parse_show_stmt_options()?;
16025 Ok(Statement::ShowDatabases {
16026 terse,
16027 history,
16028 show_options,
16029 })
16030 }
16031
16032 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
16033 let history = self.parse_keyword(Keyword::HISTORY);
16034 let show_options = self.parse_show_stmt_options()?;
16035 Ok(Statement::ShowSchemas {
16036 terse,
16037 history,
16038 show_options,
16039 })
16040 }
16041
16042 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
16044 let obj_type = match self.expect_one_of_keywords(&[
16045 Keyword::TABLE,
16046 Keyword::TRIGGER,
16047 Keyword::FUNCTION,
16048 Keyword::PROCEDURE,
16049 Keyword::EVENT,
16050 Keyword::VIEW,
16051 ])? {
16052 Keyword::TABLE => Ok(ShowCreateObject::Table),
16053 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
16054 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
16055 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
16056 Keyword::EVENT => Ok(ShowCreateObject::Event),
16057 Keyword::VIEW => Ok(ShowCreateObject::View),
16058 keyword => Err(ParserError::ParserError(format!(
16059 "Unable to map keyword to ShowCreateObject: {keyword:?}"
16060 ))),
16061 }?;
16062
16063 let obj_name = self.parse_object_name(false)?;
16064
16065 Ok(Statement::ShowCreate { obj_type, obj_name })
16066 }
16067
16068 pub fn parse_show_columns(
16070 &mut self,
16071 extended: bool,
16072 full: bool,
16073 ) -> Result<Statement, ParserError> {
16074 let show_options = self.parse_show_stmt_options()?;
16075 Ok(Statement::ShowColumns {
16076 extended,
16077 full,
16078 show_options,
16079 })
16080 }
16081
16082 fn parse_show_tables(
16083 &mut self,
16084 terse: bool,
16085 extended: bool,
16086 full: bool,
16087 external: bool,
16088 ) -> Result<Statement, ParserError> {
16089 let history = !external && self.parse_keyword(Keyword::HISTORY);
16090 let show_options = self.parse_show_stmt_options()?;
16091 Ok(Statement::ShowTables {
16092 terse,
16093 history,
16094 extended,
16095 full,
16096 external,
16097 show_options,
16098 })
16099 }
16100
16101 fn parse_show_views(
16102 &mut self,
16103 terse: bool,
16104 materialized: bool,
16105 ) -> Result<Statement, ParserError> {
16106 let show_options = self.parse_show_stmt_options()?;
16107 Ok(Statement::ShowViews {
16108 materialized,
16109 terse,
16110 show_options,
16111 })
16112 }
16113
16114 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
16116 let filter = self.parse_show_statement_filter()?;
16117 Ok(Statement::ShowFunctions { filter })
16118 }
16119
16120 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
16122 let filter = self.parse_show_statement_filter()?;
16123 Ok(Statement::ShowCollation { filter })
16124 }
16125
16126 pub fn parse_show_statement_filter(
16128 &mut self,
16129 ) -> Result<Option<ShowStatementFilter>, ParserError> {
16130 if self.parse_keyword(Keyword::LIKE) {
16131 Ok(Some(ShowStatementFilter::Like(
16132 self.parse_literal_string()?,
16133 )))
16134 } else if self.parse_keyword(Keyword::ILIKE) {
16135 Ok(Some(ShowStatementFilter::ILike(
16136 self.parse_literal_string()?,
16137 )))
16138 } else if self.parse_keyword(Keyword::WHERE) {
16139 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
16140 } else {
16141 self.maybe_parse(|parser| -> Result<String, ParserError> {
16142 parser.parse_literal_string()
16143 })?
16144 .map_or(Ok(None), |filter| {
16145 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
16146 })
16147 }
16148 }
16149
16150 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
16152 let parsed_keyword = if dialect_of!(self is HiveDialect) {
16154 if self.parse_keyword(Keyword::DEFAULT) {
16156 return Ok(Statement::Use(Use::Default));
16157 }
16158 None } else if dialect_of!(self is DatabricksDialect) {
16160 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
16161 } else if dialect_of!(self is SnowflakeDialect) {
16162 self.parse_one_of_keywords(&[
16163 Keyword::DATABASE,
16164 Keyword::SCHEMA,
16165 Keyword::WAREHOUSE,
16166 Keyword::ROLE,
16167 Keyword::SECONDARY,
16168 ])
16169 } else {
16170 None };
16172
16173 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
16174 self.parse_secondary_roles()?
16175 } else {
16176 let obj_name = self.parse_object_name(false)?;
16177 match parsed_keyword {
16178 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
16179 Some(Keyword::DATABASE) => Use::Database(obj_name),
16180 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
16181 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
16182 Some(Keyword::ROLE) => Use::Role(obj_name),
16183 _ => Use::Object(obj_name),
16184 }
16185 };
16186
16187 Ok(Statement::Use(result))
16188 }
16189
16190 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
16191 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
16192 if self.parse_keyword(Keyword::NONE) {
16193 Ok(Use::SecondaryRoles(SecondaryRoles::None))
16194 } else if self.parse_keyword(Keyword::ALL) {
16195 Ok(Use::SecondaryRoles(SecondaryRoles::All))
16196 } else {
16197 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16198 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
16199 }
16200 }
16201
16202 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
16204 let relation = self.parse_table_factor()?;
16205 let joins = self.parse_joins()?;
16209 Ok(TableWithJoins { relation, joins })
16210 }
16211
    /// Parses zero or more join clauses following a table factor, stopping at
    /// the first token that cannot begin a join.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // ClickHouse-style `GLOBAL` join modifier.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow an ON/USING constraint on CROSS JOIN.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // MSSQL `OUTER APPLY`.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // `ASOF JOIN <table> MATCH_CONDITION (<expr>) [constraint]`.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a `JoinOperator` variant constructor that is
                // applied to the constraint after the relation is parsed.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the LEFT/RIGHT keyword itself.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        // Consume ANTI, then require JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        // Consume SEMI, then require JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // Consume FULL, skip optional OUTER, require JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        // MySQL STRAIGHT_JOIN; consume the keyword.
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // When the dialect does not treat unparenthesized join chains
                // as left-associative, fold the rest of the chain into a
                // nested join under this relation.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16380
16381 fn peek_parens_less_nested_join(&self) -> bool {
16382 matches!(
16383 self.peek_token_ref().token,
16384 Token::Word(Word {
16385 keyword: Keyword::JOIN
16386 | Keyword::INNER
16387 | Keyword::LEFT
16388 | Keyword::RIGHT
16389 | Keyword::FULL,
16390 ..
16391 })
16392 )
16393 }
16394
    /// Parses a single table factor in a `FROM` clause: a derived table,
    /// `LATERAL` item, `TABLE(expr)`, parenthesized join, `VALUES` list,
    /// `UNNEST`, `JSON_TABLE`/`OPENJSON`/`XMLTABLE`, semantic view, Snowflake
    /// stage, or a plain (possibly parameterized) table name — with optional
    /// PIVOT/UNPIVOT and MATCH_RECOGNIZE suffixes.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        // Guard against stack exhaustion on deeply nested factors.
        let _guard = self.recursion_counter.try_decrease()?;
        if self.parse_keyword(Keyword::LATERAL) {
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                // LATERAL table function: `LATERAL fn(args) [alias]`.
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // `TABLE(<expr>) [alias]`.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // First try `(subquery)`, optionally followed by PIVOT/UNPIVOT;
            // maybe_parse rewinds on failure so we can fall through.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            // Otherwise this is a parenthesized join or a parenthesized
            // single table factor.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // `(a JOIN b ...)` — wrap as a nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // `((a JOIN b))` — the relation is already a nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if self.dialect.supports_parens_around_table_factor() {
                // `(table_factor) [alias]` — unwrap the parens, attaching the
                // outer alias to the inner factor if it has none.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            // An inner and an outer alias together are invalid.
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                self.expected_ref("joined table", self.peek_token_ref())
            }
        } else if self.dialect.supports_values_as_table_factor()
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // Bare `VALUES (...) [alias]` used as a table factor: wrap it in a
            // derived table around a synthetic Query.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
                sample: None,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // `UNNEST(expr, ...) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]`.
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // NOTE(review): expect_keywords consumes a leading WITH even when
            // OFFSET does not follow — presumably no other WITH-clause can
            // appear here; confirm before relying on it.
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // `JSON_TABLE(<expr>, <path> COLUMNS (...)) [alias]`.
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Rewind the `(` so the helper can consume it itself.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            // Rewind the `(` so the helper can consume it itself.
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else if self.peek_token_ref().token == Token::AtSign {
            // Snowflake stage reference, e.g. `@mystage/path`.
            self.parse_snowflake_stage_table_factor()
        } else {
            // Plain table name, possibly with PartiQL path, PARTITION list,
            // version clause, table-function args, sampling, alias and hints.
            let name = self.parse_object_name(true)?;

            let json_path = match &self.peek_token_ref().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Optional versioned/temporal table clause.
            let version = self.maybe_parse_table_version()?;

            // Table-valued function arguments.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Sampling may come before or after the alias, per dialect.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            // MySQL-style index hints (`USE INDEX ...` etc.).
            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // MSSQL-style `WITH (hint, ...)`; a bare `WITH` not followed by
            // `(` is rewound and left for the caller.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Zero or more PIVOT/UNPIVOT suffixes, applied left to right.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            // Optional MATCH_RECOGNIZE suffix.
            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
16731
16732 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16737 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16739
16740 let args = if self.consume_token(&Token::LParen) {
16742 Some(self.parse_table_function_args()?)
16743 } else {
16744 None
16745 };
16746
16747 let alias = self.maybe_parse_table_alias()?;
16748
16749 Ok(TableFactor::Table {
16750 name,
16751 alias,
16752 args,
16753 with_hints: vec![],
16754 version: None,
16755 partitions: vec![],
16756 with_ordinality: false,
16757 json_path: None,
16758 sample: None,
16759 index_hints: vec![],
16760 })
16761 }
16762
16763 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16764 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16765 TableSampleModifier::TableSample
16766 } else if self.parse_keyword(Keyword::SAMPLE) {
16767 TableSampleModifier::Sample
16768 } else {
16769 return Ok(None);
16770 };
16771 self.parse_table_sample(modifier).map(Some)
16772 }
16773
    /// Parses the body of a TABLESAMPLE/SAMPLE clause after the introducing
    /// keyword: an optional sampling method, then either a Hive
    /// `BUCKET x OUT OF y [ON expr]` spec or a (possibly parenthesized)
    /// quantity, followed by optional REPEATABLE/SEED and OFFSET clauses.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method keyword.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        let parenthesized = self.consume_token(&Token::LParen);

        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            // Hive bucket sampling: `(BUCKET <n> OUT OF <m> [ON <expr>])`.
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // Quantity: an expression, or a bare word treated as a
            // placeholder (e.g. a byte-length token such as `100M`).
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            // Optional ROWS/PERCENT unit.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic-seed clause.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional OFFSET expression (ClickHouse-style).
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
16868
16869 fn parse_table_sample_seed(
16870 &mut self,
16871 modifier: TableSampleSeedModifier,
16872 ) -> Result<TableSampleSeed, ParserError> {
16873 self.expect_token(&Token::LParen)?;
16874 let value = self.parse_number_value()?;
16875 self.expect_token(&Token::RParen)?;
16876 Ok(TableSampleSeed { modifier, value })
16877 }
16878
16879 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16882 self.expect_token(&Token::LParen)?;
16883 let json_expr = self.parse_expr()?;
16884 let json_path = if self.consume_token(&Token::Comma) {
16885 Some(self.parse_value()?)
16886 } else {
16887 None
16888 };
16889 self.expect_token(&Token::RParen)?;
16890 let columns = if self.parse_keyword(Keyword::WITH) {
16891 self.expect_token(&Token::LParen)?;
16892 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
16893 self.expect_token(&Token::RParen)?;
16894 columns
16895 } else {
16896 Vec::new()
16897 };
16898 let alias = self.maybe_parse_table_alias()?;
16899 Ok(TableFactor::OpenJsonTable {
16900 json_expr,
16901 json_path,
16902 columns,
16903 alias,
16904 })
16905 }
16906
    /// Parses an `XMLTABLE( [XMLNAMESPACES(...),] row_expr [PASSING ...]
    /// COLUMNS ... ) [alias]` table factor; the `XMLTABLE` keyword has
    /// already been consumed.
    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        // Optional namespace declarations; when present they are followed by
        // a mandatory comma before the row expression.
        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
            self.expect_token(&Token::LParen)?;
            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::Comma)?;
            namespaces
        } else {
            vec![]
        };
        let row_expression = self.parse_expr()?;
        let passing = self.parse_xml_passing_clause()?;
        self.expect_keyword_is(Keyword::COLUMNS)?;
        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::XmlTable {
            namespaces,
            row_expression,
            passing,
            columns,
            alias,
        })
    }
16932
16933 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
16934 let uri = self.parse_expr()?;
16935 self.expect_keyword_is(Keyword::AS)?;
16936 let name = self.parse_identifier()?;
16937 Ok(XmlNamespaceDefinition { uri, name })
16938 }
16939
    /// Parses a single column definition inside an XMLTABLE `COLUMNS` list:
    /// either `name FOR ORDINALITY` or
    /// `name type [PATH expr] [DEFAULT expr] [NOT NULL | NULL]`.
    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
        let name = self.parse_identifier()?;

        let option = if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword(Keyword::ORDINALITY)?;
            XmlTableColumnOption::ForOrdinality
        } else {
            let r#type = self.parse_data_type()?;
            let mut path = None;
            let mut default = None;

            if self.parse_keyword(Keyword::PATH) {
                path = Some(self.parse_expr()?);
            }

            if self.parse_keyword(Keyword::DEFAULT) {
                default = Some(self.parse_expr()?);
            }

            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
            if !not_null {
                // Consume an explicit bare `NULL` if present; it matches the
                // default nullability, so only the token is discarded.
                let _ = self.parse_keyword(Keyword::NULL);
            }

            XmlTableColumnOption::NamedInfo {
                r#type,
                path,
                default,
                // Columns are nullable unless `NOT NULL` was written.
                nullable: !not_null,
            }
        };
        Ok(XmlTableColumn { name, option })
    }
16974
    /// Parses an optional XMLTABLE `PASSING` clause: a comma-separated list
    /// of `[BY VALUE] expr [AS alias]` arguments. Returns an empty argument
    /// list when the `PASSING` keyword is absent.
    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
        let mut arguments = vec![];
        if self.parse_keyword(Keyword::PASSING) {
            loop {
                // NOTE(review): if `BY` appears without `VALUE`, the error from
                // `expect_keyword` is discarded and `BY` stays consumed, so the
                // failure surfaces later in `parse_expr` — confirm intended.
                let by_value =
                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
                let expr = self.parse_expr()?;
                let alias = if self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };
                arguments.push(XmlPassingArgument {
                    expr,
                    alias,
                    by_value,
                });
                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }
        }
        Ok(XmlPassingClause { arguments })
    }
16999
17000 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
17002 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
17003 self.expect_token(&Token::LParen)?;
17004
17005 let name = self.parse_object_name(true)?;
17006
17007 let mut dimensions = Vec::new();
17009 let mut metrics = Vec::new();
17010 let mut facts = Vec::new();
17011 let mut where_clause = None;
17012
17013 while self.peek_token_ref().token != Token::RParen {
17014 if self.parse_keyword(Keyword::DIMENSIONS) {
17015 if !dimensions.is_empty() {
17016 return Err(ParserError::ParserError(
17017 "DIMENSIONS clause can only be specified once".to_string(),
17018 ));
17019 }
17020 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17021 } else if self.parse_keyword(Keyword::METRICS) {
17022 if !metrics.is_empty() {
17023 return Err(ParserError::ParserError(
17024 "METRICS clause can only be specified once".to_string(),
17025 ));
17026 }
17027 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17028 } else if self.parse_keyword(Keyword::FACTS) {
17029 if !facts.is_empty() {
17030 return Err(ParserError::ParserError(
17031 "FACTS clause can only be specified once".to_string(),
17032 ));
17033 }
17034 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
17035 } else if self.parse_keyword(Keyword::WHERE) {
17036 if where_clause.is_some() {
17037 return Err(ParserError::ParserError(
17038 "WHERE clause can only be specified once".to_string(),
17039 ));
17040 }
17041 where_clause = Some(self.parse_expr()?);
17042 } else {
17043 let tok = self.peek_token_ref();
17044 return parser_err!(
17045 format!(
17046 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
17047 tok.token
17048 ),
17049 tok.span.start
17050 )?;
17051 }
17052 }
17053
17054 self.expect_token(&Token::RParen)?;
17055
17056 let alias = self.maybe_parse_table_alias()?;
17057
17058 Ok(TableFactor::SemanticView {
17059 name,
17060 dimensions,
17061 metrics,
17062 facts,
17063 where_clause,
17064 alias,
17065 })
17066 }
17067
    /// Parses a `MATCH_RECOGNIZE (...)` clause applied to `table`; the
    /// `MATCH_RECOGNIZE` keyword itself has already been consumed.
    ///
    /// Clauses are parsed strictly in this order, all optional except
    /// `PATTERN` and `DEFINE`:
    /// `( [PARTITION BY ...] [ORDER BY ...] [MEASURES ...]
    ///   [ONE ROW PER MATCH | ALL ROWS PER MATCH [mode]]
    ///   [AFTER MATCH SKIP ...] PATTERN (...) DEFINE ... ) [alias]`
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // MEASURES entries are `expr [AS] alias`; the AS keyword is optional
        // but the alias itself is mandatory.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // ALL ROWS PER MATCH may carry an empty-match handling mode.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // AFTER MATCH SKIP requires one of the four skip targets; anything
        // else after the three introducing keywords is an error.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        self.expect_keyword_is(Keyword::DEFINE)?;

        // DEFINE entries are `symbol AS expr` with a mandatory AS.
        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
17172
    /// Parses an atomic MATCH_RECOGNIZE pattern element: the `^` start anchor,
    /// the `$` end anchor, an `{- symbol -}` exclusion, `PERMUTE(...)`,
    /// a parenthesized group, or a plain named symbol.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // The tokenizer surfaces `$` as a placeholder token.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // PERMUTE is matched as an unquoted word rather than a keyword.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            _ => {
                // Anything else: put the token back and read it as a symbol name.
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
17211
    /// Parses a base pattern followed by any number of postfix repetition
    /// quantifiers — `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, `{n,m}` — wrapping
    /// the pattern built so far with each successive quantifier.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` is surfaced by the tokenizer as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            // NOTE(review): the error span here is the comma's,
                            // not the number's — confirm intended.
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,}` or `{n,m}` — lower bound present with a comma.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                _ => {
                    // Not a quantifier: put the token back and stop.
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17266
17267 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17268 let mut patterns = vec![self.parse_repetition_pattern()?];
17269 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17270 patterns.push(self.parse_repetition_pattern()?);
17271 }
17272 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17273 Ok([pattern]) => Ok(pattern),
17274 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17275 }
17276 }
17277
17278 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17279 let pattern = self.parse_concat_pattern()?;
17280 if self.consume_token(&Token::Pipe) {
17281 match self.parse_pattern()? {
17282 MatchRecognizePattern::Alternation(mut patterns) => {
17284 patterns.insert(0, pattern);
17285 Ok(MatchRecognizePattern::Alternation(patterns))
17286 }
17287 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17288 }
17289 } else {
17290 Ok(pattern)
17291 }
17292 }
17293
    /// Parses an optional table time-travel / versioning clause when the
    /// dialect supports table versioning; returns `Ok(None)` otherwise or
    /// when no versioning syntax is present at the current position.
    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
        if self.dialect.supports_table_versioning() {
            // FOR SYSTEM_TIME AS OF <expr>
            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
            {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
            } else if self.peek_keyword(Keyword::CHANGES) {
                return self.parse_table_version_changes().map(Some);
            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
                // AT(...) / BEFORE(...) are parsed as ordinary function calls.
                let func_name = self.parse_object_name(true)?;
                let func = self.parse_function(func_name)?;
                return Ok(Some(TableVersion::Function(func)));
            } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::TimestampAsOf(expr)));
            } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
                // VERSION AS OF takes a numeric version literal.
                let expr = Expr::Value(self.parse_number_value()?);
                return Ok(Some(TableVersion::VersionAsOf(expr)));
            }
        }
        Ok(None)
    }
17317
17318 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
17329 let changes_name = self.parse_object_name(true)?;
17330 let changes = self.parse_function(changes_name)?;
17331 let at_name = self.parse_object_name(true)?;
17332 let at = self.parse_function(at_name)?;
17333 let end = if self.peek_keyword(Keyword::END) {
17334 let end_name = self.parse_object_name(true)?;
17335 Some(self.parse_function(end_name)?)
17336 } else {
17337 None
17338 };
17339 Ok(TableVersion::Changes { changes, at, end })
17340 }
17341
    /// Parses one column definition of a JSON_TABLE `COLUMNS` list. Handles
    /// three shapes: `NESTED [PATH] path COLUMNS (...)`, `name FOR ORDINALITY`,
    /// and `name type [EXISTS] PATH path [error handling ON EMPTY/ERROR]`.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional before the path value.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            // Nested column lists recurse into this same parser.
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?;
        let mut on_empty = None;
        let mut on_error = None;
        // Zero or more `<action> ON EMPTY` / `<action> ON ERROR` clauses;
        // the helper consumes the action and the `ON` keyword.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
17385
17386 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17394 let name = self.parse_identifier()?;
17395 let r#type = self.parse_data_type()?;
17396 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17397 self.next_token();
17398 Some(path)
17399 } else {
17400 None
17401 };
17402 let as_json = self.parse_keyword(Keyword::AS);
17403 if as_json {
17404 self.expect_keyword_is(Keyword::JSON)?;
17405 }
17406 Ok(OpenJsonTableColumn {
17407 name,
17408 r#type,
17409 path,
17410 as_json,
17411 })
17412 }
17413
17414 fn parse_json_table_column_error_handling(
17415 &mut self,
17416 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17417 let res = if self.parse_keyword(Keyword::NULL) {
17418 JsonTableColumnErrorHandling::Null
17419 } else if self.parse_keyword(Keyword::ERROR) {
17420 JsonTableColumnErrorHandling::Error
17421 } else if self.parse_keyword(Keyword::DEFAULT) {
17422 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17423 } else {
17424 return Ok(None);
17425 };
17426 self.expect_keyword_is(Keyword::ON)?;
17427 Ok(Some(res))
17428 }
17429
17430 pub fn parse_derived_table_factor(
17432 &mut self,
17433 lateral: IsLateral,
17434 ) -> Result<TableFactor, ParserError> {
17435 let subquery = self.parse_query()?;
17436 self.expect_token(&Token::RParen)?;
17437 let alias = self.maybe_parse_table_alias()?;
17438
17439 let sample = self
17441 .maybe_parse_table_sample()?
17442 .map(TableSampleKind::AfterTableAlias);
17443
17444 Ok(TableFactor::Derived {
17445 lateral: match lateral {
17446 Lateral => true,
17447 NotLateral => false,
17448 },
17449 subquery,
17450 alias,
17451 sample,
17452 })
17453 }
17454
17455 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17478 let expr = self.parse_expr()?;
17479 let alias = if self.parse_keyword(Keyword::AS) {
17480 Some(self.parse_identifier()?)
17481 } else {
17482 None
17483 };
17484
17485 Ok(ExprWithAlias { expr, alias })
17486 }
17487
17488 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17492 let expr = self.parse_expr()?;
17493 let alias = self.parse_identifier_optional_alias()?;
17494 Ok(ExprWithAlias { expr, alias })
17495 }
17496
    /// Parses one aggregate-function entry of a PIVOT clause:
    /// a function call optionally followed by an alias.
    fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
        let function_name = match self.next_token().token {
            Token::Word(w) => Ok(w.value),
            _ => self.expected_ref("a function identifier", self.peek_token_ref()),
        }?;
        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
        let alias = {
            // A word is accepted as an alias only if the dialect allows it as
            // a select-item alias AND it is not the `FOR` that introduces the
            // pivot value column.
            fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
                kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
            }
            self.parse_optional_alias_inner(None, validator)?
        };
        Ok(ExprWithAlias { expr, alias })
    }
17513
17514 pub fn parse_pivot_table_factor(
17516 &mut self,
17517 table: TableFactor,
17518 ) -> Result<TableFactor, ParserError> {
17519 self.expect_token(&Token::LParen)?;
17520 let aggregate_functions =
17521 self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
17522 self.expect_keyword_is(Keyword::FOR)?;
17523 let value_column = if self.peek_token_ref().token == Token::LParen {
17524 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17525 p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
17526 })?
17527 } else {
17528 vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
17529 };
17530 self.expect_keyword_is(Keyword::IN)?;
17531
17532 self.expect_token(&Token::LParen)?;
17533 let value_source = if self.parse_keyword(Keyword::ANY) {
17534 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17535 self.parse_comma_separated(Parser::parse_order_by_expr)?
17536 } else {
17537 vec![]
17538 };
17539 PivotValueSource::Any(order_by)
17540 } else if self.peek_sub_query() {
17541 PivotValueSource::Subquery(self.parse_query()?)
17542 } else {
17543 PivotValueSource::List(
17544 self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
17545 )
17546 };
17547 self.expect_token(&Token::RParen)?;
17548
17549 let default_on_null =
17550 if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
17551 self.expect_token(&Token::LParen)?;
17552 let expr = self.parse_expr()?;
17553 self.expect_token(&Token::RParen)?;
17554 Some(expr)
17555 } else {
17556 None
17557 };
17558
17559 self.expect_token(&Token::RParen)?;
17560 let alias = self.maybe_parse_table_alias()?;
17561 Ok(TableFactor::Pivot {
17562 table: Box::new(table),
17563 aggregate_functions,
17564 value_column,
17565 value_source,
17566 default_on_null,
17567 alias,
17568 })
17569 }
17570
17571 pub fn parse_unpivot_table_factor(
17573 &mut self,
17574 table: TableFactor,
17575 ) -> Result<TableFactor, ParserError> {
17576 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17577 self.expect_keyword_is(Keyword::NULLS)?;
17578 Some(NullInclusion::IncludeNulls)
17579 } else if self.parse_keyword(Keyword::EXCLUDE) {
17580 self.expect_keyword_is(Keyword::NULLS)?;
17581 Some(NullInclusion::ExcludeNulls)
17582 } else {
17583 None
17584 };
17585 self.expect_token(&Token::LParen)?;
17586 let value = self.parse_expr()?;
17587 self.expect_keyword_is(Keyword::FOR)?;
17588 let name = self.parse_identifier()?;
17589 self.expect_keyword_is(Keyword::IN)?;
17590 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17591 p.parse_expr_with_alias()
17592 })?;
17593 self.expect_token(&Token::RParen)?;
17594 let alias = self.maybe_parse_table_alias()?;
17595 Ok(TableFactor::Unpivot {
17596 table: Box::new(table),
17597 value,
17598 null_inclusion,
17599 name,
17600 columns,
17601 alias,
17602 })
17603 }
17604
17605 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17607 if natural {
17608 Ok(JoinConstraint::Natural)
17609 } else if self.parse_keyword(Keyword::ON) {
17610 let constraint = self.parse_expr()?;
17611 Ok(JoinConstraint::On(constraint))
17612 } else if self.parse_keyword(Keyword::USING) {
17613 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17614 Ok(JoinConstraint::Using(columns))
17615 } else {
17616 Ok(JoinConstraint::None)
17617 }
17619 }
17620
17621 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17623 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17624
17625 self.expect_keyword_is(Keyword::TO)?;
17626 let grantees = self.parse_grantees()?;
17627
17628 let with_grant_option =
17629 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17630
17631 let current_grants =
17632 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17633 Some(CurrentGrantsKind::CopyCurrentGrants)
17634 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17635 Some(CurrentGrantsKind::RevokeCurrentGrants)
17636 } else {
17637 None
17638 };
17639
17640 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17641 Some(self.parse_identifier()?)
17642 } else {
17643 None
17644 };
17645
17646 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17647 Some(self.parse_identifier()?)
17648 } else {
17649 None
17650 };
17651
17652 Ok(Grant {
17653 privileges,
17654 objects,
17655 grantees,
17656 with_grant_option,
17657 as_grantor,
17658 granted_by,
17659 current_grants,
17660 })
17661 }
17662
17663 fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
17664 let mut values = vec![];
17665 let mut grantee_type = GranteesType::None;
17666 loop {
17667 let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
17668 GranteesType::Role
17669 } else if self.parse_keyword(Keyword::USER) {
17670 GranteesType::User
17671 } else if self.parse_keyword(Keyword::SHARE) {
17672 GranteesType::Share
17673 } else if self.parse_keyword(Keyword::GROUP) {
17674 GranteesType::Group
17675 } else if self.parse_keyword(Keyword::PUBLIC) {
17676 GranteesType::Public
17677 } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
17678 GranteesType::DatabaseRole
17679 } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
17680 GranteesType::ApplicationRole
17681 } else if self.parse_keyword(Keyword::APPLICATION) {
17682 GranteesType::Application
17683 } else {
17684 grantee_type.clone() };
17686
17687 if self
17688 .dialect
17689 .get_reserved_grantees_types()
17690 .contains(&new_grantee_type)
17691 {
17692 self.prev_token();
17693 } else {
17694 grantee_type = new_grantee_type;
17695 }
17696
17697 let grantee = if grantee_type == GranteesType::Public {
17698 Grantee {
17699 grantee_type: grantee_type.clone(),
17700 name: None,
17701 }
17702 } else {
17703 let mut name = self.parse_grantee_name()?;
17704 if self.consume_token(&Token::Colon) {
17705 let ident = self.parse_identifier()?;
17709 if let GranteeName::ObjectName(namespace) = name {
17710 name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
17711 format!("{namespace}:{ident}"),
17712 )]));
17713 };
17714 }
17715 Grantee {
17716 grantee_type: grantee_type.clone(),
17717 name: Some(name),
17718 }
17719 };
17720
17721 values.push(grantee);
17722
17723 if !self.consume_token(&Token::Comma) {
17724 break;
17725 }
17726 }
17727
17728 Ok(values)
17729 }
17730
17731 pub fn parse_grant_deny_revoke_privileges_objects(
17733 &mut self,
17734 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17735 let privileges = if self.parse_keyword(Keyword::ALL) {
17736 Privileges::All {
17737 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17738 }
17739 } else {
17740 let actions = self.parse_actions_list()?;
17741 Privileges::Actions(actions)
17742 };
17743
17744 let objects = if self.parse_keyword(Keyword::ON) {
17745 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17746 Some(GrantObjects::AllTablesInSchema {
17747 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17748 })
17749 } else if self.parse_keywords(&[
17750 Keyword::ALL,
17751 Keyword::EXTERNAL,
17752 Keyword::TABLES,
17753 Keyword::IN,
17754 Keyword::SCHEMA,
17755 ]) {
17756 Some(GrantObjects::AllExternalTablesInSchema {
17757 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17758 })
17759 } else if self.parse_keywords(&[
17760 Keyword::ALL,
17761 Keyword::VIEWS,
17762 Keyword::IN,
17763 Keyword::SCHEMA,
17764 ]) {
17765 Some(GrantObjects::AllViewsInSchema {
17766 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17767 })
17768 } else if self.parse_keywords(&[
17769 Keyword::ALL,
17770 Keyword::MATERIALIZED,
17771 Keyword::VIEWS,
17772 Keyword::IN,
17773 Keyword::SCHEMA,
17774 ]) {
17775 Some(GrantObjects::AllMaterializedViewsInSchema {
17776 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17777 })
17778 } else if self.parse_keywords(&[
17779 Keyword::ALL,
17780 Keyword::FUNCTIONS,
17781 Keyword::IN,
17782 Keyword::SCHEMA,
17783 ]) {
17784 Some(GrantObjects::AllFunctionsInSchema {
17785 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17786 })
17787 } else if self.parse_keywords(&[
17788 Keyword::FUTURE,
17789 Keyword::SCHEMAS,
17790 Keyword::IN,
17791 Keyword::DATABASE,
17792 ]) {
17793 Some(GrantObjects::FutureSchemasInDatabase {
17794 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17795 })
17796 } else if self.parse_keywords(&[
17797 Keyword::FUTURE,
17798 Keyword::TABLES,
17799 Keyword::IN,
17800 Keyword::SCHEMA,
17801 ]) {
17802 Some(GrantObjects::FutureTablesInSchema {
17803 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17804 })
17805 } else if self.parse_keywords(&[
17806 Keyword::FUTURE,
17807 Keyword::EXTERNAL,
17808 Keyword::TABLES,
17809 Keyword::IN,
17810 Keyword::SCHEMA,
17811 ]) {
17812 Some(GrantObjects::FutureExternalTablesInSchema {
17813 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17814 })
17815 } else if self.parse_keywords(&[
17816 Keyword::FUTURE,
17817 Keyword::VIEWS,
17818 Keyword::IN,
17819 Keyword::SCHEMA,
17820 ]) {
17821 Some(GrantObjects::FutureViewsInSchema {
17822 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17823 })
17824 } else if self.parse_keywords(&[
17825 Keyword::FUTURE,
17826 Keyword::MATERIALIZED,
17827 Keyword::VIEWS,
17828 Keyword::IN,
17829 Keyword::SCHEMA,
17830 ]) {
17831 Some(GrantObjects::FutureMaterializedViewsInSchema {
17832 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17833 })
17834 } else if self.parse_keywords(&[
17835 Keyword::ALL,
17836 Keyword::SEQUENCES,
17837 Keyword::IN,
17838 Keyword::SCHEMA,
17839 ]) {
17840 Some(GrantObjects::AllSequencesInSchema {
17841 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17842 })
17843 } else if self.parse_keywords(&[
17844 Keyword::FUTURE,
17845 Keyword::SEQUENCES,
17846 Keyword::IN,
17847 Keyword::SCHEMA,
17848 ]) {
17849 Some(GrantObjects::FutureSequencesInSchema {
17850 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17851 })
17852 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
17853 Some(GrantObjects::ResourceMonitors(
17854 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17855 ))
17856 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
17857 Some(GrantObjects::ComputePools(
17858 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17859 ))
17860 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
17861 Some(GrantObjects::FailoverGroup(
17862 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17863 ))
17864 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
17865 Some(GrantObjects::ReplicationGroup(
17866 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17867 ))
17868 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
17869 Some(GrantObjects::ExternalVolumes(
17870 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17871 ))
17872 } else {
17873 let object_type = self.parse_one_of_keywords(&[
17874 Keyword::SEQUENCE,
17875 Keyword::DATABASE,
17876 Keyword::SCHEMA,
17877 Keyword::TABLE,
17878 Keyword::VIEW,
17879 Keyword::WAREHOUSE,
17880 Keyword::INTEGRATION,
17881 Keyword::VIEW,
17882 Keyword::WAREHOUSE,
17883 Keyword::INTEGRATION,
17884 Keyword::USER,
17885 Keyword::CONNECTION,
17886 Keyword::PROCEDURE,
17887 Keyword::FUNCTION,
17888 ]);
17889 let objects =
17890 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
17891 match object_type {
17892 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
17893 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
17894 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
17895 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
17896 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
17897 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
17898 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
17899 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
17900 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
17901 if let Some(name) = objects?.first() {
17902 self.parse_grant_procedure_or_function(name, &kw)?
17903 } else {
17904 self.expected_ref("procedure or function name", self.peek_token_ref())?
17905 }
17906 }
17907 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
17908 Some(unexpected_keyword) => return Err(ParserError::ParserError(
17909 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
17910 )),
17911 }
17912 }
17913 } else {
17914 None
17915 };
17916
17917 Ok((privileges, objects))
17918 }
17919
17920 fn parse_grant_procedure_or_function(
17921 &mut self,
17922 name: &ObjectName,
17923 kw: &Option<Keyword>,
17924 ) -> Result<Option<GrantObjects>, ParserError> {
17925 let arg_types = if self.consume_token(&Token::LParen) {
17926 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
17927 self.expect_token(&Token::RParen)?;
17928 list
17929 } else {
17930 vec![]
17931 };
17932 match kw {
17933 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
17934 name: name.clone(),
17935 arg_types,
17936 })),
17937 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
17938 name: name.clone(),
17939 arg_types,
17940 })),
17941 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
17942 }
17943 }
17944
    /// Parse a single privilege in a `GRANT`/`DENY`/`REVOKE` statement,
    /// e.g. `SELECT (col1, col2)`, `IMPORTED PRIVILEGES`, or `CREATE SCHEMA`.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Optional parenthesized column list after a privilege keyword;
        // an absent or empty list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges are checked first so that e.g. `READ SESSION`
        // is not consumed as the bare keyword `READ` further below.
        // `parse_keywords` rewinds on partial mismatch, so failed probes are safe.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-keyword privileges (some with a type/column suffix) follow.
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            // CREATE may optionally name an object type (CREATE SCHEMA, CREATE ROLE, ...).
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
18065
18066 fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
18067 if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
18069 Some(ActionCreateObjectType::ApplicationPackage)
18070 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
18071 Some(ActionCreateObjectType::ComputePool)
18072 } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
18073 Some(ActionCreateObjectType::DataExchangeListing)
18074 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
18075 Some(ActionCreateObjectType::ExternalVolume)
18076 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
18077 Some(ActionCreateObjectType::FailoverGroup)
18078 } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
18079 Some(ActionCreateObjectType::NetworkPolicy)
18080 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
18081 Some(ActionCreateObjectType::OrganiationListing)
18082 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
18083 Some(ActionCreateObjectType::ReplicationGroup)
18084 }
18085 else if self.parse_keyword(Keyword::ACCOUNT) {
18087 Some(ActionCreateObjectType::Account)
18088 } else if self.parse_keyword(Keyword::APPLICATION) {
18089 Some(ActionCreateObjectType::Application)
18090 } else if self.parse_keyword(Keyword::DATABASE) {
18091 Some(ActionCreateObjectType::Database)
18092 } else if self.parse_keyword(Keyword::INTEGRATION) {
18093 Some(ActionCreateObjectType::Integration)
18094 } else if self.parse_keyword(Keyword::ROLE) {
18095 Some(ActionCreateObjectType::Role)
18096 } else if self.parse_keyword(Keyword::SCHEMA) {
18097 Some(ActionCreateObjectType::Schema)
18098 } else if self.parse_keyword(Keyword::SHARE) {
18099 Some(ActionCreateObjectType::Share)
18100 } else if self.parse_keyword(Keyword::USER) {
18101 Some(ActionCreateObjectType::User)
18102 } else if self.parse_keyword(Keyword::WAREHOUSE) {
18103 Some(ActionCreateObjectType::Warehouse)
18104 } else {
18105 None
18106 }
18107 }
18108
18109 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
18110 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
18111 Ok(ActionApplyType::AggregationPolicy)
18112 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
18113 Ok(ActionApplyType::AuthenticationPolicy)
18114 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
18115 Ok(ActionApplyType::JoinPolicy)
18116 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
18117 Ok(ActionApplyType::MaskingPolicy)
18118 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
18119 Ok(ActionApplyType::PackagesPolicy)
18120 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
18121 Ok(ActionApplyType::PasswordPolicy)
18122 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
18123 Ok(ActionApplyType::ProjectionPolicy)
18124 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
18125 Ok(ActionApplyType::RowAccessPolicy)
18126 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
18127 Ok(ActionApplyType::SessionPolicy)
18128 } else if self.parse_keyword(Keyword::TAG) {
18129 Ok(ActionApplyType::Tag)
18130 } else {
18131 self.expected_ref("GRANT APPLY type", self.peek_token_ref())
18132 }
18133 }
18134
18135 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
18136 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
18137 Some(ActionExecuteObjectType::DataMetricFunction)
18138 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
18139 Some(ActionExecuteObjectType::ManagedAlert)
18140 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
18141 Some(ActionExecuteObjectType::ManagedTask)
18142 } else if self.parse_keyword(Keyword::ALERT) {
18143 Some(ActionExecuteObjectType::Alert)
18144 } else if self.parse_keyword(Keyword::TASK) {
18145 Some(ActionExecuteObjectType::Task)
18146 } else {
18147 None
18148 }
18149 }
18150
18151 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
18152 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
18153 Ok(ActionManageType::AccountSupportCases)
18154 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
18155 Ok(ActionManageType::EventSharing)
18156 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
18157 Ok(ActionManageType::ListingAutoFulfillment)
18158 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
18159 Ok(ActionManageType::OrganizationSupportCases)
18160 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
18161 Ok(ActionManageType::UserSupportCases)
18162 } else if self.parse_keyword(Keyword::GRANTS) {
18163 Ok(ActionManageType::Grants)
18164 } else if self.parse_keyword(Keyword::WAREHOUSES) {
18165 Ok(ActionManageType::Warehouses)
18166 } else {
18167 self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
18168 }
18169 }
18170
18171 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
18172 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
18173 Some(ActionModifyType::LogLevel)
18174 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
18175 Some(ActionModifyType::TraceLevel)
18176 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
18177 Some(ActionModifyType::SessionLogLevel)
18178 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
18179 Some(ActionModifyType::SessionTraceLevel)
18180 } else {
18181 None
18182 }
18183 }
18184
18185 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
18186 if self.parse_keyword(Keyword::EXECUTION) {
18187 Some(ActionMonitorType::Execution)
18188 } else if self.parse_keyword(Keyword::SECURITY) {
18189 Some(ActionMonitorType::Security)
18190 } else if self.parse_keyword(Keyword::USAGE) {
18191 Some(ActionMonitorType::Usage)
18192 } else {
18193 None
18194 }
18195 }
18196
18197 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
18199 let mut name = self.parse_object_name(false)?;
18200 if self.dialect.supports_user_host_grantee()
18201 && name.0.len() == 1
18202 && name.0[0].as_ident().is_some()
18203 && self.consume_token(&Token::AtSign)
18204 {
18205 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
18206 let host = self.parse_identifier()?;
18207 Ok(GranteeName::UserHost { user, host })
18208 } else {
18209 Ok(GranteeName::ObjectName(name))
18210 }
18211 }
18212
18213 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
18215 self.expect_keyword(Keyword::DENY)?;
18216
18217 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18218 let objects = match objects {
18219 Some(o) => o,
18220 None => {
18221 return parser_err!(
18222 "DENY statements must specify an object",
18223 self.peek_token_ref().span.start
18224 )
18225 }
18226 };
18227
18228 self.expect_keyword_is(Keyword::TO)?;
18229 let grantees = self.parse_grantees()?;
18230 let cascade = self.parse_cascade_option();
18231 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18232 Some(self.parse_identifier()?)
18233 } else {
18234 None
18235 };
18236
18237 Ok(Statement::Deny(DenyStatement {
18238 privileges,
18239 objects,
18240 grantees,
18241 cascade,
18242 granted_by,
18243 }))
18244 }
18245
18246 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18248 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18249
18250 self.expect_keyword_is(Keyword::FROM)?;
18251 let grantees = self.parse_grantees()?;
18252
18253 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18254 Some(self.parse_identifier()?)
18255 } else {
18256 None
18257 };
18258
18259 let cascade = self.parse_cascade_option();
18260
18261 Ok(Revoke {
18262 privileges,
18263 objects,
18264 grantees,
18265 granted_by,
18266 cascade,
18267 })
18268 }
18269
18270 pub fn parse_replace(
18272 &mut self,
18273 replace_token: TokenWithSpan,
18274 ) -> Result<Statement, ParserError> {
18275 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18276 return parser_err!(
18277 "Unsupported statement REPLACE",
18278 self.peek_token_ref().span.start
18279 );
18280 }
18281
18282 let mut insert = self.parse_insert(replace_token)?;
18283 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18284 *replace_into = true;
18285 }
18286
18287 Ok(insert)
18288 }
18289
18290 fn parse_insert_setexpr_boxed(
18294 &mut self,
18295 insert_token: TokenWithSpan,
18296 ) -> Result<Box<SetExpr>, ParserError> {
18297 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18298 }
18299
    /// Parse an `INSERT` statement (or Hive `INSERT ... DIRECTORY`);
    /// `insert_token` is the already-consumed `INSERT` keyword token.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `INSERT OR REPLACE|ROLLBACK|...` conflict clause.
        let or = self.parse_conflict_clause();
        // MySQL priority modifiers; `None` for all other dialects.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Set to true by parse_replace() after this function returns.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // Hive `INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>' ...` form.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Regular `INSERT [INTO | TABLE] <target> ...` form.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional table alias; suppressed when the next token starts a
            // subquery or a DEFAULT/VALUES clause, which would be ambiguous.
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    // A bare identifier may be an implicit alias; rewind if not.
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            // `DEFAULT VALUES` carries no columns, partitions, or source.
            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                (vec![], None, vec![], None, None, vec![])
            } else {
                // A leading `( SELECT` is the source query, not a column list.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                // FORMAT/SETTINGS here means no inline source; `SET a = b, ...`
                // (where supported) replaces the source query with assignments.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // Trailing SETTINGS/FORMAT clauses for dialects that support them.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias (col_aliases)` for referring to the new row.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ...` (upsert) or `ON DUPLICATE KEY UPDATE ...`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Conflict target: named constraint, column list, or none.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        // `DO UPDATE SET <assignments> [WHERE <condition>]`.
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    // MySQL `ON DUPLICATE KEY UPDATE <assignments>`.
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                // Multi-table INSERT fields are only populated by other
                // dialect-specific entry points, never here.
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18526
18527 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18531 let ident = self.parse_identifier()?;
18532 let values = self
18533 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18534 .unwrap_or_default();
18535
18536 Ok(InputFormatClause { ident, values })
18537 }
18538
18539 fn peek_subquery_start(&mut self) -> bool {
18542 matches!(
18543 self.peek_tokens_ref(),
18544 [
18545 TokenWithSpan {
18546 token: Token::LParen,
18547 ..
18548 },
18549 TokenWithSpan {
18550 token: Token::Word(Word {
18551 keyword: Keyword::SELECT,
18552 ..
18553 }),
18554 ..
18555 },
18556 ]
18557 )
18558 }
18559
18560 fn peek_subquery_or_cte_start(&mut self) -> bool {
18564 matches!(
18565 self.peek_tokens_ref(),
18566 [
18567 TokenWithSpan {
18568 token: Token::LParen,
18569 ..
18570 },
18571 TokenWithSpan {
18572 token: Token::Word(Word {
18573 keyword: Keyword::SELECT | Keyword::WITH,
18574 ..
18575 }),
18576 ..
18577 },
18578 ]
18579 )
18580 }
18581
18582 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18583 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18584 Some(SqliteOnConflict::Replace)
18585 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18586 Some(SqliteOnConflict::Rollback)
18587 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18588 Some(SqliteOnConflict::Abort)
18589 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18590 Some(SqliteOnConflict::Fail)
18591 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18592 Some(SqliteOnConflict::Ignore)
18593 } else if self.parse_keyword(Keyword::REPLACE) {
18594 Some(SqliteOnConflict::Replace)
18595 } else {
18596 None
18597 }
18598 }
18599
18600 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18602 if self.parse_keyword(Keyword::PARTITION) {
18603 self.expect_token(&Token::LParen)?;
18604 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18605 self.expect_token(&Token::RParen)?;
18606 Ok(partition_cols)
18607 } else {
18608 Ok(None)
18609 }
18610 }
18611
18612 pub fn parse_load_data_table_format(
18614 &mut self,
18615 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18616 if self.parse_keyword(Keyword::INPUTFORMAT) {
18617 let input_format = self.parse_expr()?;
18618 self.expect_keyword_is(Keyword::SERDE)?;
18619 let serde = self.parse_expr()?;
18620 Ok(Some(HiveLoadDataFormat {
18621 input_format,
18622 serde,
18623 }))
18624 } else {
18625 Ok(None)
18626 }
18627 }
18628
18629 fn parse_update_setexpr_boxed(
18633 &mut self,
18634 update_token: TokenWithSpan,
18635 ) -> Result<Box<SetExpr>, ParserError> {
18636 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18637 }
18638
    /// Parse an `UPDATE` statement; `update_token` is the already-consumed
    /// `UPDATE` keyword token (kept for span tracking).
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `UPDATE OR REPLACE|ROLLBACK|...` conflict clause.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // Some dialects place FROM before SET; remember which position was used.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        // OUTPUT clause (e.g. SQL Server), if present.
        let output = self.maybe_parse_output_clause()?;

        // FROM after SET is only attempted when it wasn't parsed before SET.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // ORDER BY / LIMIT on UPDATE, where the dialect supports them.
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18700
18701 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18703 let target = self.parse_assignment_target()?;
18704 self.expect_token(&Token::Eq)?;
18705 let value = self.parse_expr()?;
18706 Ok(Assignment { target, value })
18707 }
18708
18709 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18711 if self.consume_token(&Token::LParen) {
18712 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18713 self.expect_token(&Token::RParen)?;
18714 Ok(AssignmentTarget::Tuple(columns))
18715 } else {
18716 let column = self.parse_object_name(false)?;
18717 Ok(AssignmentTarget::ColumnName(column))
18718 }
18719 }
18720
    /// Parse a single function-call argument, which may be named
    /// (`name => expr`, `name := expr`, ...) or unnamed (an expression
    /// or a wildcard, possibly with wildcard modifiers).
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // Speculatively try a named argument first; `maybe_parse` rewinds
        // the token stream if the closure fails.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // The argument name may itself be an arbitrary expression.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // Otherwise the argument name must be a plain identifier.
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Fall back to an unnamed argument.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                // In some dialects `*` may carry EXCLUDE/EXCEPT/REPLACE/RENAME/
                // ILIKE modifiers; only wrap when at least one is present.
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18770
18771 fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
18772 if self.parse_keyword(Keyword::VALUE) {
18773 return Ok(FunctionArgOperator::Value);
18774 }
18775 let tok = self.next_token();
18776 match tok.token {
18777 Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
18778 Ok(FunctionArgOperator::RightArrow)
18779 }
18780 Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
18781 Ok(FunctionArgOperator::Equals)
18782 }
18783 Token::Assignment
18784 if self
18785 .dialect
18786 .supports_named_fn_args_with_assignment_operator() =>
18787 {
18788 Ok(FunctionArgOperator::Assignment)
18789 }
18790 Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
18791 Ok(FunctionArgOperator::Colon)
18792 }
18793 _ => {
18794 self.prev_token();
18795 self.expected("argument operator", tok)
18796 }
18797 }
18798 }
18799
18800 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
18802 if self.consume_token(&Token::RParen) {
18803 Ok(vec![])
18804 } else {
18805 let args = self.parse_comma_separated(Parser::parse_function_args)?;
18806 self.expect_token(&Token::RParen)?;
18807 Ok(args)
18808 }
18809 }
18810
18811 fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
18812 if self.consume_token(&Token::RParen) {
18813 return Ok(TableFunctionArgs {
18814 args: vec![],
18815 settings: None,
18816 });
18817 }
18818 let mut args = vec![];
18819 let settings = loop {
18820 if let Some(settings) = self.parse_settings()? {
18821 break Some(settings);
18822 }
18823 args.push(self.parse_function_args()?);
18824 if self.is_parse_comma_separated_end() {
18825 break None;
18826 }
18827 };
18828 self.expect_token(&Token::RParen)?;
18829 Ok(TableFunctionArgs { args, settings })
18830 }
18831
    /// Parses the contents of a function call's argument list after the
    /// opening `(`: optional leading JSON clauses, an ALL/DISTINCT
    /// specifier, the arguments, and any trailing argument clauses
    /// (ORDER BY, LIMIT, HAVING, SEPARATOR, ON OVERFLOW, JSON clauses).
    /// Consumes the closing `)`.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON null-handling / RETURNING clauses may appear before the
        // arguments (e.g. in JSON-constructing functions).
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list (possibly with only the clauses above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // IGNORE/RESPECT NULLS given inside the argument list, where the
        // dialect allows it.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        // Aggregate ordering inside the call: `ORDER BY <exprs>`.
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery aggregate `HAVING MIN|MAX <expr>` bound.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // `expect_one_of_keywords` only returns keywords from its
                // input list, so this arm is unreachable in practice.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL GROUP_CONCAT `SEPARATOR <value>`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        // LISTAGG `ON OVERFLOW ...` clause.
        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // The JSON clauses may also follow the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
18925
18926 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
18927 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
18928 Some(JsonNullClause::AbsentOnNull)
18929 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
18930 Some(JsonNullClause::NullOnNull)
18931 } else {
18932 None
18933 }
18934 }
18935
18936 fn maybe_parse_json_returning_clause(
18937 &mut self,
18938 ) -> Result<Option<JsonReturningClause>, ParserError> {
18939 if self.parse_keyword(Keyword::RETURNING) {
18940 let data_type = self.parse_data_type()?;
18941 Ok(Some(JsonReturningClause { data_type }))
18942 } else {
18943 Ok(None)
18944 }
18945 }
18946
18947 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
18948 let loc = self.peek_token_ref().span.start;
18949 match (
18950 self.parse_keyword(Keyword::ALL),
18951 self.parse_keyword(Keyword::DISTINCT),
18952 ) {
18953 (true, false) => Ok(Some(DuplicateTreatment::All)),
18954 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
18955 (false, false) => Ok(None),
18956 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
18957 }
18958 }
18959
    /// Parses one item of a SELECT list: a wildcard, a qualified wildcard,
    /// an expression, or an expression with alias(es).
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Dialect-specific reserved prefix operator rendered as an
        // identifier prefix on the parsed expression.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `qualifier.*`
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // bare `*`
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted `from` here means the SELECT list was empty
            // (`SELECT FROM t`); report a missing expression instead.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // MSSQL-style `alias = expr` assignment aliasing.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The match guard above guarantees `left` is an identifier.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — wildcard qualified by an arbitrary expression;
            // note the guard consumes the `.` and `*` tokens on match.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (a, b, ...)` — multi-column alias list.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression, optionally followed by `[AS] alias`.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
19034
    /// Parses the modifiers that may follow a `*` in a SELECT list — ILIKE,
    /// EXCLUDE, EXCEPT, REPLACE, RENAME, and a trailing alias — each gated
    /// on dialect support. `wildcard_token` is the `*` token itself,
    /// preserved in the AST.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // ILIKE and EXCLUDE are mutually exclusive: EXCLUDE is tried only
        // when no ILIKE clause was parsed.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if self.dialect.supports_select_wildcard_replace() {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if self.dialect.supports_select_wildcard_rename() {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        // Some dialects allow `SELECT * AS alias`.
        let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
            self.maybe_parse_select_item_alias()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
            opt_alias,
        })
    }
19085
19086 pub fn parse_optional_select_item_ilike(
19090 &mut self,
19091 ) -> Result<Option<IlikeSelectItem>, ParserError> {
19092 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
19093 let next_token = self.next_token();
19094 let pattern = match next_token.token {
19095 Token::SingleQuotedString(s) => s,
19096 _ => return self.expected("ilike pattern", next_token),
19097 };
19098 Some(IlikeSelectItem { pattern })
19099 } else {
19100 None
19101 };
19102 Ok(opt_ilike)
19103 }
19104
19105 pub fn parse_optional_select_item_exclude(
19109 &mut self,
19110 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
19111 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
19112 if self.consume_token(&Token::LParen) {
19113 let columns =
19114 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
19115 self.expect_token(&Token::RParen)?;
19116 Some(ExcludeSelectItem::Multiple(columns))
19117 } else {
19118 let column = self.parse_object_name(false)?;
19119 Some(ExcludeSelectItem::Single(column))
19120 }
19121 } else {
19122 None
19123 };
19124
19125 Ok(opt_exclude)
19126 }
19127
19128 pub fn parse_optional_select_item_except(
19132 &mut self,
19133 ) -> Result<Option<ExceptSelectItem>, ParserError> {
19134 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
19135 if self.peek_token_ref().token == Token::LParen {
19136 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
19137 match &idents[..] {
19138 [] => {
19139 return self.expected_ref(
19140 "at least one column should be parsed by the expect clause",
19141 self.peek_token_ref(),
19142 )?;
19143 }
19144 [first, idents @ ..] => Some(ExceptSelectItem {
19145 first_element: first.clone(),
19146 additional_elements: idents.to_vec(),
19147 }),
19148 }
19149 } else {
19150 let ident = self.parse_identifier()?;
19152 Some(ExceptSelectItem {
19153 first_element: ident,
19154 additional_elements: vec![],
19155 })
19156 }
19157 } else {
19158 None
19159 };
19160
19161 Ok(opt_except)
19162 }
19163
19164 pub fn parse_optional_select_item_rename(
19166 &mut self,
19167 ) -> Result<Option<RenameSelectItem>, ParserError> {
19168 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
19169 if self.consume_token(&Token::LParen) {
19170 let idents =
19171 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
19172 self.expect_token(&Token::RParen)?;
19173 Some(RenameSelectItem::Multiple(idents))
19174 } else {
19175 let ident = self.parse_identifier_with_alias()?;
19176 Some(RenameSelectItem::Single(ident))
19177 }
19178 } else {
19179 None
19180 };
19181
19182 Ok(opt_rename)
19183 }
19184
19185 pub fn parse_optional_select_item_replace(
19187 &mut self,
19188 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
19189 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
19190 if self.consume_token(&Token::LParen) {
19191 let items = self.parse_comma_separated(|parser| {
19192 Ok(Box::new(parser.parse_replace_elements()?))
19193 })?;
19194 self.expect_token(&Token::RParen)?;
19195 Some(ReplaceSelectItem { items })
19196 } else {
19197 let tok = self.next_token();
19198 return self.expected("( after REPLACE but", tok);
19199 }
19200 } else {
19201 None
19202 };
19203
19204 Ok(opt_replace)
19205 }
19206 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
19208 let expr = self.parse_expr()?;
19209 let as_keyword = self.parse_keyword(Keyword::AS);
19210 let ident = self.parse_identifier()?;
19211 Ok(ReplaceSelectElement {
19212 expr,
19213 column_name: ident,
19214 as_keyword,
19215 })
19216 }
19217
19218 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19221 if self.parse_keyword(Keyword::ASC) {
19222 Some(true)
19223 } else if self.parse_keyword(Keyword::DESC) {
19224 Some(false)
19225 } else {
19226 None
19227 }
19228 }
19229
19230 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19232 self.parse_order_by_expr_inner(false)
19233 .map(|(order_by, _)| order_by)
19234 }
19235
19236 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19238 self.parse_order_by_expr_inner(true)
19239 .map(|(column, operator_class)| IndexColumn {
19240 column,
19241 operator_class,
19242 })
19243 }
19244
    /// Shared implementation behind ORDER BY expressions and CREATE INDEX
    /// columns. When `with_operator_class` is true, an optional operator
    /// class name (PostgreSQL index syntax) may follow the expression.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // An upcoming ASC/DESC/NULLS/WITH keyword cannot start an
            // operator class, so skip the speculative parse in that case.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                // Speculative: rolls back on failure via `maybe_parse`.
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // ClickHouse `WITH FILL` modifier.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19285
19286 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19287 let asc = self.parse_asc_desc();
19288
19289 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19290 Some(true)
19291 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19292 Some(false)
19293 } else {
19294 None
19295 };
19296
19297 Ok(OrderByOptions { asc, nulls_first })
19298 }
19299
19300 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19304 let from = if self.parse_keyword(Keyword::FROM) {
19305 Some(self.parse_expr()?)
19306 } else {
19307 None
19308 };
19309
19310 let to = if self.parse_keyword(Keyword::TO) {
19311 Some(self.parse_expr()?)
19312 } else {
19313 None
19314 };
19315
19316 let step = if self.parse_keyword(Keyword::STEP) {
19317 Some(self.parse_expr()?)
19318 } else {
19319 None
19320 };
19321
19322 Ok(WithFill { from, to, step })
19323 }
19324
19325 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19328 if !self.parse_keyword(Keyword::INTERPOLATE) {
19329 return Ok(None);
19330 }
19331
19332 if self.consume_token(&Token::LParen) {
19333 let interpolations =
19334 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19335 self.expect_token(&Token::RParen)?;
19336 return Ok(Some(Interpolate {
19338 exprs: Some(interpolations),
19339 }));
19340 }
19341
19342 Ok(Some(Interpolate { exprs: None }))
19344 }
19345
19346 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19348 let column = self.parse_identifier()?;
19349 let expr = if self.parse_keyword(Keyword::AS) {
19350 Some(self.parse_expr()?)
19351 } else {
19352 None
19353 };
19354 Ok(InterpolateExpr { column, expr })
19355 }
19356
19357 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
19360 let quantity = if self.consume_token(&Token::LParen) {
19361 let quantity = self.parse_expr()?;
19362 self.expect_token(&Token::RParen)?;
19363 Some(TopQuantity::Expr(quantity))
19364 } else {
19365 let next_token = self.next_token();
19366 let quantity = match next_token.token {
19367 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
19368 _ => self.expected("literal int", next_token)?,
19369 };
19370 Some(TopQuantity::Constant(quantity))
19371 };
19372
19373 let percent = self.parse_keyword(Keyword::PERCENT);
19374
19375 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
19376
19377 Ok(Top {
19378 with_ties,
19379 percent,
19380 quantity,
19381 })
19382 }
19383
19384 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19386 if self.parse_keyword(Keyword::ALL) {
19387 Ok(None)
19388 } else {
19389 Ok(Some(self.parse_expr()?))
19390 }
19391 }
19392
19393 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19395 let value = self.parse_expr()?;
19396 let rows = if self.parse_keyword(Keyword::ROW) {
19397 OffsetRows::Row
19398 } else if self.parse_keyword(Keyword::ROWS) {
19399 OffsetRows::Rows
19400 } else {
19401 OffsetRows::None
19402 };
19403 Ok(Offset { value, rows })
19404 }
19405
19406 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
19408 let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
19409
19410 let (quantity, percent) = if self
19411 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
19412 .is_some()
19413 {
19414 (None, false)
19415 } else {
19416 let quantity = Expr::Value(self.parse_value()?);
19417 let percent = self.parse_keyword(Keyword::PERCENT);
19418 let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
19419 (Some(quantity), percent)
19420 };
19421
19422 let with_ties = if self.parse_keyword(Keyword::ONLY) {
19423 false
19424 } else {
19425 self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
19426 };
19427
19428 Ok(Fetch {
19429 with_ties,
19430 percent,
19431 quantity,
19432 })
19433 }
19434
19435 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
19437 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
19438 Keyword::UPDATE => LockType::Update,
19439 Keyword::SHARE => LockType::Share,
19440 unexpected_keyword => return Err(ParserError::ParserError(
19441 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
19442 )),
19443 };
19444 let of = if self.parse_keyword(Keyword::OF) {
19445 Some(self.parse_object_name(false)?)
19446 } else {
19447 None
19448 };
19449 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
19450 Some(NonBlock::Nowait)
19451 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
19452 Some(NonBlock::SkipLocked)
19453 } else {
19454 None
19455 };
19456 Ok(LockClause {
19457 lock_type,
19458 of,
19459 nonblock,
19460 })
19461 }
19462
19463 pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
19465 self.expect_keyword(Keyword::LOCK)?;
19466
19467 if self.peek_keyword(Keyword::TABLES) {
19468 return self.expected_ref("TABLE or a table name", self.peek_token_ref());
19469 }
19470
19471 let _ = self.parse_keyword(Keyword::TABLE);
19472 let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
19473 let lock_mode = if self.parse_keyword(Keyword::IN) {
19474 let lock_mode = self.parse_lock_table_mode()?;
19475 self.expect_keyword(Keyword::MODE)?;
19476 Some(lock_mode)
19477 } else {
19478 None
19479 };
19480 let nowait = self.parse_keyword(Keyword::NOWAIT);
19481
19482 Ok(Lock {
19483 tables,
19484 lock_mode,
19485 nowait,
19486 })
19487 }
19488
19489 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19490 let only = self.parse_keyword(Keyword::ONLY);
19491 let name = self.parse_object_name(false)?;
19492 let has_asterisk = self.consume_token(&Token::Mul);
19493
19494 Ok(LockTableTarget {
19495 name,
19496 only,
19497 has_asterisk,
19498 })
19499 }
19500
19501 fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
19502 if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
19503 Ok(LockTableMode::AccessShare)
19504 } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
19505 Ok(LockTableMode::AccessExclusive)
19506 } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
19507 Ok(LockTableMode::RowShare)
19508 } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
19509 Ok(LockTableMode::RowExclusive)
19510 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
19511 Ok(LockTableMode::ShareUpdateExclusive)
19512 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
19513 Ok(LockTableMode::ShareRowExclusive)
19514 } else if self.parse_keyword(Keyword::SHARE) {
19515 Ok(LockTableMode::Share)
19516 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19517 Ok(LockTableMode::Exclusive)
19518 } else {
19519 self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
19520 }
19521 }
19522
19523 pub fn parse_values(
19525 &mut self,
19526 allow_empty: bool,
19527 value_keyword: bool,
19528 ) -> Result<Values, ParserError> {
19529 let mut explicit_row = false;
19530
19531 let rows = self.parse_comma_separated(|parser| {
19532 if parser.parse_keyword(Keyword::ROW) {
19533 explicit_row = true;
19534 }
19535
19536 parser.expect_token(&Token::LParen)?;
19537 if allow_empty && parser.peek_token().token == Token::RParen {
19538 parser.next_token();
19539 Ok(vec![])
19540 } else {
19541 let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
19542 parser.expect_token(&Token::RParen)?;
19543 Ok(exprs)
19544 }
19545 })?;
19546 Ok(Values {
19547 explicit_row,
19548 rows,
19549 value_keyword,
19550 })
19551 }
19552
19553 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19555 self.expect_keyword_is(Keyword::TRANSACTION)?;
19556 Ok(Statement::StartTransaction {
19557 modes: self.parse_transaction_modes()?,
19558 begin: false,
19559 transaction: Some(BeginTransactionKind::Transaction),
19560 modifier: None,
19561 statements: vec![],
19562 exception: None,
19563 has_end_keyword: false,
19564 })
19565 }
19566
19567 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19569 if !self.dialect.supports_start_transaction_modifier() {
19570 None
19571 } else if self.parse_keyword(Keyword::DEFERRED) {
19572 Some(TransactionModifier::Deferred)
19573 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19574 Some(TransactionModifier::Immediate)
19575 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19576 Some(TransactionModifier::Exclusive)
19577 } else if self.parse_keyword(Keyword::TRY) {
19578 Some(TransactionModifier::Try)
19579 } else if self.parse_keyword(Keyword::CATCH) {
19580 Some(TransactionModifier::Catch)
19581 } else {
19582 None
19583 }
19584 }
19585
19586 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19588 let modifier = self.parse_transaction_modifier();
19589 let transaction =
19590 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19591 {
19592 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19593 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19594 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19595 _ => None,
19596 };
19597 Ok(Statement::StartTransaction {
19598 modes: self.parse_transaction_modes()?,
19599 begin: true,
19600 transaction,
19601 modifier,
19602 statements: vec![],
19603 exception: None,
19604 has_end_keyword: false,
19605 })
19606 }
19607
    /// Parses the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block (BigQuery-style), returning it as a `StartTransaction`
    /// statement with `has_end_keyword` set.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler is `WHEN <err> [OR <err> ...] THEN <statements>`,
            // repeated until the closing END.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                let mut idents = Vec::new();

                // Collect error identifiers until THEN; an `OR` separator
                // between them is consumed opportunistically.
                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19653
19654 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19656 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19657 None
19658 } else if self.parse_keyword(Keyword::TRY) {
19659 Some(TransactionModifier::Try)
19660 } else if self.parse_keyword(Keyword::CATCH) {
19661 Some(TransactionModifier::Catch)
19662 } else {
19663 None
19664 };
19665 Ok(Statement::Commit {
19666 chain: self.parse_commit_rollback_chain()?,
19667 end: true,
19668 modifier,
19669 })
19670 }
19671
    /// Parses a (possibly empty) comma-separated list of transaction modes:
    /// `ISOLATION LEVEL ...` and `READ ONLY` / `READ WRITE`.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        // Before the first mode nothing is required; after a comma another
        // mode is mandatory.
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19710
19711 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19713 Ok(Statement::Commit {
19714 chain: self.parse_commit_rollback_chain()?,
19715 end: false,
19716 modifier: None,
19717 })
19718 }
19719
19720 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19722 let chain = self.parse_commit_rollback_chain()?;
19723 let savepoint = self.parse_rollback_savepoint()?;
19724
19725 Ok(Statement::Rollback { chain, savepoint })
19726 }
19727
19728 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19730 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19731 if self.parse_keyword(Keyword::AND) {
19732 let chain = !self.parse_keyword(Keyword::NO);
19733 self.expect_keyword_is(Keyword::CHAIN)?;
19734 Ok(chain)
19735 } else {
19736 Ok(false)
19737 }
19738 }
19739
19740 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19742 if self.parse_keyword(Keyword::TO) {
19743 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19744 let savepoint = self.parse_identifier()?;
19745
19746 Ok(Some(savepoint))
19747 } else {
19748 Ok(None)
19749 }
19750 }
19751
19752 pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
19754 self.expect_token(&Token::LParen)?;
19755 let message = Box::new(self.parse_expr()?);
19756 self.expect_token(&Token::Comma)?;
19757 let severity = Box::new(self.parse_expr()?);
19758 self.expect_token(&Token::Comma)?;
19759 let state = Box::new(self.parse_expr()?);
19760 let arguments = if self.consume_token(&Token::Comma) {
19761 self.parse_comma_separated(Parser::parse_expr)?
19762 } else {
19763 vec![]
19764 };
19765 self.expect_token(&Token::RParen)?;
19766 let options = if self.parse_keyword(Keyword::WITH) {
19767 self.parse_comma_separated(Parser::parse_raiserror_option)?
19768 } else {
19769 vec![]
19770 };
19771 Ok(Statement::RaisError {
19772 message,
19773 severity,
19774 state,
19775 arguments,
19776 options,
19777 })
19778 }
19779
19780 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
19782 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
19783 Keyword::LOG => Ok(RaisErrorOption::Log),
19784 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
19785 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
19786 _ => self.expected_ref(
19787 "LOG, NOWAIT OR SETERROR raiserror option",
19788 self.peek_token_ref(),
19789 ),
19790 }
19791 }
19792
19793 pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
19797 self.expect_keyword_is(Keyword::THROW)?;
19798
19799 let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
19800 let (message, state) = if error_number.is_some() {
19801 self.expect_token(&Token::Comma)?;
19802 let message = Box::new(self.parse_expr()?);
19803 self.expect_token(&Token::Comma)?;
19804 let state = Box::new(self.parse_expr()?);
19805 (Some(message), Some(state))
19806 } else {
19807 (None, None)
19808 };
19809
19810 Ok(ThrowStatement {
19811 error_number,
19812 message,
19813 state,
19814 })
19815 }
19816
19817 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
19819 let prepare = self.parse_keyword(Keyword::PREPARE);
19820 let name = self.parse_identifier()?;
19821 Ok(Statement::Deallocate { name, prepare })
19822 }
19823
    /// Parses an `EXECUTE [IMMEDIATE] [name] [(params) | params]
    /// [INTO ...] [USING ...] [OUTPUT] [DEFAULT]` statement, covering
    /// several dialects' EXECUTE/EXEC variations.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // `EXECUTE IMMEDIATE` takes a string expression rather than a name;
        // a leading `(` likewise means there is no statement name.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Determine the token that terminates the parameter list: `)` when
        // parenthesized; otherwise EOF, a following clause keyword, or `;`.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        // `INTO var, ...` output target list.
        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        // `USING <expr> [AS alias], ...` bound parameters.
        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
19883
19884 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
19886 let name = self.parse_identifier()?;
19887
19888 let mut data_types = vec![];
19889 if self.consume_token(&Token::LParen) {
19890 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
19891 self.expect_token(&Token::RParen)?;
19892 }
19893
19894 self.expect_keyword_is(Keyword::AS)?;
19895 let statement = Box::new(self.parse_statement()?);
19896 Ok(Statement::Prepare {
19897 name,
19898 data_types,
19899 statement,
19900 })
19901 }
19902
19903 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
19905 self.expect_keyword(Keyword::UNLOAD)?;
19906 self.expect_token(&Token::LParen)?;
19907 let (query, query_text) =
19908 if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
19909 (None, Some(self.parse_literal_string()?))
19910 } else {
19911 (Some(self.parse_query()?), None)
19912 };
19913 self.expect_token(&Token::RParen)?;
19914
19915 self.expect_keyword_is(Keyword::TO)?;
19916 let to = self.parse_identifier()?;
19917 let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
19918 Some(self.parse_iam_role_kind()?)
19919 } else {
19920 None
19921 };
19922 let with = self.parse_options(Keyword::WITH)?;
19923 let mut options = vec![];
19924 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
19925 options.push(opt);
19926 }
19927 Ok(Statement::Unload {
19928 query,
19929 query_text,
19930 to,
19931 auth,
19932 with,
19933 options,
19934 })
19935 }
19936
19937 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
19938 let temporary = self
19939 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
19940 .is_some();
19941 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
19942 let table = self.parse_keyword(Keyword::TABLE);
19943 let name = self.parse_object_name(false)?;
19944
19945 Ok(SelectInto {
19946 temporary,
19947 unlogged,
19948 table,
19949 name,
19950 })
19951 }
19952
19953 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
19954 let v = self.parse_value()?;
19955 match &v.value {
19956 Value::SingleQuotedString(_) => Ok(v),
19957 Value::DoubleQuotedString(_) => Ok(v),
19958 Value::Number(_, _) => Ok(v),
19959 Value::Placeholder(_) => Ok(v),
19960 _ => {
19961 self.prev_token();
19962 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
19963 }
19964 }
19965 }
19966
19967 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
19969 let name = self.parse_object_name(false)?;
19970 if self.consume_token(&Token::LParen) {
19971 let value = self.parse_pragma_value()?;
19972 self.expect_token(&Token::RParen)?;
19973 Ok(Statement::Pragma {
19974 name,
19975 value: Some(value),
19976 is_eq: false,
19977 })
19978 } else if self.consume_token(&Token::Eq) {
19979 Ok(Statement::Pragma {
19980 name,
19981 value: Some(self.parse_pragma_value()?),
19982 is_eq: true,
19983 })
19984 } else {
19985 Ok(Statement::Pragma {
19986 name,
19987 value: None,
19988 is_eq: false,
19989 })
19990 }
19991 }
19992
19993 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
19995 let extension_name = self.parse_identifier()?;
19996
19997 Ok(Statement::Install { extension_name })
19998 }
19999
20000 pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
20002 if self.dialect.supports_load_extension() {
20003 let extension_name = self.parse_identifier()?;
20004 Ok(Statement::Load { extension_name })
20005 } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
20006 let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
20007 self.expect_keyword_is(Keyword::INPATH)?;
20008 let inpath = self.parse_literal_string()?;
20009 let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
20010 self.expect_keyword_is(Keyword::INTO)?;
20011 self.expect_keyword_is(Keyword::TABLE)?;
20012 let table_name = self.parse_object_name(false)?;
20013 let partitioned = self.parse_insert_partition()?;
20014 let table_format = self.parse_load_data_table_format()?;
20015 Ok(Statement::LoadData {
20016 local,
20017 inpath,
20018 overwrite,
20019 table_name,
20020 partitioned,
20021 table_format,
20022 })
20023 } else {
20024 self.expected_ref(
20025 "`DATA` or an extension name after `LOAD`",
20026 self.peek_token_ref(),
20027 )
20028 }
20029 }
20030
    /// Parses an `OPTIMIZE [TABLE] name [ON CLUSTER c]
    /// [PARTITION expr | PARTITION ID id] [FINAL] [DEDUPLICATE [BY expr]]
    /// [WHERE predicate] [ZORDER BY (expr, ...)]` statement
    /// (ClickHouse-style clauses plus the trailing WHERE/ZORDER forms).
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        // `TABLE` is optional; record whether it was written so the
        // statement can be round-tripped as entered.
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // `PARTITION ID ident` selects a partition by identifier,
        // plain `PARTITION expr` by expression.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        // `DEDUPLICATE` alone deduplicates on all columns; `DEDUPLICATE BY
        // expr` restricts it to the given expression.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // `ZORDER BY (expr, ...)` requires the parentheses.
        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
20099
20100 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
20106 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20108 let name = self.parse_object_name(false)?;
20110 let mut data_type: Option<DataType> = None;
20112 if self.parse_keywords(&[Keyword::AS]) {
20113 data_type = Some(self.parse_data_type()?)
20114 }
20115 let sequence_options = self.parse_create_sequence_options()?;
20116 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
20118 if self.parse_keywords(&[Keyword::NONE]) {
20119 Some(ObjectName::from(vec![Ident::new("NONE")]))
20120 } else {
20121 Some(self.parse_object_name(false)?)
20122 }
20123 } else {
20124 None
20125 };
20126 Ok(Statement::CreateSequence {
20127 temporary,
20128 if_not_exists,
20129 name,
20130 data_type,
20131 sequence_options,
20132 owned_by,
20133 })
20134 }
20135
20136 fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
20137 let mut sequence_options = vec![];
20138 if self.parse_keywords(&[Keyword::INCREMENT]) {
20140 if self.parse_keywords(&[Keyword::BY]) {
20141 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
20142 } else {
20143 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
20144 }
20145 }
20146 if self.parse_keyword(Keyword::MINVALUE) {
20148 sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
20149 } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
20150 sequence_options.push(SequenceOptions::MinValue(None));
20151 }
20152 if self.parse_keywords(&[Keyword::MAXVALUE]) {
20154 sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
20155 } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
20156 sequence_options.push(SequenceOptions::MaxValue(None));
20157 }
20158
20159 if self.parse_keywords(&[Keyword::START]) {
20161 if self.parse_keywords(&[Keyword::WITH]) {
20162 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
20163 } else {
20164 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
20165 }
20166 }
20167 if self.parse_keywords(&[Keyword::CACHE]) {
20169 sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
20170 }
20171 if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
20173 sequence_options.push(SequenceOptions::Cycle(true));
20174 } else if self.parse_keywords(&[Keyword::CYCLE]) {
20175 sequence_options.push(SequenceOptions::Cycle(false));
20176 }
20177
20178 Ok(sequence_options)
20179 }
20180
20181 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
20185 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20186 let name = self.parse_object_name(false)?;
20187
20188 let server_type = if self.parse_keyword(Keyword::TYPE) {
20189 Some(self.parse_identifier()?)
20190 } else {
20191 None
20192 };
20193
20194 let version = if self.parse_keyword(Keyword::VERSION) {
20195 Some(self.parse_identifier()?)
20196 } else {
20197 None
20198 };
20199
20200 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
20201 let foreign_data_wrapper = self.parse_object_name(false)?;
20202
20203 let mut options = None;
20204 if self.parse_keyword(Keyword::OPTIONS) {
20205 self.expect_token(&Token::LParen)?;
20206 options = Some(self.parse_comma_separated(|p| {
20207 let key = p.parse_identifier()?;
20208 let value = p.parse_identifier()?;
20209 Ok(CreateServerOption { key, value })
20210 })?);
20211 self.expect_token(&Token::RParen)?;
20212 }
20213
20214 Ok(Statement::CreateServer(CreateServerStatement {
20215 name,
20216 if_not_exists: ine,
20217 server_type,
20218 version,
20219 foreign_data_wrapper,
20220 options,
20221 }))
20222 }
20223
    /// Parses a PostgreSQL `CREATE FOREIGN DATA WRAPPER name
    /// [HANDLER func | NO HANDLER] [VALIDATOR func | NO VALIDATOR]
    /// [OPTIONS (key value, ...)]` statement (leading keywords already
    /// consumed by the caller).
    pub fn parse_create_foreign_data_wrapper(
        &mut self,
    ) -> Result<CreateForeignDataWrapper, ParserError> {
        let name = self.parse_identifier()?;

        // `HANDLER func`, explicit `NO HANDLER`, or clause absent (None).
        let handler = if self.parse_keyword(Keyword::HANDLER) {
            Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
        } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
            Some(FdwRoutineClause::NoFunction)
        } else {
            None
        };

        // Same three-way shape for the validator clause.
        let validator = if self.parse_keyword(Keyword::VALIDATOR) {
            Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
        } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
            Some(FdwRoutineClause::NoFunction)
        } else {
            None
        };

        // Optional `OPTIONS (key value, ...)`: identifier pairs.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignDataWrapper {
            name,
            handler,
            validator,
            options,
        })
    }
20268
    /// Parses a PostgreSQL `CREATE FOREIGN TABLE [IF NOT EXISTS] name
    /// (columns) SERVER server [OPTIONS (key value, ...)]` statement
    /// (leading keywords already consumed by the caller).
    pub fn parse_create_foreign_table(
        &mut self,
    ) -> Result<CreateForeignTable, ParserError> {
        let if_not_exists =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // NOTE(review): any table-level constraints parsed alongside the
        // columns are discarded here — only the column definitions are kept
        // in the resulting AST node.
        let (columns, _constraints) = self.parse_columns()?;
        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        // Optional `OPTIONS (key value, ...)`: identifier pairs.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignTable {
            name,
            if_not_exists,
            columns,
            server_name,
            options,
        })
    }
20303
    /// Parses a PostgreSQL `CREATE PUBLICATION name
    /// [FOR ALL TABLES | FOR TABLE t, ... | FOR TABLES IN SCHEMA s, ...]
    /// [WITH (options)]` statement (leading keywords already consumed by
    /// the caller).
    pub fn parse_create_publication(&mut self) -> Result<CreatePublication, ParserError> {
        let name = self.parse_identifier()?;

        // Omitting FOR entirely is allowed (target = None); once FOR is
        // seen, one of the three target forms must follow.
        let target = if self.parse_keyword(Keyword::FOR) {
            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES]) {
                Some(PublicationTarget::AllTables)
            } else if self.parse_keyword(Keyword::TABLE) {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                Some(PublicationTarget::Tables(tables))
            } else if self.parse_keywords(&[Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
                let schemas = self.parse_comma_separated(|p| p.parse_identifier())?;
                Some(PublicationTarget::TablesInSchema(schemas))
            } else {
                return self.expected_ref(
                    "ALL TABLES, TABLE, or TABLES IN SCHEMA after FOR",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        let with_options = self.parse_options(Keyword::WITH)?;

        Ok(CreatePublication {
            name,
            target,
            with_options,
        })
    }
20337
20338 pub fn parse_create_subscription(&mut self) -> Result<CreateSubscription, ParserError> {
20342 let name = self.parse_identifier()?;
20343 self.expect_keyword_is(Keyword::CONNECTION)?;
20344 let connection = self.parse_value()?.value;
20345 self.expect_keyword_is(Keyword::PUBLICATION)?;
20346 let publications = self.parse_comma_separated(|p| p.parse_identifier())?;
20347 let with_options = self.parse_options(Keyword::WITH)?;
20348
20349 Ok(CreateSubscription {
20350 name,
20351 connection,
20352 publications,
20353 with_options,
20354 })
20355 }
20356
    /// Parses a PostgreSQL `CREATE CAST (source AS target)
    /// { WITH FUNCTION func[(argtypes)] | WITHOUT FUNCTION | WITH INOUT }
    /// [AS ASSIGNMENT | AS IMPLICIT]` statement (leading keywords already
    /// consumed by the caller).
    pub fn parse_create_cast(&mut self) -> Result<CreateCast, ParserError> {
        self.expect_token(&Token::LParen)?;
        let source_type = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::AS)?;
        let target_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // Exactly one of the three function-kind forms is required.
        let function_kind = if self.parse_keywords(&[Keyword::WITHOUT, Keyword::FUNCTION]) {
            CastFunctionKind::WithoutFunction
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::INOUT]) {
            CastFunctionKind::WithInout
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::FUNCTION]) {
            let function_name = self.parse_object_name(false)?;
            // The argument-type list is optional, and may be empty: `f()`.
            let argument_types = if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                let types = if self.peek_token_ref().token == Token::RParen {
                    vec![]
                } else {
                    self.parse_comma_separated(|p| p.parse_data_type())?
                };
                self.expect_token(&Token::RParen)?;
                types
            } else {
                vec![]
            };
            CastFunctionKind::WithFunction {
                function_name,
                argument_types,
            }
        } else {
            return self.expected_ref(
                "WITH FUNCTION, WITHOUT FUNCTION, or WITH INOUT",
                self.peek_token_ref(),
            );
        };

        // Without an AS clause the cast context defaults to Explicit.
        let cast_context = if self.parse_keyword(Keyword::AS) {
            if self.parse_keyword(Keyword::ASSIGNMENT) {
                CastContext::Assignment
            } else if self.parse_keyword(Keyword::IMPLICIT) {
                CastContext::Implicit
            } else {
                return self.expected_ref("ASSIGNMENT or IMPLICIT after AS", self.peek_token_ref());
            }
        } else {
            CastContext::Explicit
        };

        Ok(CreateCast {
            source_type,
            target_type,
            function_kind,
            cast_context,
        })
    }
20415
20416 pub fn parse_create_conversion(
20420 &mut self,
20421 is_default: bool,
20422 ) -> Result<CreateConversion, ParserError> {
20423 let name = self.parse_object_name(false)?;
20424 self.expect_keyword_is(Keyword::FOR)?;
20425 let source_encoding = self.parse_literal_string()?;
20426 self.expect_keyword_is(Keyword::TO)?;
20427 let destination_encoding = self.parse_literal_string()?;
20428 self.expect_keyword_is(Keyword::FROM)?;
20429 let function_name = self.parse_object_name(false)?;
20430
20431 Ok(CreateConversion {
20432 name,
20433 is_default,
20434 source_encoding,
20435 destination_encoding,
20436 function_name,
20437 })
20438 }
20439
    /// Parses a PostgreSQL `CREATE [OR REPLACE] [TRUSTED] [PROCEDURAL]
    /// LANGUAGE name [HANDLER func] [INLINE func]
    /// [VALIDATOR func | NO VALIDATOR]` statement. The modifier flags were
    /// consumed by the caller and are passed through into the AST unchanged.
    pub fn parse_create_language(
        &mut self,
        or_replace: bool,
        trusted: bool,
        procedural: bool,
    ) -> Result<CreateLanguage, ParserError> {
        let name = self.parse_identifier()?;

        let handler = if self.parse_keyword(Keyword::HANDLER) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let inline_handler = if self.parse_keyword(Keyword::INLINE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `NO VALIDATOR` yields None just like omitting the clause, but the
        // keywords still have to be consumed.
        let validator = if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
            None
        } else if self.parse_keyword(Keyword::VALIDATOR) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(CreateLanguage {
            name,
            or_replace,
            trusted,
            procedural,
            handler,
            inline_handler,
            validator,
        })
    }
20481
20482 pub fn parse_create_rule(&mut self) -> Result<CreateRule, ParserError> {
20486 let name = self.parse_identifier()?;
20487 self.expect_keyword_is(Keyword::AS)?;
20488 self.expect_keyword_is(Keyword::ON)?;
20489
20490 let event = if self.parse_keyword(Keyword::SELECT) {
20491 RuleEvent::Select
20492 } else if self.parse_keyword(Keyword::INSERT) {
20493 RuleEvent::Insert
20494 } else if self.parse_keyword(Keyword::UPDATE) {
20495 RuleEvent::Update
20496 } else if self.parse_keyword(Keyword::DELETE) {
20497 RuleEvent::Delete
20498 } else {
20499 return self.expected_ref(
20500 "SELECT, INSERT, UPDATE, or DELETE after ON",
20501 self.peek_token_ref(),
20502 );
20503 };
20504
20505 self.expect_keyword_is(Keyword::TO)?;
20506 let table = self.parse_object_name(false)?;
20507
20508 let condition = if self.parse_keyword(Keyword::WHERE) {
20509 Some(self.parse_expr()?)
20510 } else {
20511 None
20512 };
20513
20514 self.expect_keyword_is(Keyword::DO)?;
20515
20516 let instead = if self.parse_keyword(Keyword::INSTEAD) {
20517 true
20518 } else if self.parse_keyword(Keyword::ALSO) {
20519 false
20520 } else {
20521 false
20522 };
20523
20524 let action = if self.parse_keyword(Keyword::NOTHING) {
20525 RuleAction::Nothing
20526 } else if self.peek_token_ref().token == Token::LParen {
20527 self.expect_token(&Token::LParen)?;
20528 let mut stmts = Vec::new();
20529 loop {
20530 stmts.push(self.parse_statement()?);
20531 if !self.consume_token(&Token::SemiColon) {
20532 break;
20533 }
20534 if self.peek_token_ref().token == Token::RParen {
20535 break;
20536 }
20537 }
20538 self.expect_token(&Token::RParen)?;
20539 RuleAction::Statements(stmts)
20540 } else {
20541 let stmt = self.parse_statement()?;
20542 RuleAction::Statements(vec![stmt])
20543 };
20544
20545 Ok(CreateRule {
20546 name,
20547 event,
20548 table,
20549 condition,
20550 instead,
20551 action,
20552 })
20553 }
20554
    /// Parses a PostgreSQL `CREATE STATISTICS [IF NOT EXISTS] name
    /// [(kind, ...)] ON expr, ... FROM table` statement (leading keywords
    /// already consumed by the caller).
    pub fn parse_create_statistics(&mut self) -> Result<CreateStatistics, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        // Optional parenthesized kind list; when absent, `kinds` is empty.
        let kinds = if self.consume_token(&Token::LParen) {
            let kinds = self.parse_comma_separated(|p| {
                // Kinds are ordinary identifiers, matched case-insensitively.
                let ident = p.parse_identifier()?;
                match ident.value.to_lowercase().as_str() {
                    "ndistinct" => Ok(StatisticsKind::NDistinct),
                    "dependencies" => Ok(StatisticsKind::Dependencies),
                    "mcv" => Ok(StatisticsKind::Mcv),
                    other => Err(ParserError::ParserError(format!(
                        "Unknown statistics kind: {other}"
                    ))),
                }
            })?;
            self.expect_token(&Token::RParen)?;
            kinds
        } else {
            vec![]
        };

        self.expect_keyword_is(Keyword::ON)?;
        let on = self.parse_comma_separated(Parser::parse_expr)?;
        self.expect_keyword_is(Keyword::FROM)?;
        let from = self.parse_object_name(false)?;

        Ok(CreateStatistics {
            if_not_exists,
            name,
            kinds,
            on,
            from,
        })
    }
20593
20594 pub fn parse_create_access_method(&mut self) -> Result<CreateAccessMethod, ParserError> {
20598 let name = self.parse_identifier()?;
20599 self.expect_keyword_is(Keyword::TYPE)?;
20600 let method_type = if self.parse_keyword(Keyword::INDEX) {
20601 AccessMethodType::Index
20602 } else if self.parse_keyword(Keyword::TABLE) {
20603 AccessMethodType::Table
20604 } else {
20605 return self.expected_ref("INDEX or TABLE after TYPE", self.peek_token_ref());
20606 };
20607 self.expect_keyword_is(Keyword::HANDLER)?;
20608 let handler = self.parse_object_name(false)?;
20609
20610 Ok(CreateAccessMethod {
20611 name,
20612 method_type,
20613 handler,
20614 })
20615 }
20616
    /// Parses a PostgreSQL `CREATE EVENT TRIGGER name ON event
    /// [WHEN TAG IN (tag, ...)] EXECUTE {FUNCTION | PROCEDURE} func()`
    /// statement (leading keywords already consumed by the caller).
    pub fn parse_create_event_trigger(&mut self) -> Result<CreateEventTrigger, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        // The event is an identifier, matched case-insensitively against
        // the four supported trigger events.
        let event_ident = self.parse_identifier()?;
        let event = match event_ident.value.to_lowercase().as_str() {
            "ddl_command_start" => EventTriggerEvent::DdlCommandStart,
            "ddl_command_end" => EventTriggerEvent::DdlCommandEnd,
            "table_rewrite" => EventTriggerEvent::TableRewrite,
            "sql_drop" => EventTriggerEvent::SqlDrop,
            other => {
                return Err(ParserError::ParserError(format!(
                    "Unknown event trigger event: {other}"
                )))
            }
        };

        // Optional `WHEN TAG IN (...)` filter; tags are value literals.
        let when_tags = if self.parse_keyword(Keyword::WHEN) {
            self.expect_keyword_is(Keyword::TAG)?;
            self.expect_keyword_is(Keyword::IN)?;
            self.expect_token(&Token::LParen)?;
            let tags = self.parse_comma_separated(|p| p.parse_value().map(|v| v.value))?;
            self.expect_token(&Token::RParen)?;
            Some(tags)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::EXECUTE)?;
        // `is_procedure` records which of the two spellings was used.
        let is_procedure = if self.parse_keyword(Keyword::FUNCTION) {
            false
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            true
        } else {
            return self.expected_ref("FUNCTION or PROCEDURE after EXECUTE", self.peek_token_ref());
        };
        let execute = self.parse_object_name(false)?;
        // An empty argument list `()` is required after the function name.
        self.expect_token(&Token::LParen)?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateEventTrigger {
            name,
            event,
            when_tags,
            execute,
            is_procedure,
        })
    }
20667
    /// Parses a PostgreSQL `CREATE [OR REPLACE] TRANSFORM FOR type
    /// LANGUAGE lang ({FROM | TO} SQL WITH FUNCTION func(argtypes), ...)`
    /// statement. `CREATE [OR REPLACE] TRANSFORM` was consumed by the
    /// caller, which passes the OR REPLACE flag in `or_replace`.
    pub fn parse_create_transform(&mut self, or_replace: bool) -> Result<CreateTransform, ParserError> {
        self.expect_keyword_is(Keyword::FOR)?;
        let type_name = self.parse_data_type()?;
        self.expect_keyword_is(Keyword::LANGUAGE)?;
        let language = self.parse_identifier()?;
        self.expect_token(&Token::LParen)?;
        // Each element is `{FROM | TO} SQL WITH FUNCTION func(argtypes)`;
        // `is_from` records which direction was written.
        let elements = self.parse_comma_separated(|p| {
            let is_from = if p.parse_keyword(Keyword::FROM) {
                true
            } else {
                p.expect_keyword_is(Keyword::TO)?;
                false
            };
            p.expect_keyword_is(Keyword::SQL)?;
            p.expect_keyword_is(Keyword::WITH)?;
            p.expect_keyword_is(Keyword::FUNCTION)?;
            let function = p.parse_object_name(false)?;
            p.expect_token(&Token::LParen)?;
            // The argument-type list may be empty: `f()`.
            let arg_types = if p.peek_token().token == Token::RParen {
                vec![]
            } else {
                p.parse_comma_separated(|p| p.parse_data_type())?
            };
            p.expect_token(&Token::RParen)?;
            Ok(TransformElement {
                is_from,
                function,
                arg_types,
            })
        })?;
        self.expect_token(&Token::RParen)?;

        Ok(CreateTransform {
            or_replace,
            type_name,
            language,
            elements,
        })
    }
20710
20711
    /// Parses a PostgreSQL `SECURITY LABEL [FOR provider] ON object_kind
    /// object_name IS {'label' | NULL}` statement. `SECURITY` was consumed
    /// by the caller; `LABEL` is expected next.
    pub fn parse_security_label(&mut self) -> Result<SecurityLabel, ParserError> {
        self.expect_keyword_is(Keyword::LABEL)?;

        let provider = if self.parse_keyword(Keyword::FOR) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::ON)?;

        // Dispatch on the object-kind keyword(s) following ON.
        let object_kind = if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            SecurityLabelObjectKind::MaterializedView
        } else if self.parse_keyword(Keyword::TABLE) {
            SecurityLabelObjectKind::Table
        } else if self.parse_keyword(Keyword::COLUMN) {
            SecurityLabelObjectKind::Column
        } else if self.parse_keyword(Keyword::DATABASE) {
            SecurityLabelObjectKind::Database
        } else if self.parse_keyword(Keyword::DOMAIN) {
            SecurityLabelObjectKind::Domain
        } else if self.parse_keyword(Keyword::FUNCTION) {
            SecurityLabelObjectKind::Function
        } else if self.parse_keyword(Keyword::ROLE) {
            SecurityLabelObjectKind::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            SecurityLabelObjectKind::Schema
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            SecurityLabelObjectKind::Sequence
        } else if self.parse_keyword(Keyword::TYPE) {
            SecurityLabelObjectKind::Type
        } else if self.parse_keyword(Keyword::VIEW) {
            SecurityLabelObjectKind::View
        } else {
            return self.expected_ref(
                "TABLE, COLUMN, DATABASE, DOMAIN, FUNCTION, MATERIALIZED VIEW, ROLE, SCHEMA, SEQUENCE, TYPE, or VIEW after ON",
                self.peek_token_ref(),
            );
        };

        let object_name = self.parse_object_name(false)?;

        self.expect_keyword_is(Keyword::IS)?;

        // `IS NULL` removes the label, represented here as None.
        let label = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_value()?.value)
        };

        Ok(SecurityLabel {
            provider,
            object_kind,
            object_name,
            label,
        })
    }
20772
    /// Parses a PostgreSQL `CREATE USER MAPPING [IF NOT EXISTS] FOR
    /// {name | USER | CURRENT_ROLE | CURRENT_USER | PUBLIC} SERVER server
    /// [OPTIONS (key value, ...)]` statement (leading keywords already
    /// consumed by the caller).
    pub fn parse_create_user_mapping(&mut self) -> Result<CreateUserMapping, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::FOR)?;

        // The special role keywords are tried first; anything else is an
        // ordinary identifier naming the user.
        let user = if self.parse_keyword(Keyword::CURRENT_ROLE) {
            UserMappingUser::CurrentRole
        } else if self.parse_keyword(Keyword::CURRENT_USER) {
            UserMappingUser::CurrentUser
        } else if self.parse_keyword(Keyword::PUBLIC) {
            UserMappingUser::Public
        } else if self.parse_keyword(Keyword::USER) {
            UserMappingUser::User
        } else {
            UserMappingUser::Ident(self.parse_identifier()?)
        };

        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        // Optional `OPTIONS (key value, ...)`: identifier pairs.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateUserMapping {
            if_not_exists,
            user,
            server_name,
            options,
        })
    }
20816
20817 pub fn parse_create_tablespace(&mut self) -> Result<CreateTablespace, ParserError> {
20821 let name = self.parse_identifier()?;
20822
20823 let owner = if self.parse_keyword(Keyword::OWNER) {
20824 Some(self.parse_identifier()?)
20825 } else {
20826 None
20827 };
20828
20829 self.expect_keyword_is(Keyword::LOCATION)?;
20830 let location = self.parse_value()?.value;
20831
20832 let with_options = self.parse_options(Keyword::WITH)?;
20833
20834 Ok(CreateTablespace {
20835 name,
20836 owner,
20837 location,
20838 with_options,
20839 })
20840 }
20841
    /// Returns the parser's current token index into the tokenized input.
    pub fn index(&self) -> usize {
        self.index
    }
20846
    /// Parses one entry of a WINDOW clause: `name AS (window_spec)`, or
    /// `name AS other_window` for dialects that allow a named-window
    /// reference on the right-hand side.
    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
        let ident = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;

        let window_expr = if self.consume_token(&Token::LParen) {
            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
        } else if self.dialect.supports_window_clause_named_window_reference() {
            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
        } else {
            return self.expected_ref("(", self.peek_token_ref());
        };

        Ok(NamedWindowDefinition(ident, window_expr))
    }
20862
    /// Parses a `CREATE [OR ALTER] PROCEDURE name [(params)]
    /// [LANGUAGE lang] AS body` statement. The keywords through PROCEDURE
    /// were consumed by the caller, which passes the OR ALTER flag in
    /// `or_alter`.
    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        let params = self.parse_optional_procedure_parameters()?;

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;

        // The body is a conditional-statement list terminated by END.
        let body = self.parse_conditional_statements(&[Keyword::END])?;

        Ok(Statement::CreateProcedure {
            name,
            or_alter,
            params,
            language,
            body,
        })
    }
20886
    /// Parses the interior of a window specification — the part between the
    /// parentheses of `OVER ( ... )`: an optional base window name, then
    /// optional PARTITION BY, ORDER BY, and window-frame clauses. The
    /// closing `)` is consumed here.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword word is a reference to a named base window.
        let window_name = match &self.peek_token_ref().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything remaining before `)` must be a window-frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
20921
    /// Parses a `CREATE TYPE name [AS {ENUM ... | RANGE ... | (attrs)}
    /// | (options)]` statement (keywords through TYPE already consumed),
    /// dispatching to the enum/range/composite sub-parsers when `AS` is
    /// present.
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_as = self.parse_keyword(Keyword::AS);

        if !has_as {
            // Without AS, a parenthesized list is the base-type form:
            // `CREATE TYPE name (input = ..., output = ..., ...)`.
            if self.consume_token(&Token::LParen) {
                let options = self.parse_create_type_sql_definition_options()?;
                self.expect_token(&Token::RParen)?;
                return Ok(Statement::CreateType {
                    name,
                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
                });
            }

            // Bare `CREATE TYPE name` carries no representation.
            return Ok(Statement::CreateType {
                name,
                representation: None,
            });
        }

        if self.parse_keyword(Keyword::ENUM) {
            self.parse_create_type_enum(name)
        } else if self.parse_keyword(Keyword::RANGE) {
            self.parse_create_type_range(name)
        } else if self.consume_token(&Token::LParen) {
            self.parse_create_type_composite(name)
        } else {
            self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
        }
    }
20962
    /// Parses the attribute list of `CREATE TYPE name AS (attr type
    /// [COLLATE collation], ...)`. The opening `(` has already been
    /// consumed; an immediately following `)` yields a composite type with
    /// no attributes.
    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        if self.consume_token(&Token::RParen) {
            return Ok(Statement::CreateType {
                name,
                representation: Some(UserDefinedTypeRepresentation::Composite {
                    attributes: vec![],
                }),
            });
        }

        // Hand-rolled comma loop: each attribute is
        // `name data_type [COLLATE collation]`.
        let mut attributes = vec![];
        loop {
            let attr_name = self.parse_identifier()?;
            let attr_data_type = self.parse_data_type()?;
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
        })
    }
21003
    /// Parses the label list of `CREATE TYPE name AS ENUM (label, ...)`;
    /// `ENUM` has already been consumed. The labels are parsed as
    /// identifiers, and the list may be empty.
    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
        })
    }
21017
21018 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
21022 self.expect_token(&Token::LParen)?;
21023 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
21024 self.expect_token(&Token::RParen)?;
21025
21026 Ok(Statement::CreateType {
21027 name,
21028 representation: Some(UserDefinedTypeRepresentation::Range { options }),
21029 })
21030 }
21031
21032 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
21034 let keyword = self.parse_one_of_keywords(&[
21035 Keyword::SUBTYPE,
21036 Keyword::SUBTYPE_OPCLASS,
21037 Keyword::COLLATION,
21038 Keyword::CANONICAL,
21039 Keyword::SUBTYPE_DIFF,
21040 Keyword::MULTIRANGE_TYPE_NAME,
21041 ]);
21042
21043 match keyword {
21044 Some(Keyword::SUBTYPE) => {
21045 self.expect_token(&Token::Eq)?;
21046 let data_type = self.parse_data_type()?;
21047 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
21048 }
21049 Some(Keyword::SUBTYPE_OPCLASS) => {
21050 self.expect_token(&Token::Eq)?;
21051 let name = self.parse_object_name(false)?;
21052 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
21053 }
21054 Some(Keyword::COLLATION) => {
21055 self.expect_token(&Token::Eq)?;
21056 let name = self.parse_object_name(false)?;
21057 Ok(UserDefinedTypeRangeOption::Collation(name))
21058 }
21059 Some(Keyword::CANONICAL) => {
21060 self.expect_token(&Token::Eq)?;
21061 let name = self.parse_object_name(false)?;
21062 Ok(UserDefinedTypeRangeOption::Canonical(name))
21063 }
21064 Some(Keyword::SUBTYPE_DIFF) => {
21065 self.expect_token(&Token::Eq)?;
21066 let name = self.parse_object_name(false)?;
21067 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
21068 }
21069 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
21070 self.expect_token(&Token::Eq)?;
21071 let name = self.parse_object_name(false)?;
21072 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
21073 }
21074 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
21075 }
21076 }
21077
    /// Parses the comma-separated (possibly empty) list of `key = value`
    /// options in the SQL-definition form of `CREATE TYPE name ( ... )`,
    /// stopping at the closing parenthesis (which is not consumed here).
    fn parse_create_type_sql_definition_options(
        &mut self,
    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
    }
21084
    /// Parses a single `KEYWORD = value` option from the SQL-definition form
    /// of `CREATE TYPE name ( INPUT = ..., OUTPUT = ..., ... )`.
    ///
    /// Returns an error if the next token is not one of the supported option
    /// keywords, or if the option's value does not have the expected form.
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        match keyword {
            // Most options take an object name (function or type name) as
            // their value.
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            // INTERNALLENGTH accepts either the keyword VARIABLE or an
            // unsigned integer literal.
            Some(Keyword::INTERNALLENGTH) => {
                self.expect_token(&Token::Eq)?;
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // PASSEDBYVALUE is a bare flag: no `= value` part.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected_ref(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected_ref(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            // CATEGORY takes a string literal; only its first character is
            // kept, and an empty string is rejected.
            Some(Keyword::CATEGORY) => {
                self.expect_token(&Token::Eq)?;
                let category_str = self.parse_literal_string()?;
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            // NOTE(review): if neither TRUE nor FALSE follows `=`, this
            // evaluates to `true` without consuming any token — presumably the
            // unconsumed token then fails in the surrounding comma-separated
            // list; TODO confirm this is intentional.
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            // Same missing-boolean quirk as PREFERRED above.
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
        }
    }
21265
21266 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
21267 self.expect_token(&Token::LParen)?;
21268 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
21269 self.expect_token(&Token::RParen)?;
21270 Ok(idents)
21271 }
21272
21273 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
21274 if dialect_of!(self is MySqlDialect | GenericDialect) {
21275 if self.parse_keyword(Keyword::FIRST) {
21276 Ok(Some(MySQLColumnPosition::First))
21277 } else if self.parse_keyword(Keyword::AFTER) {
21278 let ident = self.parse_identifier()?;
21279 Ok(Some(MySQLColumnPosition::After(ident)))
21280 } else {
21281 Ok(None)
21282 }
21283 } else {
21284 Ok(None)
21285 }
21286 }
21287
21288 fn parse_print(&mut self) -> Result<Statement, ParserError> {
21290 Ok(Statement::Print(PrintStatement {
21291 message: Box::new(self.parse_expr()?),
21292 }))
21293 }
21294
21295 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
21299 let wait_type = if self.parse_keyword(Keyword::DELAY) {
21300 WaitForType::Delay
21301 } else if self.parse_keyword(Keyword::TIME) {
21302 WaitForType::Time
21303 } else {
21304 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
21305 };
21306 let expr = self.parse_expr()?;
21307 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
21308 }
21309
21310 fn parse_return(&mut self) -> Result<Statement, ParserError> {
21312 match self.maybe_parse(|p| p.parse_expr())? {
21313 Some(expr) => Ok(Statement::Return(ReturnStatement {
21314 value: Some(ReturnStatementValue::Expr(expr)),
21315 })),
21316 None => Ok(Statement::Return(ReturnStatement { value: None })),
21317 }
21318 }
21319
21320 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
21324 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
21325
21326 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
21327 Some(self.parse_object_name(false)?)
21328 } else {
21329 None
21330 };
21331 self.expect_keyword(Keyword::OPTIONS)?;
21332 self.expect_token(&Token::LParen)?;
21333 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
21334 self.expect_token(&Token::RParen)?;
21335 self.expect_keyword(Keyword::AS)?;
21336 let query = self.parse_query()?;
21337 Ok(Statement::ExportData(ExportData {
21338 options,
21339 query,
21340 connection,
21341 }))
21342 }
21343
21344 fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
21345 self.expect_keyword(Keyword::VACUUM)?;
21346 let full = self.parse_keyword(Keyword::FULL);
21347 let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
21348 let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
21349 let reindex = self.parse_keyword(Keyword::REINDEX);
21350 let recluster = self.parse_keyword(Keyword::RECLUSTER);
21351 let (table_name, threshold, boost) =
21352 match self.maybe_parse(|p| p.parse_object_name(false))? {
21353 Some(table_name) => {
21354 let threshold = if self.parse_keyword(Keyword::TO) {
21355 let value = self.parse_value()?;
21356 self.expect_keyword(Keyword::PERCENT)?;
21357 Some(value)
21358 } else {
21359 None
21360 };
21361 let boost = self.parse_keyword(Keyword::BOOST);
21362 (Some(table_name), threshold, boost)
21363 }
21364 _ => (None, None, false),
21365 };
21366 Ok(Statement::Vacuum(VacuumStatement {
21367 full,
21368 sort_only,
21369 delete_only,
21370 reindex,
21371 recluster,
21372 table_name,
21373 threshold,
21374 boost,
21375 }))
21376 }
21377
    /// Consumes the parser and returns its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
21382
    /// Returns true if the next token starts a subquery (`SELECT` or `WITH`),
    /// without consuming any tokens.
    fn peek_sub_query(&mut self) -> bool {
        self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
            .is_some()
    }
21388
21389 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
21390 let show_in;
21391 let mut filter_position = None;
21392 if self.dialect.supports_show_like_before_in() {
21393 if let Some(filter) = self.parse_show_statement_filter()? {
21394 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
21395 }
21396 show_in = self.maybe_parse_show_stmt_in()?;
21397 } else {
21398 show_in = self.maybe_parse_show_stmt_in()?;
21399 if let Some(filter) = self.parse_show_statement_filter()? {
21400 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
21401 }
21402 }
21403 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
21404 let limit = self.maybe_parse_show_stmt_limit()?;
21405 let from = self.maybe_parse_show_stmt_from()?;
21406 Ok(ShowStatementOptions {
21407 filter_position,
21408 show_in,
21409 starts_with,
21410 limit,
21411 limit_from: from,
21412 })
21413 }
21414
    /// Parses an optional `FROM`/`IN` scope clause of a `SHOW` statement,
    /// e.g. `IN DATABASE db` or `FROM schema.table`. Returns `Ok(None)` when
    /// neither introducer keyword is present.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // A bare `DATABASE`/`SCHEMA` immediately followed by
            // `STARTS WITH` or `LIMIT` means no parent name was supplied —
            // those keywords begin the next clause.
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            // A parent-type keyword, optionally followed by the parent's
            // (possibly qualified) name.
            Some(parent_kw) => {
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected_ref(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token_ref(),
                        )
                    }
                }
            }
            // No parent-type keyword: the scope is just an object name. A
            // second FROM/IN introduces an extra qualifier, which is
            // prepended to the name already parsed.
            None => {
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
21484
21485 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21486 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
21487 Ok(Some(self.parse_value()?))
21488 } else {
21489 Ok(None)
21490 }
21491 }
21492
21493 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
21494 if self.parse_keyword(Keyword::LIMIT) {
21495 Ok(self.parse_limit()?)
21496 } else {
21497 Ok(None)
21498 }
21499 }
21500
21501 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
21502 if self.parse_keyword(Keyword::FROM) {
21503 Ok(Some(self.parse_value()?))
21504 } else {
21505 Ok(None)
21506 }
21507 }
21508
    /// Returns true when the parser is currently inside a column definition
    /// (see `ParserState::ColumnDefinition`).
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
21512
21513 pub(crate) fn parse_key_value_options(
21518 &mut self,
21519 parenthesized: bool,
21520 end_words: &[Keyword],
21521 ) -> Result<KeyValueOptions, ParserError> {
21522 let mut options: Vec<KeyValueOption> = Vec::new();
21523 let mut delimiter = KeyValueOptionsDelimiter::Space;
21524 if parenthesized {
21525 self.expect_token(&Token::LParen)?;
21526 }
21527 loop {
21528 match self.next_token().token {
21529 Token::RParen => {
21530 if parenthesized {
21531 break;
21532 } else {
21533 return self.expected_ref(" another option or EOF", self.peek_token_ref());
21534 }
21535 }
21536 Token::EOF | Token::SemiColon => break,
21537 Token::Comma => {
21538 delimiter = KeyValueOptionsDelimiter::Comma;
21539 continue;
21540 }
21541 Token::Word(w) if !end_words.contains(&w.keyword) => {
21542 options.push(self.parse_key_value_option(&w)?)
21543 }
21544 Token::Word(w) if end_words.contains(&w.keyword) => {
21545 self.prev_token();
21546 break;
21547 }
21548 _ => {
21549 return self.expected_ref(
21550 "another option, EOF, SemiColon, Comma or ')'",
21551 self.peek_token_ref(),
21552 )
21553 }
21554 };
21555 }
21556
21557 Ok(KeyValueOptions { delimiter, options })
21558 }
21559
    /// Parses the `= value` part of a single key-value option, where `key`
    /// has already been consumed by the caller.
    ///
    /// The value may be a quoted string, TRUE/FALSE, a number, a bare word
    /// (stored as a placeholder), a parenthesized comma-separated list of
    /// values, or a nested parenthesized set of key-value options.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word is kept verbatim as a placeholder value;
            // the peeked token is consumed here.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            Token::LParen => {
                // First try a parenthesized list of plain values; if that
                // fails (backtracking via maybe_parse), fall back to a nested
                // set of key-value options.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
21618
21619 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
21621 if self.parse_keyword(Keyword::ALL) {
21622 return Ok(ResetStatement { reset: Reset::ALL });
21623 }
21624
21625 let obj = self.parse_object_name(false)?;
21626 Ok(ResetStatement {
21627 reset: Reset::ConfigurationParameter(obj),
21628 })
21629 }
21630}
21631
21632fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
21633 if let Some(prefix) = prefix {
21634 Expr::Prefixed {
21635 prefix,
21636 value: Box::new(expr),
21637 }
21638 } else {
21639 expr
21640 }
21641}
21642
21643impl Word {
21644 pub fn to_ident(&self, span: Span) -> Ident {
21650 Ident {
21651 value: self.value.clone(),
21652 quote_style: self.quote_style,
21653 span,
21654 }
21655 }
21656
21657 pub fn into_ident(self, span: Span) -> Ident {
21662 Ident {
21663 value: self.value,
21664 quote_style: self.quote_style,
21665 span,
21666 }
21667 }
21668}
21669
21670#[cfg(test)]
21671mod tests {
21672 use crate::test_utils::{all_dialects, TestedDialects};
21673
21674 use super::*;
21675
    #[test]
    fn test_prev_index() {
        // Exercises next_token/prev_token/peek_token interplay, including the
        // behavior of prev_token at and beyond EOF.
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // next_token keeps returning EOF once the stream is exhausted.
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
21696
    #[test]
    fn test_peek_tokens() {
        // peek_tokens returns a fixed-size lookahead array without consuming
        // anything; past the end of input it pads with EOF.
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            // Peeking again yields the same tokens: nothing was consumed.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Advance past "SELECT foo AS bar".
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
21741
21742 #[cfg(test)]
21743 mod test_parse_data_type {
21744 use crate::ast::{
21745 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
21746 };
21747 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
21748 use crate::test_utils::TestedDialects;
21749
        // Asserts that `$input` parses to `$expected_type` and that the
        // parsed type round-trips back to the exact input SQL text.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
21759
        #[test]
        fn test_ansii_character_string_types() {
            // ANSI SQL character string types: CHARACTER/CHAR[ VARYING] and
            // VARCHAR, with optional length and CHARACTERS/OCTETS units.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
21887
        #[test]
        fn test_ansii_character_large_object_types() {
            // ANSI SQL character large object types: CHARACTER/CHAR LARGE
            // OBJECT and CLOB, each with an optional length.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }
21919
        #[test]
        fn test_parse_custom_types() {
            // Unrecognized type names parse as DataType::Custom, with any
            // parenthesized modifiers captured as strings.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }
21949
        #[test]
        fn test_ansii_exact_numeric_types() {
            // NUMERIC/DECIMAL/DEC with optional precision and (possibly
            // negative) scale, plus normalization of an explicit `+` scale.
            let dialect = TestedDialects::new(vec![
                Box::new(GenericDialect {}),
                Box::new(AnsiDialect {}),
                Box::new(PostgreSqlDialect {}),
            ]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            // Negative scales round-trip verbatim.
            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-2)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(1000,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-1000)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-5)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(20,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-2)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
            );

            // An explicit `+` sign on the scale is accepted but not preserved
            // in the display form, so the round-trip macro can't be used here.
            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
                let data_type = parser.parse_data_type().unwrap();
                assert_eq!(
                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
                    data_type
                );
                assert_eq!("NUMERIC(10,5)", data_type.to_string());
            });
        }
22048
        #[test]
        fn test_ansii_date_type() {
            // DATE, TIME, and TIMESTAMP with optional precision and the
            // WITH/WITHOUT TIME ZONE qualifiers.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
22113 }
22114
    #[test]
    fn test_parse_schema_name() {
        // Asserts that `$input` parses to `$expected_name` and round-trips
        // back to the same SQL text.
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        // Covers all three SchemaName forms: plain name, authorization only,
        // and name plus authorization.
        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }
22147
22148 #[test]
22149 fn mysql_parse_index_table_constraint() {
22150 macro_rules! test_parse_table_constraint {
22151 ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
22152 $dialect.run_parser_method(&*$input, |parser| {
22153 let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
22154 assert_eq!(constraint, $expected);
22156 assert_eq!(constraint.to_string(), $input.to_string());
22158 });
22159 }};
22160 }
22161
22162 fn mk_expected_col(name: &str) -> IndexColumn {
22163 IndexColumn {
22164 column: OrderByExpr {
22165 expr: Expr::Identifier(name.into()),
22166 options: OrderByOptions {
22167 asc: None,
22168 nulls_first: None,
22169 },
22170 with_fill: None,
22171 },
22172 operator_class: None,
22173 }
22174 }
22175
22176 let dialect =
22177 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
22178
22179 test_parse_table_constraint!(
22180 dialect,
22181 "INDEX (c1)",
22182 IndexConstraint {
22183 display_as_key: false,
22184 name: None,
22185 index_type: None,
22186 columns: vec![mk_expected_col("c1")],
22187 index_options: vec![],
22188 }
22189 .into()
22190 );
22191
22192 test_parse_table_constraint!(
22193 dialect,
22194 "KEY (c1)",
22195 IndexConstraint {
22196 display_as_key: true,
22197 name: None,
22198 index_type: None,
22199 columns: vec![mk_expected_col("c1")],
22200 index_options: vec![],
22201 }
22202 .into()
22203 );
22204
22205 test_parse_table_constraint!(
22206 dialect,
22207 "INDEX 'index' (c1, c2)",
22208 TableConstraint::Index(IndexConstraint {
22209 display_as_key: false,
22210 name: Some(Ident::with_quote('\'', "index")),
22211 index_type: None,
22212 columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
22213 index_options: vec![],
22214 })
22215 );
22216
22217 test_parse_table_constraint!(
22218 dialect,
22219 "INDEX USING BTREE (c1)",
22220 IndexConstraint {
22221 display_as_key: false,
22222 name: None,
22223 index_type: Some(IndexType::BTree),
22224 columns: vec![mk_expected_col("c1")],
22225 index_options: vec![],
22226 }
22227 .into()
22228 );
22229
22230 test_parse_table_constraint!(
22231 dialect,
22232 "INDEX USING HASH (c1)",
22233 IndexConstraint {
22234 display_as_key: false,
22235 name: None,
22236 index_type: Some(IndexType::Hash),
22237 columns: vec![mk_expected_col("c1")],
22238 index_options: vec![],
22239 }
22240 .into()
22241 );
22242
22243 test_parse_table_constraint!(
22244 dialect,
22245 "INDEX idx_name USING BTREE (c1)",
22246 IndexConstraint {
22247 display_as_key: false,
22248 name: Some(Ident::new("idx_name")),
22249 index_type: Some(IndexType::BTree),
22250 columns: vec![mk_expected_col("c1")],
22251 index_options: vec![],
22252 }
22253 .into()
22254 );
22255
22256 test_parse_table_constraint!(
22257 dialect,
22258 "INDEX idx_name USING HASH (c1)",
22259 IndexConstraint {
22260 display_as_key: false,
22261 name: Some(Ident::new("idx_name")),
22262 index_type: Some(IndexType::Hash),
22263 columns: vec![mk_expected_col("c1")],
22264 index_options: vec![],
22265 }
22266 .into()
22267 );
22268 }
22269
22270 #[test]
22271 fn test_tokenizer_error_loc() {
22272 let sql = "foo '";
22273 let ast = Parser::parse_sql(&GenericDialect, sql);
22274 assert_eq!(
22275 ast,
22276 Err(ParserError::TokenizerError(
22277 "Unterminated string literal at Line: 1, Column: 5".to_string()
22278 ))
22279 );
22280 }
22281
22282 #[test]
22283 fn test_parser_error_loc() {
22284 let sql = "SELECT this is a syntax error";
22285 let ast = Parser::parse_sql(&GenericDialect, sql);
22286 assert_eq!(
22287 ast,
22288 Err(ParserError::ParserError(
22289 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
22290 .to_string()
22291 ))
22292 );
22293 }
22294
22295 #[test]
22296 fn test_nested_explain_error() {
22297 let sql = "EXPLAIN EXPLAIN SELECT 1";
22298 let ast = Parser::parse_sql(&GenericDialect, sql);
22299 assert_eq!(
22300 ast,
22301 Err(ParserError::ParserError(
22302 "Explain must be root of the plan".to_string()
22303 ))
22304 );
22305 }
22306
22307 #[test]
22308 fn test_parse_multipart_identifier_positive() {
22309 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
22310
22311 let expected = vec![
22313 Ident {
22314 value: "CATALOG".to_string(),
22315 quote_style: None,
22316 span: Span::empty(),
22317 },
22318 Ident {
22319 value: "F(o)o. \"bar".to_string(),
22320 quote_style: Some('"'),
22321 span: Span::empty(),
22322 },
22323 Ident {
22324 value: "table".to_string(),
22325 quote_style: None,
22326 span: Span::empty(),
22327 },
22328 ];
22329 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
22330 let actual = parser.parse_multipart_identifier().unwrap();
22331 assert_eq!(expected, actual);
22332 });
22333
22334 let expected = vec![
22336 Ident {
22337 value: "CATALOG".to_string(),
22338 quote_style: None,
22339 span: Span::empty(),
22340 },
22341 Ident {
22342 value: "table".to_string(),
22343 quote_style: None,
22344 span: Span::empty(),
22345 },
22346 ];
22347 dialect.run_parser_method("CATALOG . table", |parser| {
22348 let actual = parser.parse_multipart_identifier().unwrap();
22349 assert_eq!(expected, actual);
22350 });
22351 }
22352
22353 #[test]
22354 fn test_parse_multipart_identifier_negative() {
22355 macro_rules! test_parse_multipart_identifier_error {
22356 ($input:expr, $expected_err:expr $(,)?) => {{
22357 all_dialects().run_parser_method(&*$input, |parser| {
22358 let actual_err = parser.parse_multipart_identifier().unwrap_err();
22359 assert_eq!(actual_err.to_string(), $expected_err);
22360 });
22361 }};
22362 }
22363
22364 test_parse_multipart_identifier_error!(
22365 "",
22366 "sql parser error: Empty input when parsing identifier",
22367 );
22368
22369 test_parse_multipart_identifier_error!(
22370 "*schema.table",
22371 "sql parser error: Unexpected token in identifier: *",
22372 );
22373
22374 test_parse_multipart_identifier_error!(
22375 "schema.table*",
22376 "sql parser error: Unexpected token in identifier: *",
22377 );
22378
22379 test_parse_multipart_identifier_error!(
22380 "schema.table.",
22381 "sql parser error: Trailing period in identifier",
22382 );
22383
22384 test_parse_multipart_identifier_error!(
22385 "schema.*",
22386 "sql parser error: Unexpected token following period in identifier: *",
22387 );
22388 }
22389
22390 #[test]
22391 fn test_mysql_partition_selection() {
22392 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
22393 let expected = vec!["p0", "p2"];
22394
22395 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
22396 assert_eq!(ast.len(), 1);
22397 if let Statement::Query(v) = &ast[0] {
22398 if let SetExpr::Select(select) = &*v.body {
22399 assert_eq!(select.from.len(), 1);
22400 let from: &TableWithJoins = &select.from[0];
22401 let table_factor = &from.relation;
22402 if let TableFactor::Table { partitions, .. } = table_factor {
22403 let actual: Vec<&str> = partitions
22404 .iter()
22405 .map(|ident| ident.value.as_str())
22406 .collect();
22407 assert_eq!(expected, actual);
22408 }
22409 }
22410 } else {
22411 panic!("fail to parse mysql partition selection");
22412 }
22413 }
22414
22415 #[test]
22416 fn test_replace_into_placeholders() {
22417 let sql = "REPLACE INTO t (a) VALUES (&a)";
22418
22419 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
22420 }
22421
22422 #[test]
22423 fn test_replace_into_set_placeholder() {
22424 let sql = "REPLACE INTO t SET ?";
22425
22426 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
22427 }
22428
22429 #[test]
22430 fn test_replace_incomplete() {
22431 let sql = r#"REPLACE"#;
22432
22433 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
22434 }
22435
22436 #[test]
22437 fn test_placeholder_invalid_whitespace() {
22438 for w in [" ", "/*invalid*/"] {
22439 let sql = format!("\nSELECT\n :{w}fooBar");
22440 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
22441 }
22442 }
22443}