1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while turning a SQL string into an AST.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could begin.
    TokenizerError(String),
    /// The token stream did not match an expected grammar production.
    ParserError(String),
    /// The configured nesting-depth limit was exceeded; see
    /// `Parser::with_recursion_limit`.
    RecursionLimitExceeded,
}
60
// Builds an `Err(ParserError::ParserError(..))` whose message is the given
// text followed by the given source location.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
// Submodules implementing ALTER and MERGE statement parsing.
mod alter;
mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks the remaining recursion depth available to the parser.
    ///
    /// Each successful [`try_decrease`](Self::try_decrease) hands out a
    /// [`DepthGuard`] that restores one unit of depth when dropped, so the
    /// budget is automatically released as nested parsing calls unwind.
    pub(crate) struct RecursionCounter {
        // `Rc<Cell<..>>` so guards share (and can restore) the same budget.
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter allowing `remaining_depth` further nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                // `Cell::new` spelled out (rather than `.into()`) to make the
                // interior-mutability wrapper explicit at the construction site.
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consumes one unit of depth, or returns
        /// [`ParserError::RecursionLimitExceeded`] once the budget is spent.
        ///
        /// The returned guard must be held for the duration of the nested
        /// call; dropping it immediately hands the depth straight back.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let old_value = self.remaining_depth.get();
            if old_value == 0 {
                Err(ParserError::RecursionLimitExceeded)
            } else {
                self.remaining_depth.set(old_value - 1);
                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
            }
        }
    }

    /// RAII guard that returns one unit of recursion depth on drop.
    #[must_use = "dropping the guard immediately returns the depth it reserved"]
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            // Give back the unit consumed by the matching `try_decrease`.
            let old_value = self.remaining_depth.get();
            self.remaining_depth.set(old_value + 1);
        }
    }
}
137
// Without `std` there is no `Rc`/`Cell`, so recursion depth is not tracked:
// this no-op counter always grants another level. Overflow protection then
// relies solely on the optional `recursive-protection` feature.
#[cfg(not(feature = "std"))]
mod recursion {
    /// Zero-cost stand-in for the `std` recursion counter.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The requested depth is ignored in `no_std` builds.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; depth is effectively unbounded without `std`.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard matching the `std` API; dropping it does nothing.
    pub struct DepthGuard {}
}
158
/// Whether a grammar element may be omitted at its call site.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    /// The element may be absent.
    Optional,
    /// The element must be present.
    Mandatory,
}
167
/// Whether a derived table factor was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// Result of parsing a select-list item that may be a wildcard.
pub enum WildcardExpr {
    /// An ordinary expression.
    Expr(Expr),
    /// A qualified wildcard such as `alias.*`.
    QualifiedWildcard(ObjectName),
    /// A bare `*`.
    Wildcard,
}
185
186impl From<TokenizerError> for ParserError {
187 fn from(e: TokenizerError) -> Self {
188 ParserError::TokenizerError(e.to_string())
189 }
190}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// Marker impl so `ParserError` works with `?` and error-trait consumers.
impl core::error::Error for ParserError {}
207
/// Default maximum nesting depth before
/// [`ParserError::RecursionLimitExceeded`] is returned.
const DEFAULT_REMAINING_DEPTH: usize = 50;
210
// Sentinel returned when peeking past the end of input; the zeroed span
// signals that it corresponds to no real source location.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
/// Newtype flag recording whether a trailing closing bracket was consumed.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
239
/// Options that tune parser behavior independently of the SQL dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept a trailing comma in comma-separated lists.
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization
    /// (forwarded to `Tokenizer::with_unescape`).
    pub unescape: bool,
    /// Require a semicolon between consecutive statements.
    pub require_semicolon_stmt_delimiter: bool,
}
252
impl Default for ParserOptions {
    /// Defaults: no trailing commas, unescape literals, and require `;`
    /// between statements.
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}
262
263impl ParserOptions {
264 pub fn new() -> Self {
266 Default::default()
267 }
268
269 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
281 self.trailing_commas = trailing_commas;
282 self
283 }
284
285 pub fn with_unescape(mut self, unescape: bool) -> Self {
288 self.unescape = unescape;
289 self
290 }
291}
292
/// Parsing mode that changes how certain tokens are interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Inside a `CONNECT BY` clause.
    ConnectBy,
    /// Parsing a column definition; `parse_subexpr` then leaves a trailing
    /// `COLLATE` for the column parser instead of attaching it to the
    /// expression.
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream produced by the tokenizer and builds
/// AST statements and expressions according to the configured dialect.
pub struct Parser<'a> {
    /// The tokens being parsed.
    tokens: Vec<TokenWithSpan>,
    /// Index of the first unprocessed token in `tokens`.
    index: usize,
    /// Current special-case parsing mode.
    state: ParserState,
    /// The dialect that customizes grammar decisions.
    dialect: &'a dyn Dialect,
    /// Behavior options (trailing commas, unescaping, statement delimiters).
    options: ParserOptions,
    /// Guards against stack overflow on deeply nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
    /// Creates a parser for the given dialect with no tokens loaded.
    ///
    /// Supply input via [`Parser::with_tokens`] or [`Parser::try_with_sql`].
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            // Trailing-comma acceptance defaults to the dialect's preference.
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }
390
    /// Replaces the recursion limit used to reject deeply nested input.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
417
    /// Replaces the parser options wholesale, overriding any
    /// dialect-derived defaults set in [`Parser::new`].
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
444
    /// Loads tokens that carry source spans and resets the read position.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
    /// Tokenizes `sql` with the parser's dialect and loads the result.
    ///
    /// # Errors
    /// Returns [`ParserError::TokenizerError`] if tokenization fails.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
478
    /// Parses zero or more semicolon-separated statements until EOF, or until
    /// a dangling `END` keyword (left unconsumed for an enclosing construct).
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any number of consecutive semicolons between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some configurations let statements simply abut one another.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // An `END` right after a statement terminates the list inside
                // a block; stop without consuming it.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
    /// Convenience entry point: tokenizes and parses `sql` in one call.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
548
    /// Like [`Parser::parse_sql`], but also returns the comments collected
    /// from the token stream.
    pub fn parse_sql_with_comments(
        dialect: &'a dyn Dialect,
        sql: &str,
    ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
        let mut p = Parser::new(dialect).try_with_sql(sql)?;
        p.parse_statements().map(|stmts| (stmts, p.into_comments()))
    }
560
    /// Consumes the parser and extracts every comment token (with its span)
    /// from the token stream.
    fn into_comments(self) -> comments::Comments {
        let mut comments = comments::Comments::default();
        for t in self.tokens.into_iter() {
            match t.token {
                Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::SingleLine {
                            content: comment,
                            prefix,
                        },
                        span: t.span,
                    });
                }
                Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::MultiLine(comment),
                        span: t.span,
                    });
                }
                // Non-comment tokens are ignored.
                _ => {}
            }
        }
        comments
    }
586
    /// Parses a single top-level statement (SELECT, INSERT, CREATE, ...),
    /// stopping before the statement separator, if any.
    ///
    /// The dialect gets the first chance to parse via
    /// `Dialect::parse_statement`; otherwise dispatch is on the first token.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Bound nesting depth: statements can contain statements.
        let _guard = self.recursion_counter.try_decrease()?;

        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                // Arms that call `prev_token` first do so because their
                // sub-parser expects to see the leading keyword itself.
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // The following keywords are only statements when the dialect
                // opts in; otherwise they fall through to the error below.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A parenthesized query, e.g. `(SELECT ...)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
731
    /// Parses a `CASE` control-flow statement (not the CASE expression):
    /// `CASE [operand] WHEN ... [ELSE ...] END [CASE]`.
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // A "searched" CASE starts directly with WHEN; otherwise parse the
        // operand expression being matched.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE` as the terminator.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
768
    /// Parses an `IF` statement:
    /// `IF <cond> THEN ... [ELSEIF <cond> THEN ...]* [ELSE ...] END IF`.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            // The first ELSEIF was consumed above; the remainder are parsed
            // as an ELSEIF-separated sequence of blocks.
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
808
    /// Parses a `WHILE <condition> ... END` statement.
    fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(WhileStatement { while_block })
    }
818
    /// Parses one condition-plus-body arm of a conditional construct
    /// (IF/ELSEIF/ELSE/WHEN/WHILE), stopping at any of `terminal_keywords`.
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        // The introducing keyword was already consumed by the caller; it is
        // the current token.
        let start_token = self.get_current_token().clone();
        let mut then_token = None;

        let condition = match &start_token.token {
            // ELSE arms carry no condition.
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            // WHILE has a condition but no THEN keyword.
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            // IF/ELSEIF/WHEN arms: `<condition> THEN ...`.
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }
855
    /// Parses the body of one conditional arm: either a `BEGIN ... END`
    /// block or a bare statement sequence ending at one of
    /// `terminal_keywords`.
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }
879
    /// Parses a `RAISE` statement:
    /// `RAISE [USING MESSAGE = <expr> | <expr>]`.
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // A bare RAISE may optionally carry a value expression.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }
    /// Parses `COMMENT [IF EXISTS] ON <object type> <name> IS <text | NULL>`
    /// (the leading COMMENT keyword is already consumed).
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        // Map the object-type keyword to its AST variant and parse the
        // (possibly qualified) object name that follows.
        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLLATION => {
                (CommentObject::Collation, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DOMAIN => {
                (CommentObject::Domain, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::FUNCTION => {
                (CommentObject::Function, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::INDEX => {
                (CommentObject::Index, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
                // Two-word object type: MATERIALIZED VIEW.
                self.expect_keyword_is(Keyword::VIEW)?;
                (
                    CommentObject::MaterializedView,
                    self.parse_object_name(false)?,
                )
            }
            Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
                (CommentObject::Procedure, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
                (CommentObject::Sequence, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TYPE => {
                (CommentObject::Type, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::VIEW => {
                (CommentObject::View, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` clears the comment; otherwise a string literal sets it.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }
973
    /// Parses a `FLUSH` statement (MySQL and Generic dialects only).
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!(
                "Unsupported statement FLUSH",
                self.peek_token_ref().span.start
            );
        }

        // Optional NO_WRITE_TO_BINLOG / LOCAL modifier.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // Multi-word alternatives (e.g. BINARY LOGS) are tried before the
        // bare LOGS fallback so the longer forms win.
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // RELAY LOGS may target a specific channel.
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // FLUSH TABLES [tbl, ...] [WITH READ LOCK] [FOR EXPORT]
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A non-keyword word starts the table-name list.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    // Any non-word token ends the clause scan.
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected_ref(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
1064
    /// Parses `MSCK [REPAIR] TABLE <name> [{ADD | DROP | SYNC} PARTITIONS]`
    /// (the leading MSCK keyword is already consumed).
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition-action clause is optional; `maybe_parse` rewinds on
        // failure so a missing clause leaves the stream untouched.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1092
    /// Parses a `TRUNCATE [TABLE] [IF EXISTS] <names> ...` statement
    /// (the leading TRUNCATE keyword is already consumed).
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        // Each target may carry ONLY before and `*` after the name.
        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        // Optional PARTITION (<exprs>) clause.
        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // RESTART/CONTINUE IDENTITY and CASCADE/RESTRICT are accepted only
        // for the PostgreSQL and Generic dialects.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1143
1144 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1145 if self.parse_keyword(Keyword::CASCADE) {
1146 Some(CascadeOption::Cascade)
1147 } else if self.parse_keyword(Keyword::RESTRICT) {
1148 Some(CascadeOption::Restrict)
1149 } else {
1150 None
1151 }
1152 }
1153
    /// Parses the optional parenthesized option list of an ATTACH statement:
    /// `(READ_ONLY [TRUE|FALSE], TYPE <ident>, ...)`.
    ///
    /// Returns an empty list when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean value is optional; bare READ_ONLY is allowed.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // Each option must be followed by `,` (continue) or `)` (done).
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1190
    /// Parses a DuckDB-dialect ATTACH statement:
    /// `ATTACH [DATABASE] [IF NOT EXISTS] <path> [AS <alias>] [(options)]`.
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1211
    /// Parses `DETACH [DATABASE] [IF EXISTS] <alias>`
    /// (the leading DETACH keyword is already consumed).
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1223
    /// Parses `ATTACH [DATABASE] <file expr> AS <schema name>`
    /// (the leading ATTACH keyword is already consumed).
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1236
    /// Parses an `ANALYZE [TABLE] [<name> [(cols)]]` statement with its
    /// optional PARTITION / FOR COLUMNS / CACHE METADATA / NOSCAN /
    /// COMPUTE STATISTICS clauses (in any order).
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // An inline column list may directly follow the table name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // Consume trailing clauses until none of the keywords match.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // FOR COLUMNS may be followed by an optional column list.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1301
    /// Parses an expression that may be a wildcard: `*`, `qualifier.*`,
    /// or `(*)`. Anything else rewinds and parses as a regular expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the position so we can rewind on the non-wildcard path.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token_ref().token == Token::Period {
                    // Accumulate the qualifier parts of `a.b.…`.
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: `t` was matched as Word/String above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Placeholder(s) => {
                                id_parts.push(Ident::new(s))
                            }
                            // `qualifier.*` — a qualified wildcard.
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            // A bare `*`.
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            // `(*)` is treated as a plain wildcard; anything else inside
            // parentheses falls through to ordinary expression parsing.
            Token::LParen => {
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse a regular expression.
        self.index = index;
        self.parse_expr()
    }
1365
    /// Parses a new expression starting at the lowest precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1370
    /// Parses an expression followed by an optional alias and an optional
    /// `ASC`/`DESC` ordering direction.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject implicit aliases that are really ordering/grouping keywords;
        // explicit `AS` aliases are always accepted.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1390
    /// Parses a sub-expression, consuming infix operators whose precedence is
    /// strictly higher than `precedence` (precedence climbing).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Bound nesting depth for deeply parenthesized/nested expressions.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        // Inside a column definition, a trailing COLLATE belongs to the
        // column, not the expression, so leave it for the caller.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // `.` access is handled by `parse_compound_expr` above; do not
            // treat the period as an infix operator here.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1429
1430 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1432 let condition = self.parse_expr()?;
1433 let message = if self.parse_keyword(Keyword::AS) {
1434 Some(self.parse_expr()?)
1435 } else {
1436 None
1437 };
1438
1439 Ok(Statement::Assert { condition, message })
1440 }
1441
1442 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1444 let name = self.parse_identifier()?;
1445 Ok(Statement::Savepoint { name })
1446 }
1447
1448 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1450 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1451 let name = self.parse_identifier()?;
1452
1453 Ok(Statement::ReleaseSavepoint { name })
1454 }
1455
1456 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1458 let channel = self.parse_identifier()?;
1459 Ok(Statement::LISTEN { channel })
1460 }
1461
1462 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1464 let channel = if self.consume_token(&Token::Mul) {
1465 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1466 } else {
1467 match self.parse_identifier() {
1468 Ok(expr) => expr,
1469 _ => {
1470 self.prev_token();
1471 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1472 }
1473 }
1474 };
1475 Ok(Statement::UNLISTEN { channel })
1476 }
1477
1478 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1480 let channel = self.parse_identifier()?;
1481 let payload = if self.consume_token(&Token::Comma) {
1482 Some(self.parse_literal_string()?)
1483 } else {
1484 None
1485 };
1486 Ok(Statement::NOTIFY { channel, payload })
1487 }
1488
1489 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1491 if self.peek_keyword(Keyword::TABLE) {
1492 self.expect_keyword(Keyword::TABLE)?;
1493 let rename_tables = self.parse_comma_separated(|parser| {
1494 let old_name = parser.parse_object_name(false)?;
1495 parser.expect_keyword(Keyword::TO)?;
1496 let new_name = parser.parse_object_name(false)?;
1497
1498 Ok(RenameTable { old_name, new_name })
1499 })?;
1500 Ok(rename_tables.into())
1501 } else {
1502 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1503 }
1504 }
1505
    /// Attempt to parse an expression prefix that begins with a reserved
    /// keyword (literals, CAST/CASE/EXISTS/EXTRACT/..., and dialect-specific
    /// forms). Returns `Ok(None)` when the keyword does not start a
    /// recognized prefix, so the caller can retry it as an identifier.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean literals, when the dialect supports them.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Postgres niladic "functions" written without parentheses.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Date/time functions that may appear with or without parens.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS(<subquery>). Databricks also has an exists(array, fn)
            // function, so there only treat EXISTS as a subquery when it is
            // followed by SELECT or WITH.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[1, 2, 3]` — array literal with bracket syntax.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<subquery>)` — not in ClickHouse/Databricks, where
            // ARRAY(...) is an ordinary function call.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR expr` is only meaningful inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // DuckDB-style `MAP {...}` literal.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Postgres geometric typed literals, e.g. `POINT '(1,2)'`.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            // Not a recognized reserved-word prefix.
            _ => Ok(None),
        }
    }
1632
1633 fn parse_expr_prefix_by_unreserved_word(
1635 &mut self,
1636 w: &Word,
1637 w_span: Span,
1638 ) -> Result<Expr, ParserError> {
1639 let is_outer_join = self.peek_outer_join_operator();
1640 match &self.peek_token_ref().token {
1641 Token::LParen if !is_outer_join => {
1642 let id_parts = vec![w.to_ident(w_span)];
1643 self.parse_function(ObjectName::from(id_parts))
1644 }
1645 Token::SingleQuotedString(_)
1647 | Token::DoubleQuotedString(_)
1648 | Token::HexStringLiteral(_)
1649 if w.value.starts_with('_') =>
1650 {
1651 Ok(Expr::Prefixed {
1652 prefix: w.to_ident(w_span),
1653 value: self.parse_introduced_string_expr()?.into(),
1654 })
1655 }
1656 Token::SingleQuotedString(_)
1658 | Token::DoubleQuotedString(_)
1659 | Token::HexStringLiteral(_)
1660 if w.value.starts_with('_') =>
1661 {
1662 Ok(Expr::Prefixed {
1663 prefix: w.to_ident(w_span),
1664 value: self.parse_introduced_string_expr()?.into(),
1665 })
1666 }
1667 Token::Arrow if self.dialect.supports_lambda_functions() => {
1671 self.expect_token(&Token::Arrow)?;
1672 Ok(Expr::Lambda(LambdaFunction {
1673 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1674 name: w.to_ident(w_span),
1675 data_type: None,
1676 }),
1677 body: Box::new(self.parse_expr()?),
1678 syntax: LambdaSyntax::Arrow,
1679 }))
1680 }
1681 Token::Word(_)
1685 if self.dialect.supports_lambda_functions()
1686 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1687 {
1688 let data_type = self.parse_data_type()?;
1689 self.expect_token(&Token::Arrow)?;
1690 Ok(Expr::Lambda(LambdaFunction {
1691 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1692 name: w.to_ident(w_span),
1693 data_type: Some(data_type),
1694 }),
1695 body: Box::new(self.parse_expr()?),
1696 syntax: LambdaSyntax::Arrow,
1697 }))
1698 }
1699 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1700 }
1701 }
1702
1703 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1706 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1707 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1708 } else {
1709 false
1710 }
1711 }
1712
1713 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1715 if let Some(prefix) = self.dialect.parse_prefix(self) {
1717 return prefix;
1718 }
1719
1720 let loc = self.peek_token_ref().span.start;
1737 let opt_expr = self.maybe_parse(|parser| {
1738 match parser.parse_data_type()? {
1739 DataType::Interval { .. } => parser.parse_interval(),
1740 DataType::Custom(ref name, ref modifiers)
1751 if modifiers.is_empty()
1752 && Self::is_simple_unquoted_object_name(name, "xml")
1753 && parser.dialect.supports_xml_expressions() =>
1754 {
1755 Ok(Expr::TypedString(TypedString {
1756 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1757 value: parser.parse_value()?,
1758 uses_odbc_syntax: false,
1759 }))
1760 }
1761 DataType::Custom(..) => parser_err!("dummy", loc),
1762 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1764 Ok(Expr::Cast {
1765 kind: CastKind::Cast,
1766 expr: Box::new(parser.parse_expr()?),
1767 data_type: DataType::Binary(None),
1768 array: false,
1769 format: None,
1770 })
1771 }
1772 data_type => Ok(Expr::TypedString(TypedString {
1773 data_type,
1774 value: parser.parse_value()?,
1775 uses_odbc_syntax: false,
1776 })),
1777 }
1778 })?;
1779
1780 if let Some(expr) = opt_expr {
1781 return Ok(expr);
1782 }
1783
1784 let dialect = self.dialect;
1788
1789 self.advance_token();
1790 let next_token_index = self.get_current_index();
1791 let next_token = self.get_current_token();
1792 let span = next_token.span;
1793 let expr = match &next_token.token {
1794 Token::Word(w) => {
1795 let w = w.clone();
1804 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1805 Ok(Some(expr)) => Ok(expr),
1807
1808 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1810
1811 Err(e) => {
1818 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1819 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1820 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1821 }) {
1822 return Ok(expr);
1823 }
1824 }
1825 return Err(e);
1826 }
1827 }
1828 } Token::LBracket => self.parse_array_expr(false),
1831 tok @ Token::Minus | tok @ Token::Plus => {
1832 let op = if *tok == Token::Plus {
1833 UnaryOperator::Plus
1834 } else {
1835 UnaryOperator::Minus
1836 };
1837 Ok(Expr::UnaryOp {
1838 op,
1839 expr: Box::new(
1840 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1841 ),
1842 })
1843 }
1844 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1845 op: UnaryOperator::BangNot,
1846 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1847 }),
1848 tok @ Token::DoubleExclamationMark
1849 | tok @ Token::PGSquareRoot
1850 | tok @ Token::PGCubeRoot
1851 | tok @ Token::AtSign
1852 if dialect_is!(dialect is PostgreSqlDialect) =>
1853 {
1854 let op = match tok {
1855 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1856 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1857 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1858 Token::AtSign => UnaryOperator::PGAbs,
1859 _ => {
1860 return Err(ParserError::ParserError(
1861 "Internal parser error: unexpected unary operator token".to_string(),
1862 ))
1863 }
1864 };
1865 Ok(Expr::UnaryOp {
1866 op,
1867 expr: Box::new(
1868 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1869 ),
1870 })
1871 }
1872 Token::Tilde => Ok(Expr::UnaryOp {
1873 op: UnaryOperator::BitwiseNot,
1874 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1875 }),
1876 tok @ Token::Sharp
1877 | tok @ Token::AtDashAt
1878 | tok @ Token::AtAt
1879 | tok @ Token::QuestionMarkDash
1880 | tok @ Token::QuestionPipe
1881 if self.dialect.supports_geometric_types() =>
1882 {
1883 let op = match tok {
1884 Token::Sharp => UnaryOperator::Hash,
1885 Token::AtDashAt => UnaryOperator::AtDashAt,
1886 Token::AtAt => UnaryOperator::DoubleAt,
1887 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1888 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1889 _ => {
1890 return Err(ParserError::ParserError(format!(
1891 "Unexpected token in unary operator parsing: {tok:?}"
1892 )))
1893 }
1894 };
1895 Ok(Expr::UnaryOp {
1896 op,
1897 expr: Box::new(
1898 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1899 ),
1900 })
1901 }
1902 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1903 {
1904 self.prev_token();
1905 Ok(Expr::Value(self.parse_value()?))
1906 }
1907 Token::UnicodeStringLiteral(_) => {
1908 self.prev_token();
1909 Ok(Expr::Value(self.parse_value()?))
1910 }
1911 Token::Number(_, _)
1912 | Token::SingleQuotedString(_)
1913 | Token::DoubleQuotedString(_)
1914 | Token::TripleSingleQuotedString(_)
1915 | Token::TripleDoubleQuotedString(_)
1916 | Token::DollarQuotedString(_)
1917 | Token::SingleQuotedByteStringLiteral(_)
1918 | Token::DoubleQuotedByteStringLiteral(_)
1919 | Token::TripleSingleQuotedByteStringLiteral(_)
1920 | Token::TripleDoubleQuotedByteStringLiteral(_)
1921 | Token::SingleQuotedRawStringLiteral(_)
1922 | Token::DoubleQuotedRawStringLiteral(_)
1923 | Token::TripleSingleQuotedRawStringLiteral(_)
1924 | Token::TripleDoubleQuotedRawStringLiteral(_)
1925 | Token::NationalStringLiteral(_)
1926 | Token::QuoteDelimitedStringLiteral(_)
1927 | Token::NationalQuoteDelimitedStringLiteral(_)
1928 | Token::HexStringLiteral(_) => {
1929 self.prev_token();
1930 Ok(Expr::Value(self.parse_value()?))
1931 }
1932 Token::LParen => {
1933 let expr =
1934 if let Some(expr) = self.try_parse_expr_sub_query()? {
1935 expr
1936 } else if let Some(lambda) = self.try_parse_lambda()? {
1937 return Ok(lambda);
1938 } else {
1939 let exprs = self.with_state(ParserState::Normal, |p| {
1950 p.parse_comma_separated(Parser::parse_expr)
1951 })?;
1952 match exprs.len() {
1953 0 => return Err(ParserError::ParserError(
1954 "Internal parser error: parse_comma_separated returned empty list"
1955 .to_string(),
1956 )),
1957 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1958 _ => Expr::Tuple(exprs),
1959 }
1960 };
1961 self.expect_token(&Token::RParen)?;
1962 Ok(expr)
1963 }
1964 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1965 self.prev_token();
1966 Ok(Expr::Value(self.parse_value()?))
1967 }
1968 Token::LBrace => {
1969 self.prev_token();
1970 self.parse_lbrace_expr()
1971 }
1972 _ => self.expected_at("an expression", next_token_index),
1973 }?;
1974
1975 Ok(expr)
1976 }
1977
1978 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1979 Ok(Expr::TypedString(TypedString {
1980 data_type: DataType::GeometricType(kind),
1981 value: self.parse_value()?,
1982 uses_odbc_syntax: false,
1983 }))
1984 }
1985
    /// Parse a compound expression: a `root` expression followed by a chain
    /// of `.field` accesses and/or `[subscript]` accesses, handling the
    /// qualified wildcard (`a.b.*`) and Oracle outer-join (`a.b(+)`) endings.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `expr.*`: PostgreSQL treats this as a qualified
                        // wildcard; other dialects back up so `*` can be
                        // parsed as multiplication instead.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => {
                        // Parse whatever follows `.` at Period precedence and
                        // accept only identifier-like results.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested accesses into the current chain.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Fall back to a plain identifier so a proper
                                // error message is produced on failure.
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` is only valid when every chain element is an identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // The Oracle outer-join operator `(+)` may only follow a column.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2115
    /// Combine `root` and its access chain into a single expression,
    /// preferring the most specific representation — compound identifier,
    /// qualified function call, or outer-join column — over the generic
    /// `CompoundFieldAccess`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        // All dot-joined identifiers: `a.b.c`.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // Identifiers ending in a function call: `db.schema.func(...)` —
        // fold the leading identifiers into the function's name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // skip the trailing Function itself
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root + chain identifiers to the function's name parts.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // A single trailing `(+)`: `a.b(+)` — rebuild as an outer-join column.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Generic field/subscript access.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2208
2209 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2210 match k {
2211 Keyword::LOCAL => Some(ContextModifier::Local),
2212 Keyword::GLOBAL => Some(ContextModifier::Global),
2213 Keyword::SESSION => Some(ContextModifier::Session),
2214 _ => None,
2215 }
2216 }
2217
2218 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2220 if !matches!(root, Expr::Identifier(_)) {
2221 return false;
2222 }
2223 fields
2224 .iter()
2225 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2226 }
2227
2228 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2230 let mut idents = vec![];
2231 if let Expr::Identifier(root) = root {
2232 idents.push(root);
2233 for x in fields {
2234 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2235 idents.push(ident);
2236 } else {
2237 return parser_err!(
2238 format!("Expected identifier, found: {}", x),
2239 x.span().start
2240 );
2241 }
2242 }
2243 Ok(idents)
2244 } else {
2245 parser_err!(
2246 format!("Expected identifier, found: {}", root),
2247 root.span().start
2248 )
2249 }
2250 }
2251
2252 fn peek_outer_join_operator(&mut self) -> bool {
2254 if !self.dialect.supports_outer_join_operator() {
2255 return false;
2256 }
2257
2258 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2259 Token::LParen == maybe_lparen.token
2260 && Token::Plus == maybe_plus.token
2261 && Token::RParen == maybe_rparen.token
2262 }
2263
2264 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2267 self.dialect.supports_outer_join_operator()
2268 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2269 }
2270
2271 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2273 self.expect_token(&Token::LParen)?;
2274 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2275 self.expect_token(&Token::RParen)?;
2276
2277 Ok(options)
2278 }
2279
2280 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2281 let name = self.parse_identifier()?;
2282
2283 let next_token = self.peek_token_ref();
2284 if next_token == &Token::Comma || next_token == &Token::RParen {
2285 return Ok(UtilityOption { name, arg: None });
2286 }
2287 let arg = self.parse_expr()?;
2288
2289 Ok(UtilityOption {
2290 name,
2291 arg: Some(arg),
2292 })
2293 }
2294
2295 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2296 if !self.peek_sub_query() {
2297 return Ok(None);
2298 }
2299
2300 Ok(Some(Expr::Subquery(self.parse_query()?)))
2301 }
2302
2303 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2304 if !self.dialect.supports_lambda_functions() {
2305 return Ok(None);
2306 }
2307 self.maybe_parse(|p| {
2308 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2309 p.expect_token(&Token::RParen)?;
2310 p.expect_token(&Token::Arrow)?;
2311 let expr = p.parse_expr()?;
2312 Ok(Expr::Lambda(LambdaFunction {
2313 params: OneOrManyWithParens::Many(params),
2314 body: Box::new(expr),
2315 syntax: LambdaSyntax::Arrow,
2316 }))
2317 })
2318 }
2319
2320 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2330 let params = self.parse_lambda_function_parameters()?;
2332 self.expect_token(&Token::Colon)?;
2334 let body = self.parse_expr()?;
2336 Ok(Expr::Lambda(LambdaFunction {
2337 params,
2338 body: Box::new(body),
2339 syntax: LambdaSyntax::LambdaKeyword,
2340 }))
2341 }
2342
2343 fn parse_lambda_function_parameters(
2345 &mut self,
2346 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2347 let params = if self.consume_token(&Token::LParen) {
2349 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2351 self.expect_token(&Token::RParen)?;
2352 OneOrManyWithParens::Many(params)
2353 } else {
2354 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2356 if params.len() == 1 {
2357 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2358 } else {
2359 OneOrManyWithParens::Many(params)
2360 }
2361 };
2362 Ok(params)
2363 }
2364
2365 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2367 let name = self.parse_identifier()?;
2368 let data_type = match &self.peek_token_ref().token {
2369 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2370 _ => None,
2371 };
2372 Ok(LambdaFunctionParameter { name, data_type })
2373 }
2374
2375 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2382 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2384 return Ok(Some(expr));
2385 }
2386 self.maybe_parse_odbc_body_datetime()
2388 }
2389
2390 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2401 self.maybe_parse(|p| {
2402 let token = p.next_token().clone();
2403 let word_string = token.token.to_string();
2404 let data_type = match word_string.as_str() {
2405 "t" => DataType::Time(None, TimezoneInfo::None),
2406 "d" => DataType::Date,
2407 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2408 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2409 };
2410 let value = p.parse_value()?;
2411 Ok(Expr::TypedString(TypedString {
2412 data_type,
2413 value,
2414 uses_odbc_syntax: true,
2415 }))
2416 })
2417 }
2418
2419 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2428 self.maybe_parse(|p| {
2429 p.expect_keyword(Keyword::FN)?;
2430 let fn_name = p.parse_object_name(false)?;
2431 let mut fn_call = p.parse_function_call(fn_name)?;
2432 fn_call.uses_odbc_syntax = true;
2433 Ok(Expr::Function(fn_call))
2434 })
2435 }
2436
2437 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2439 self.parse_function_call(name).map(Expr::Function)
2440 }
2441
    /// Parse a function call whose name has already been consumed; the next
    /// token is expected to be the opening `(`. Handles the argument list
    /// plus the trailing WITHIN GROUP, FILTER, NULLS-treatment, and OVER
    /// clauses in that order.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Some dialects allow a bare subquery as the sole argument.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style parameterized functions `name(params)(args)`:
        // the first list becomes the parameters, the second the arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // `WITHIN GROUP (ORDER BY ...)` for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE ...)` for aggregates, where supported.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // `RESPECT/IGNORE NULLS` after the argument list — only when it was
        // not already supplied inside the argument list itself.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (...)` or `OVER window_name` for window functions.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2529
2530 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2532 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2533 Some(keyword) => {
2534 self.expect_keyword_is(Keyword::NULLS)?;
2535
2536 Ok(match keyword {
2537 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2538 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2539 _ => None,
2540 })
2541 }
2542 None => Ok(None),
2543 }
2544 }
2545
2546 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2548 let args = if self.consume_token(&Token::LParen) {
2549 FunctionArguments::List(self.parse_function_argument_list()?)
2550 } else {
2551 FunctionArguments::None
2552 };
2553 Ok(Expr::Function(Function {
2554 name,
2555 uses_odbc_syntax: false,
2556 parameters: FunctionArguments::None,
2557 args,
2558 filter: None,
2559 over: None,
2560 null_treatment: None,
2561 within_group: vec![],
2562 }))
2563 }
2564
2565 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2567 let next_token = self.next_token();
2568 match &next_token.token {
2569 Token::Word(w) => match w.keyword {
2570 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2571 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2572 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2573 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2574 },
2575 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2576 }
2577 }
2578
2579 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2581 let units = self.parse_window_frame_units()?;
2582 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2583 let start_bound = self.parse_window_frame_bound()?;
2584 self.expect_keyword_is(Keyword::AND)?;
2585 let end_bound = Some(self.parse_window_frame_bound()?);
2586 (start_bound, end_bound)
2587 } else {
2588 (self.parse_window_frame_bound()?, None)
2589 };
2590 Ok(WindowFrame {
2591 units,
2592 start_bound,
2593 end_bound,
2594 })
2595 }
2596
    /// Parses a single window frame bound: `CURRENT ROW`,
    /// `UNBOUNDED PRECEDING/FOLLOWING`, or `<expr> PRECEDING/FOLLOWING`.
    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
            Ok(WindowFrameBound::CurrentRow)
        } else {
            // `None` encodes UNBOUNDED; otherwise the offset expression.
            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
                None
            } else {
                Some(Box::new(match &self.peek_token_ref().token {
                    // A single-quoted string here is parsed as an interval
                    // literal (e.g. RANGE '1 day' PRECEDING).
                    Token::SingleQuotedString(_) => self.parse_interval()?,
                    _ => self.parse_expr()?,
                }))
            };
            if self.parse_keyword(Keyword::PRECEDING) {
                Ok(WindowFrameBound::Preceding(rows))
            } else if self.parse_keyword(Keyword::FOLLOWING) {
                Ok(WindowFrameBound::Following(rows))
            } else {
                self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
            }
        }
    }
2619
2620 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2622 if self.dialect.supports_group_by_expr() {
2623 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2624 self.expect_token(&Token::LParen)?;
2625 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2626 self.expect_token(&Token::RParen)?;
2627 Ok(Expr::GroupingSets(result))
2628 } else if self.parse_keyword(Keyword::CUBE) {
2629 self.expect_token(&Token::LParen)?;
2630 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2631 self.expect_token(&Token::RParen)?;
2632 Ok(Expr::Cube(result))
2633 } else if self.parse_keyword(Keyword::ROLLUP) {
2634 self.expect_token(&Token::LParen)?;
2635 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2636 self.expect_token(&Token::RParen)?;
2637 Ok(Expr::Rollup(result))
2638 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2639 Ok(Expr::Tuple(vec![]))
2643 } else {
2644 self.parse_expr()
2645 }
2646 } else {
2647 self.parse_expr()
2649 }
2650 }
2651
2652 fn parse_tuple(
2656 &mut self,
2657 lift_singleton: bool,
2658 allow_empty: bool,
2659 ) -> Result<Vec<Expr>, ParserError> {
2660 if lift_singleton {
2661 if self.consume_token(&Token::LParen) {
2662 let result = if allow_empty && self.consume_token(&Token::RParen) {
2663 vec![]
2664 } else {
2665 let result = self.parse_comma_separated(Parser::parse_expr)?;
2666 self.expect_token(&Token::RParen)?;
2667 result
2668 };
2669 Ok(result)
2670 } else {
2671 Ok(vec![self.parse_expr()?])
2672 }
2673 } else {
2674 self.expect_token(&Token::LParen)?;
2675 let result = if allow_empty && self.consume_token(&Token::RParen) {
2676 vec![]
2677 } else {
2678 let result = self.parse_comma_separated(Parser::parse_expr)?;
2679 self.expect_token(&Token::RParen)?;
2680 result
2681 };
2682 Ok(result)
2683 }
2684 }
2685
    /// Parses a `CASE ... END` expression. The `CASE` keyword is assumed to
    /// be the current (already consumed) token.
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        // Keep the CASE token so the AST can preserve source spans.
        let case_token = AttachedToken(self.get_current_token().clone());
        let mut operand = None;
        // If the next keyword is not WHEN, this is the "simple" form
        // `CASE <operand> WHEN ...` rather than the "searched" form.
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword_is(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        // One iteration per `WHEN <cond> THEN <result>` arm.
        loop {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::THEN)?;
            let result = self.parse_expr()?;
            conditions.push(CaseWhen { condition, result });
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        // The END token is also retained for span information.
        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
        Ok(Expr::Case {
            case_token,
            end_token,
            operand,
            conditions,
            else_result,
        })
    }
2718
2719 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2721 if self.parse_keyword(Keyword::FORMAT) {
2722 let value = self.parse_value()?;
2723 match self.parse_optional_time_zone()? {
2724 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2725 None => Ok(Some(CastFormat::Value(value))),
2726 }
2727 } else {
2728 Ok(None)
2729 }
2730 }
2731
2732 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2734 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2735 self.parse_value().map(Some)
2736 } else {
2737 Ok(None)
2738 }
2739 }
2740
    /// Parses the type-first form of CONVERT:
    /// `CONVERT(data_type, expr [, style...])`.
    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let data_type = self.parse_data_type()?;
        self.expect_token(&Token::Comma)?;
        let expr = self.parse_expr()?;
        // Optional trailing style arguments, e.g. CONVERT(VARCHAR, x, 120).
        let styles = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            Default::default()
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset: None,
            // Marks that the target type preceded the value in the source.
            target_before_value: true,
            styles,
        })
    }
2762
    /// Parses a CONVERT expression. Depending on the dialect this is either
    /// the type-first form `CONVERT(type, expr)`, the character-set form
    /// `CONVERT(expr USING charset)`, or
    /// `CONVERT(expr, type [CHARACTER SET cs])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `CONVERT(expr USING charset)` form.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        // Optional `CHARACTER SET <name>` suffix after the target type.
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2802
    /// Parses the remainder of a CAST-style expression after the keyword:
    /// `(<expr> AS <type> [ARRAY] [FORMAT <fmt>])`. `kind` distinguishes
    /// CAST / TRY_CAST / SAFE_CAST variants chosen by the caller.
    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let data_type = self.parse_data_type()?;
        // `CAST(x AS type ARRAY)` marks an array of the given element type.
        let array = self.parse_keyword(Keyword::ARRAY);
        // Optional `FORMAT <value>` clause.
        let format = self.parse_optional_cast_format()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Cast {
            kind,
            expr: Box::new(expr),
            data_type,
            array,
            format,
        })
    }
2820
2821 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2823 self.expect_token(&Token::LParen)?;
2824 let exists_node = Expr::Exists {
2825 negated,
2826 subquery: self.parse_query()?,
2827 };
2828 self.expect_token(&Token::RParen)?;
2829 Ok(exists_node)
2830 }
2831
    /// Parses `EXTRACT(field FROM expr)` — or, in dialects that allow it,
    /// `EXTRACT(field, expr)` — assuming EXTRACT was already consumed.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        // Record which separator syntax was used so the AST round-trips.
        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
        {
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2856
    /// Parses CEIL/FLOOR arguments: `(expr)`, `(expr TO field)`, or
    /// `(expr, scale)`. `is_ceil` selects which AST node is produced.
    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let field = if self.parse_keyword(Keyword::TO) {
            // e.g. `CEIL(ts TO DAY)`.
            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
        } else if self.consume_token(&Token::Comma) {
            // e.g. `CEIL(x, 2)` — the scale must be a numeric literal.
            let v = self.parse_value()?;
            if matches!(v.value, Value::Number(_, _)) {
                CeilFloorKind::Scale(v)
            } else {
                return Err(ParserError::ParserError(
                    "Scale field can only be of number type".to_string(),
                ));
            }
        } else {
            // Plain `CEIL(expr)`: no unit, no scale.
            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
        };
        self.expect_token(&Token::RParen)?;
        if is_ceil {
            Ok(Expr::Ceil {
                expr: Box::new(expr),
                field,
            })
        } else {
            Ok(Expr::Floor {
                expr: Box::new(expr),
                field,
            })
        }
    }
2891
    /// Parses `POSITION(<expr> IN <expr>)`. If that form does not parse,
    /// the input is rewound (via `maybe_parse`) and `ident` is treated as
    /// an ordinary function call instead.
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        // The needle is parsed at BETWEEN precedence so that the following
        // `IN` keyword is left for the explicit expect below rather than
        // being consumed by the expression parser.
        let between_prec = self.dialect.prec_value(Precedence::Between);
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword_is(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            // Fall back to a regular function call on parse failure.
            None => self.parse_function(ObjectName::from(vec![ident])),
        }
    }
2916
    /// Parses `SUBSTRING(expr [FROM expr] [FOR expr])` or the comma form
    /// `SUBSTR/SUBSTRING(expr, from[, for])`.
    ///
    /// `special` records that comma syntax was used and `shorthand` that
    /// the SUBSTR spelling was used, so the AST can round-trip the source.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            _ => {
                self.prev_token();
                return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // A comma here selects the comma-separated calling convention.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2949
2950 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2954 self.expect_token(&Token::LParen)?;
2956 let expr = self.parse_expr()?;
2957 self.expect_keyword_is(Keyword::PLACING)?;
2958 let what_expr = self.parse_expr()?;
2959 self.expect_keyword_is(Keyword::FROM)?;
2960 let from_expr = self.parse_expr()?;
2961 let mut for_expr = None;
2962 if self.parse_keyword(Keyword::FOR) {
2963 for_expr = Some(self.parse_expr()?);
2964 }
2965 self.expect_token(&Token::RParen)?;
2966
2967 Ok(Expr::Overlay {
2968 expr: Box::new(expr),
2969 overlay_what: Box::new(what_expr),
2970 overlay_from: Box::new(from_expr),
2971 overlay_for: for_expr.map(Box::new),
2972 })
2973 }
2974
    /// Parses TRIM in its several forms:
    /// `TRIM([BOTH|LEADING|TRAILING] expr)`,
    /// `TRIM([BOTH|LEADING|TRAILING] what FROM expr)`, and — for dialects
    /// that support it — `TRIM(expr, chars...)`.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional BOTH / LEADING / TRAILING qualifier.
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // The first expression was actually the characters to trim;
            // the trim target follows FROM.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            // Comma form: remaining arguments are the characters to trim.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3019
3020 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3024 let next_token = self.next_token();
3025 match &next_token.token {
3026 Token::Word(w) => match w.keyword {
3027 Keyword::BOTH => Ok(TrimWhereField::Both),
3028 Keyword::LEADING => Ok(TrimWhereField::Leading),
3029 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3030 _ => self.expected("trim_where field", next_token)?,
3031 },
3032 _ => self.expected("trim_where field", next_token),
3033 }
3034 }
3035
3036 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3039 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3040 self.expect_token(&Token::RBracket)?;
3041 Ok(Expr::Array(Array { elem: exprs, named }))
3042 }
3043
    /// Parses LISTAGG's optional `ON OVERFLOW` clause:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler>] WITH|WITHOUT COUNT`.
    /// Returns `Ok(None)` when the clause is absent.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler is optional: WITH/WITHOUT may follow directly,
                // otherwise a string literal supplies the filler text.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH / WITHOUT must precede COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3082
    /// Parses a date/time unit keyword (YEAR, MONTH, EPOCH, ...) as used in
    /// EXTRACT, CEIL/FLOOR and INTERVAL qualifiers.
    ///
    /// Dialects may additionally allow arbitrary identifiers
    /// (`allow_extract_custom`) or single-quoted strings
    /// (`allow_extract_single_quotes`) as custom fields.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery/Generic allow `WEEK(<weekday>)`, e.g.
                    // WEEK(MONDAY), to pick the week-start day.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the common misspelling and the correct spelling are
                // recognized as distinct variants.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                _ if self.dialect.allow_extract_custom() => {
                    // Rewind and re-read the word as a custom identifier.
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                // Rewind and re-read the string as a custom identifier.
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3161
3162 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3166 match &self.peek_token_ref().token {
3167 Token::Word(w) => match w.keyword {
3168 Keyword::EXISTS => {
3169 let negated = true;
3170 let _ = self.parse_keyword(Keyword::EXISTS);
3171 self.parse_exists_expr(negated)
3172 }
3173 _ => Ok(Expr::UnaryOp {
3174 op: UnaryOperator::Not,
3175 expr: Box::new(
3176 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3177 ),
3178 }),
3179 },
3180 _ => Ok(Expr::UnaryOp {
3181 op: UnaryOperator::Not,
3182 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3183 }),
3184 }
3185 }
3186
3187 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3197 let token = self.expect_token(&Token::LBrace)?;
3198
3199 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3200 self.expect_token(&Token::RBrace)?;
3201 return Ok(fn_expr);
3202 }
3203
3204 if self.dialect.supports_dictionary_syntax() {
3205 self.prev_token(); return self.parse_dictionary();
3207 }
3208
3209 self.expected("an expression", token)
3210 }
3211
    /// Parses a full-text search expression
    /// `MATCH (col, ...) AGAINST (<value> [modifier])`; the MATCH keyword
    /// is assumed to have been consumed already.
    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;

        self.expect_keyword_is(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        // The value to search for.
        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        // Optional search modifier; NATURAL LANGUAGE MODE may additionally
        // be combined with WITH QUERY EXPANSION.
        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }
3260
    /// Parses the body of an INTERVAL expression (the keyword itself has
    /// already been consumed), e.g. `'1-1' YEAR TO MONTH` or
    /// `'1.5' SECOND (2, 4)`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a unit qualifier take a full expression as
        // the value; otherwise only a prefix expression is parsed here.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Optional leading unit, e.g. the YEAR in `INTERVAL '1' YEAR`.
        // Mandatory for dialects that require a qualifier.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                // `SECOND (p, s)`: both precisions come in one pair of
                // parentheses and no `TO <field>` range follows.
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    // Fractional-seconds precision only applies when the
                    // range ends in SECOND.
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3343
    /// Returns true when the next token (peeked, not consumed) is a keyword
    /// naming an interval/date-time unit such as `YEAR`, `MINUTE` or
    /// `EPOCH`.
    pub fn next_token_is_temporal_unit(&mut self) -> bool {
        if let Token::Word(word) = &self.peek_token_ref().token {
            matches!(
                word.keyword,
                Keyword::YEAR
                    | Keyword::YEARS
                    | Keyword::MONTH
                    | Keyword::MONTHS
                    | Keyword::WEEK
                    | Keyword::WEEKS
                    | Keyword::DAY
                    | Keyword::DAYS
                    | Keyword::HOUR
                    | Keyword::HOURS
                    | Keyword::MINUTE
                    | Keyword::MINUTES
                    | Keyword::SECOND
                    | Keyword::SECONDS
                    | Keyword::CENTURY
                    | Keyword::DECADE
                    | Keyword::DOW
                    | Keyword::DOY
                    | Keyword::EPOCH
                    | Keyword::ISODOW
                    | Keyword::ISOYEAR
                    | Keyword::JULIAN
                    | Keyword::MICROSECOND
                    | Keyword::MICROSECONDS
                    | Keyword::MILLENIUM
                    | Keyword::MILLENNIUM
                    | Keyword::MILLISECOND
                    | Keyword::MILLISECONDS
                    | Keyword::NANOSECOND
                    | Keyword::NANOSECONDS
                    | Keyword::QUARTER
                    | Keyword::TIMEZONE
                    | Keyword::TIMEZONE_HOUR
                    | Keyword::TIMEZONE_MINUTE
            )
        } else {
            false
        }
    }
3389
    /// Parses a struct literal, either typed (`STRUCT<a INT>(1)`) or
    /// untyped (`STRUCT(1 AS a)`). The STRUCT keyword is assumed to have
    /// just been consumed; it is pushed back so the type-def parser can
    /// re-read it.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        // A leftover `>` (from a `>>` token) means the `<...>` list was not
        // balanced for this literal.
        if trailing_bracket.0 {
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token_ref().span.start
            );
        }

        self.expect_token(&Token::LParen)?;
        // When a type list was given (typed syntax), field values may not
        // carry their own `AS name` aliases.
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3417
    /// Parses one value of a struct literal, optionally aliased with
    /// `AS name`. `typed_syntax` is true when the literal carried an
    /// explicit `STRUCT<...>` type list, in which case `AS` is rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // Rewind one token so the error span points at `AS`.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token_ref().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3449
    /// Parses a `STRUCT<field_def, ...>` type definition, using
    /// `elem_parser` for each field. Also returns whether a `>>` token
    /// closed both this list and an enclosing one.
    ///
    /// A bare `STRUCT` with no `<` is valid and yields an empty field list.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // Bare `STRUCT` without a type list.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // Stop when the element itself consumed the closing bracket
            // (as part of a `>>`) or when no comma follows.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3492
3493 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3495 self.expect_keyword_is(Keyword::STRUCT)?;
3496 self.expect_token(&Token::LParen)?;
3497 let struct_body = self.parse_comma_separated(|parser| {
3498 let field_name = parser.parse_identifier()?;
3499 let field_type = parser.parse_data_type()?;
3500
3501 Ok(StructField {
3502 field_name: Some(field_name),
3503 field_type,
3504 options: None,
3505 })
3506 });
3507 self.expect_token(&Token::RParen)?;
3508 struct_body
3509 }
3510
    /// Parses a single field definition in a struct type:
    /// `[name [:]] type [OPTIONS (...)]`. Also reports whether the field's
    /// type consumed a trailing `>` from a `>>` token.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // Two leading words (`name type`) or `name :` indicate a named
        // field; otherwise only a bare type is present.
        let is_named_field = matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
        );

        let field_name = if is_named_field {
            let name = self.parse_identifier()?;
            // The colon separator between name and type is optional.
            let _ = self.consume_token(&Token::Colon);
            Some(name)
        } else {
            None
        };

        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3552
3553 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3563 self.expect_keyword_is(Keyword::UNION)?;
3564
3565 self.expect_token(&Token::LParen)?;
3566
3567 let fields = self.parse_comma_separated(|p| {
3568 Ok(UnionField {
3569 field_name: p.parse_identifier()?,
3570 field_type: p.parse_data_type()?,
3571 })
3572 })?;
3573
3574 self.expect_token(&Token::RParen)?;
3575
3576 Ok(fields)
3577 }
3578
3579 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3590 self.expect_token(&Token::LBrace)?;
3591
3592 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3593
3594 self.expect_token(&Token::RBrace)?;
3595
3596 Ok(Expr::Dictionary(fields))
3597 }
3598
3599 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3610 let key = self.parse_identifier()?;
3611
3612 self.expect_token(&Token::Colon)?;
3613
3614 let expr = self.parse_expr()?;
3615
3616 Ok(DictionaryField {
3617 key,
3618 value: Box::new(expr),
3619 })
3620 }
3621
3622 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3632 self.expect_token(&Token::LBrace)?;
3633 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3634 self.expect_token(&Token::RBrace)?;
3635 Ok(Expr::Map(Map { entries: fields }))
3636 }
3637
3638 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3648 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3650
3651 self.expect_token(&Token::Colon)?;
3652
3653 let value = self.parse_expr()?;
3654
3655 Ok(MapEntry {
3656 key: Box::new(key),
3657 value: Box::new(value),
3658 })
3659 }
3660
3661 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3671 self.expect_keyword_is(Keyword::MAP)?;
3672 self.expect_token(&Token::LParen)?;
3673 let key_data_type = self.parse_data_type()?;
3674 self.expect_token(&Token::Comma)?;
3675 let value_data_type = self.parse_data_type()?;
3676 self.expect_token(&Token::RParen)?;
3677
3678 Ok((key_data_type, value_data_type))
3679 }
3680
3681 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3691 self.expect_keyword_is(Keyword::TUPLE)?;
3692 self.expect_token(&Token::LParen)?;
3693 let mut field_defs = vec![];
3694 loop {
3695 let (def, _) = self.parse_struct_field_def()?;
3696 field_defs.push(def);
3697 if !self.consume_token(&Token::Comma) {
3698 break;
3699 }
3700 }
3701 self.expect_token(&Token::RParen)?;
3702
3703 Ok(field_defs)
3704 }
3705
    /// Consumes the `>` closing an angle-bracketed type list, unless
    /// `trailing_bracket` says it was already consumed as part of a `>>`.
    ///
    /// Returns whether a `>>` was consumed here — i.e. whether one extra
    /// closing bracket now belongs to the enclosing list.
    fn expect_closing_angle_bracket(
        &mut self,
        trailing_bracket: MatchedTrailingBracket,
    ) -> Result<MatchedTrailingBracket, ParserError> {
        let trailing_bracket = if !trailing_bracket.0 {
            match &self.peek_token_ref().token {
                Token::Gt => {
                    self.next_token();
                    false.into()
                }
                // `>>` closes this list and the enclosing one at once.
                Token::ShiftRight => {
                    self.next_token();
                    true.into()
                }
                _ => return self.expected_ref(">", self.peek_token_ref()),
            }
        } else {
            // Already closed by the element parser; nothing to consume.
            false.into()
        };

        Ok(trailing_bracket)
    }
3732
3733 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3735 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3737 return infix;
3738 }
3739
3740 let dialect = self.dialect;
3741
3742 self.advance_token();
3743 let tok = self.get_current_token();
3744 debug!("infix: {tok:?}");
3745 let tok_index = self.get_current_index();
3746 let span = tok.span;
3747 let regular_binary_operator = match &tok.token {
3748 Token::Spaceship => Some(BinaryOperator::Spaceship),
3749 Token::DoubleEq => Some(BinaryOperator::Eq),
3750 Token::Assignment => Some(BinaryOperator::Assignment),
3751 Token::Eq => Some(BinaryOperator::Eq),
3752 Token::Neq => Some(BinaryOperator::NotEq),
3753 Token::Gt => Some(BinaryOperator::Gt),
3754 Token::GtEq => Some(BinaryOperator::GtEq),
3755 Token::Lt => Some(BinaryOperator::Lt),
3756 Token::LtEq => Some(BinaryOperator::LtEq),
3757 Token::Plus => Some(BinaryOperator::Plus),
3758 Token::Minus => Some(BinaryOperator::Minus),
3759 Token::Mul => Some(BinaryOperator::Multiply),
3760 Token::Mod => Some(BinaryOperator::Modulo),
3761 Token::StringConcat => Some(BinaryOperator::StringConcat),
3762 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3763 Token::Caret => {
3764 if dialect_is!(dialect is PostgreSqlDialect) {
3767 Some(BinaryOperator::PGExp)
3768 } else {
3769 Some(BinaryOperator::BitwiseXor)
3770 }
3771 }
3772 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3773 Token::Div => Some(BinaryOperator::Divide),
3774 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3775 Some(BinaryOperator::DuckIntegerDivide)
3776 }
3777 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3778 Some(BinaryOperator::PGBitwiseShiftLeft)
3779 }
3780 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3781 Some(BinaryOperator::PGBitwiseShiftRight)
3782 }
3783 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3784 Some(BinaryOperator::PGBitwiseXor)
3785 }
3786 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3787 Some(BinaryOperator::PGOverlap)
3788 }
3789 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3790 Some(BinaryOperator::PGOverlap)
3791 }
3792 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3793 Some(BinaryOperator::And)
3794 }
3795 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3796 Some(BinaryOperator::PGStartsWith)
3797 }
3798 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3799 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3800 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3801 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3802 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3803 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3804 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3805 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3806 Token::Arrow => Some(BinaryOperator::Arrow),
3807 Token::LongArrow => Some(BinaryOperator::LongArrow),
3808 Token::HashArrow => Some(BinaryOperator::HashArrow),
3809 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3810 Token::AtArrow => Some(BinaryOperator::AtArrow),
3811 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3812 Token::HashMinus => Some(BinaryOperator::HashMinus),
3813 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3814 Token::AtAt => Some(BinaryOperator::AtAt),
3815 Token::Question => Some(BinaryOperator::Question),
3816 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3817 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3818 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3819 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3820 Some(BinaryOperator::DoubleHash)
3821 }
3822
3823 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3824 Some(BinaryOperator::AndLt)
3825 }
3826 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3827 Some(BinaryOperator::AndGt)
3828 }
3829 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3830 Some(BinaryOperator::QuestionDash)
3831 }
3832 Token::AmpersandLeftAngleBracketVerticalBar
3833 if self.dialect.supports_geometric_types() =>
3834 {
3835 Some(BinaryOperator::AndLtPipe)
3836 }
3837 Token::VerticalBarAmpersandRightAngleBracket
3838 if self.dialect.supports_geometric_types() =>
3839 {
3840 Some(BinaryOperator::PipeAndGt)
3841 }
3842 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3843 Some(BinaryOperator::LtDashGt)
3844 }
3845 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3846 Some(BinaryOperator::LtCaret)
3847 }
3848 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3849 Some(BinaryOperator::GtCaret)
3850 }
3851 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3852 Some(BinaryOperator::QuestionHash)
3853 }
3854 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3855 Some(BinaryOperator::QuestionDoublePipe)
3856 }
3857 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3858 Some(BinaryOperator::QuestionDashPipe)
3859 }
3860 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3861 Some(BinaryOperator::TildeEq)
3862 }
3863 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3864 Some(BinaryOperator::LtLtPipe)
3865 }
3866 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3867 Some(BinaryOperator::PipeGtGt)
3868 }
3869 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3870
3871 Token::Word(w) => match w.keyword {
3872 Keyword::AND => Some(BinaryOperator::And),
3873 Keyword::OR => Some(BinaryOperator::Or),
3874 Keyword::XOR => Some(BinaryOperator::Xor),
3875 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3876 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3877 self.expect_token(&Token::LParen)?;
3878 let mut idents = vec![];
3883 loop {
3884 self.advance_token();
3885 idents.push(self.get_current_token().to_string());
3886 if !self.consume_token(&Token::Period) {
3887 break;
3888 }
3889 }
3890 self.expect_token(&Token::RParen)?;
3891 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3892 }
3893 _ => None,
3894 },
3895 _ => None,
3896 };
3897
3898 let tok = self.token_at(tok_index);
3899 if let Some(op) = regular_binary_operator {
3900 if let Some(keyword) =
3901 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3902 {
3903 self.expect_token(&Token::LParen)?;
3904 let right = if self.peek_sub_query() {
3905 self.prev_token(); self.parse_subexpr(precedence)?
3909 } else {
3910 let right = self.parse_subexpr(precedence)?;
3912 self.expect_token(&Token::RParen)?;
3913 right
3914 };
3915
3916 if !matches!(
3917 op,
3918 BinaryOperator::Gt
3919 | BinaryOperator::Lt
3920 | BinaryOperator::GtEq
3921 | BinaryOperator::LtEq
3922 | BinaryOperator::Eq
3923 | BinaryOperator::NotEq
3924 | BinaryOperator::PGRegexMatch
3925 | BinaryOperator::PGRegexIMatch
3926 | BinaryOperator::PGRegexNotMatch
3927 | BinaryOperator::PGRegexNotIMatch
3928 | BinaryOperator::PGLikeMatch
3929 | BinaryOperator::PGILikeMatch
3930 | BinaryOperator::PGNotLikeMatch
3931 | BinaryOperator::PGNotILikeMatch
3932 ) {
3933 return parser_err!(
3934 format!(
3935 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3936 ),
3937 span.start
3938 );
3939 };
3940
3941 Ok(match keyword {
3942 Keyword::ALL => Expr::AllOp {
3943 left: Box::new(expr),
3944 compare_op: op,
3945 right: Box::new(right),
3946 },
3947 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3948 left: Box::new(expr),
3949 compare_op: op,
3950 right: Box::new(right),
3951 is_some: keyword == Keyword::SOME,
3952 },
3953 unexpected_keyword => return Err(ParserError::ParserError(
3954 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3955 )),
3956 })
3957 } else {
3958 Ok(Expr::BinaryOp {
3959 left: Box::new(expr),
3960 op,
3961 right: Box::new(self.parse_subexpr(precedence)?),
3962 })
3963 }
3964 } else if let Token::Word(w) = &tok.token {
3965 match w.keyword {
3966 Keyword::IS => {
3967 if self.parse_keyword(Keyword::NULL) {
3968 Ok(Expr::IsNull(Box::new(expr)))
3969 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3970 Ok(Expr::IsNotNull(Box::new(expr)))
3971 } else if self.parse_keywords(&[Keyword::TRUE]) {
3972 Ok(Expr::IsTrue(Box::new(expr)))
3973 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3974 Ok(Expr::IsNotTrue(Box::new(expr)))
3975 } else if self.parse_keywords(&[Keyword::FALSE]) {
3976 Ok(Expr::IsFalse(Box::new(expr)))
3977 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3978 Ok(Expr::IsNotFalse(Box::new(expr)))
3979 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3980 Ok(Expr::IsUnknown(Box::new(expr)))
3981 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3982 Ok(Expr::IsNotUnknown(Box::new(expr)))
3983 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3984 let expr2 = self.parse_expr()?;
3985 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3986 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3987 {
3988 let expr2 = self.parse_expr()?;
3989 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3990 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3991 Ok(is_normalized)
3992 } else {
3993 self.expected_ref(
3994 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3995 self.peek_token_ref(),
3996 )
3997 }
3998 }
3999 Keyword::AT => {
4000 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4001 Ok(Expr::AtTimeZone {
4002 timestamp: Box::new(expr),
4003 time_zone: Box::new(self.parse_subexpr(precedence)?),
4004 })
4005 }
4006 Keyword::NOT
4007 | Keyword::IN
4008 | Keyword::BETWEEN
4009 | Keyword::LIKE
4010 | Keyword::ILIKE
4011 | Keyword::SIMILAR
4012 | Keyword::REGEXP
4013 | Keyword::RLIKE => {
4014 self.prev_token();
4015 let negated = self.parse_keyword(Keyword::NOT);
4016 let regexp = self.parse_keyword(Keyword::REGEXP);
4017 let rlike = self.parse_keyword(Keyword::RLIKE);
4018 let null = if !self.in_column_definition_state() {
4019 self.parse_keyword(Keyword::NULL)
4020 } else {
4021 false
4022 };
4023 if regexp || rlike {
4024 Ok(Expr::RLike {
4025 negated,
4026 expr: Box::new(expr),
4027 pattern: Box::new(
4028 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4029 ),
4030 regexp,
4031 })
4032 } else if negated && null {
4033 Ok(Expr::IsNotNull(Box::new(expr)))
4034 } else if self.parse_keyword(Keyword::IN) {
4035 self.parse_in(expr, negated)
4036 } else if self.parse_keyword(Keyword::BETWEEN) {
4037 self.parse_between(expr, negated)
4038 } else if self.parse_keyword(Keyword::LIKE) {
4039 Ok(Expr::Like {
4040 negated,
4041 any: self.parse_keyword(Keyword::ANY),
4042 expr: Box::new(expr),
4043 pattern: Box::new(
4044 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4045 ),
4046 escape_char: self.parse_escape_char()?,
4047 })
4048 } else if self.parse_keyword(Keyword::ILIKE) {
4049 Ok(Expr::ILike {
4050 negated,
4051 any: self.parse_keyword(Keyword::ANY),
4052 expr: Box::new(expr),
4053 pattern: Box::new(
4054 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4055 ),
4056 escape_char: self.parse_escape_char()?,
4057 })
4058 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4059 Ok(Expr::SimilarTo {
4060 negated,
4061 expr: Box::new(expr),
4062 pattern: Box::new(
4063 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4064 ),
4065 escape_char: self.parse_escape_char()?,
4066 })
4067 } else {
4068 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4069 }
4070 }
4071 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4072 Ok(Expr::IsNotNull(Box::new(expr)))
4073 }
4074 Keyword::MEMBER => {
4075 if self.parse_keyword(Keyword::OF) {
4076 self.expect_token(&Token::LParen)?;
4077 let array = self.parse_expr()?;
4078 self.expect_token(&Token::RParen)?;
4079 Ok(Expr::MemberOf(MemberOf {
4080 value: Box::new(expr),
4081 array: Box::new(array),
4082 }))
4083 } else {
4084 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4085 }
4086 }
4087 _ => parser_err!(
4089 format!("No infix parser for token {:?}", tok.token),
4090 tok.span.start
4091 ),
4092 }
4093 } else if Token::DoubleColon == *tok {
4094 Ok(Expr::Cast {
4095 kind: CastKind::DoubleColon,
4096 expr: Box::new(expr),
4097 data_type: self.parse_data_type()?,
4098 array: false,
4099 format: None,
4100 })
4101 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4102 Ok(Expr::UnaryOp {
4103 op: UnaryOperator::PGPostfixFactorial,
4104 expr: Box::new(expr),
4105 })
4106 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4107 || (Token::Colon == *tok)
4108 {
4109 self.prev_token();
4110 self.parse_json_access(expr)
4111 } else {
4112 parser_err!(
4114 format!("No infix parser for token {:?}", tok.token),
4115 tok.span.start
4116 )
4117 }
4118 }
4119
4120 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4122 if self.parse_keyword(Keyword::ESCAPE) {
4123 Ok(Some(self.parse_value()?))
4124 } else {
4125 Ok(None)
4126 }
4127 }
4128
    /// Parses the interior of a `[...]` subscript after the opening bracket
    /// has been consumed: either a plain index (`expr]`) or a slice with
    /// optional lower bound, upper bound and stride
    /// (`[lower]:[upper][:stride]]`).
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the slice has no lower bound.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `expr]` is a plain index access; a bare `:]` is a slice with
        // neither bound.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // A lower bound not followed by `]` must be followed by the `:`
        // that separates it from the upper bound.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `lower:]` — slice with no upper bound; early-return from inside
        // the initializer, otherwise parse the upper-bound expression.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `lower:upper]` — slice without a stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // Otherwise a second `:` introduces the (optional) stride.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride expression was parsed, the closing `]` is still
        // pending; consume it now.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4203
4204 pub fn parse_multi_dim_subscript(
4206 &mut self,
4207 chain: &mut Vec<AccessExpr>,
4208 ) -> Result<(), ParserError> {
4209 while self.consume_token(&Token::LBracket) {
4210 self.parse_subscript(chain)?;
4211 }
4212 Ok(())
4213 }
4214
4215 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4219 let subscript = self.parse_subscript_inner()?;
4220 chain.push(AccessExpr::Subscript(subscript));
4221 Ok(())
4222 }
4223
4224 fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
4225 let token = self.next_token();
4226 match token.token {
4227 Token::Word(Word {
4228 value,
4229 quote_style: quote_style @ (Some('"') | Some('`') | None),
4232 keyword: _,
4235 }) => Ok(JsonPathElem::Dot {
4236 key: value,
4237 quoted: quote_style.is_some(),
4238 }),
4239
4240 Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
4244
4245 _ => self.expected("variant object key name", token),
4246 }
4247 }
4248
4249 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4250 let path = self.parse_json_path()?;
4251 Ok(Expr::JsonAccess {
4252 value: Box::new(expr),
4253 path,
4254 })
4255 }
4256
    /// Parses a JSON path such as `:a.b[0]` or `[0].x`, stopping at the
    /// first token that cannot extend the path.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:[expr]` — colon-bracket access; only valid as the first
                // path element (guard order matters: this arm must be tried
                // before the plain-colon arm below).
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // `:key` — only valid as the first path element.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.key` — only valid after at least one element.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `[expr]` — bracketed access, valid at any position.
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Any other token ends the path; push it back for the caller.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers invoke this only after seeing a path-introducing token, so
        // at least one element should have been collected.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4289
    /// Parses the tail of an `[NOT] IN` expression after the `IN` keyword
    /// has been consumed: `IN UNNEST(expr)`, `IN (subquery)`, or
    /// `IN (list)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // `IN UNNEST(array_expression)` — BigQuery-style array membership.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Speculatively try a subquery first; `maybe_parse` rewinds on
        // failure so we can fall back to an expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects accept an empty `IN ()` list.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4324
4325 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4327 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4330 self.expect_keyword_is(Keyword::AND)?;
4331 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4332 Ok(Expr::Between {
4333 expr: Box::new(expr),
4334 negated,
4335 low: Box::new(low),
4336 high: Box::new(high),
4337 })
4338 }
4339
4340 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4342 Ok(Expr::Cast {
4343 kind: CastKind::DoubleColon,
4344 expr: Box::new(expr),
4345 data_type: self.parse_data_type()?,
4346 array: false,
4347 format: None,
4348 })
4349 }
4350
    /// Returns the precedence with which the next token would bind,
    /// delegating to the dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4355
    /// Returns the token at `index`, or the shared EOF token when `index`
    /// is past the end of the stream.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4361
    /// Returns a clone of the next non-whitespace token without consuming
    /// it.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4369
    /// Returns a reference to the next non-whitespace token without
    /// consuming it.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4375
    /// Returns the next `N` non-whitespace tokens (spans discarded),
    /// padding with `Token::EOF` past the end of input; consumes nothing.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
4402
4403 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4408 let mut index = self.index;
4409 core::array::from_fn(|_| loop {
4410 let token = self.tokens.get(index);
4411 index += 1;
4412 if let Some(TokenWithSpan {
4413 token: Token::Whitespace(_),
4414 span: _,
4415 }) = token
4416 {
4417 continue;
4418 }
4419 break token.cloned().unwrap_or(TokenWithSpan {
4420 token: Token::EOF,
4421 span: Span::empty(),
4422 });
4423 })
4424 }
4425
4426 pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4431 let mut index = self.index;
4432 core::array::from_fn(|_| loop {
4433 let token = self.tokens.get(index);
4434 index += 1;
4435 if let Some(TokenWithSpan {
4436 token: Token::Whitespace(_),
4437 span: _,
4438 }) = token
4439 {
4440 continue;
4441 }
4442 break token.unwrap_or(&EOF_TOKEN);
4443 })
4444 }
4445
    /// Returns a clone of the nth (0-based) upcoming non-whitespace token
    /// without consuming anything.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4450
4451 pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4453 let mut index = self.index;
4454 loop {
4455 index += 1;
4456 match self.tokens.get(index - 1) {
4457 Some(TokenWithSpan {
4458 token: Token::Whitespace(_),
4459 span: _,
4460 }) => continue,
4461 non_whitespace => {
4462 if n == 0 {
4463 return non_whitespace.unwrap_or(&EOF_TOKEN);
4464 }
4465 n -= 1;
4466 }
4467 }
4468 }
4469 }
4470
    /// Returns the immediately next raw token (whitespace included) without
    /// consuming it.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4476
4477 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4479 self.tokens
4480 .get(self.index + n)
4481 .cloned()
4482 .unwrap_or(TokenWithSpan {
4483 token: Token::EOF,
4484 span: Span::empty(),
4485 })
4486 }
4487
    /// Returns a reference to the token exactly `n` raw positions ahead
    /// (whitespace included), or the shared EOF token past end of input.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4492
4493 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4497 let index = self.index;
4498 let matched = self.parse_keywords(expected);
4499 self.index = index;
4500 matched
4501 }
4502
    /// Consumes and returns a clone of the next non-whitespace token (the
    /// EOF token at end of input).
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4511
    /// Returns the raw index of the current (most recently consumed) token.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4519
4520 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4522 self.index += 1;
4523 self.tokens.get(self.index - 1)
4524 }
4525
4526 pub fn advance_token(&mut self) {
4530 loop {
4531 self.index += 1;
4532 match self.tokens.get(self.index - 1) {
4533 Some(TokenWithSpan {
4534 token: Token::Whitespace(_),
4535 span: _,
4536 }) => continue,
4537 _ => break,
4538 }
4539 }
4540 }
4541
    /// Returns the most recently consumed token (the EOF token when nothing
    /// has been consumed yet).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4548
    /// Returns the token one raw position before the current token
    /// (whitespace included; the EOF token when unavailable).
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4555
    /// Returns the next raw token (whitespace included) without consuming
    /// it.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4562
4563 pub fn prev_token(&mut self) {
4570 loop {
4571 assert!(self.index > 0);
4572 self.index -= 1;
4573 if let Some(TokenWithSpan {
4574 token: Token::Whitespace(_),
4575 span: _,
4576 }) = self.tokens.get(self.index)
4577 {
4578 continue;
4579 }
4580 return;
4581 }
4582 }
4583
    /// Builds the standard `Expected: X, found: Y` parse error, located at
    /// the found token's span start; takes the token by value.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4591
    /// Builds the standard `Expected: X, found: Y` parse error, located at
    /// the found token's span start; borrows the token.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4599
    /// Builds the standard `Expected ...` error for the token at a specific
    /// raw index (the EOF token when the index is out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4608
4609 #[must_use]
4612 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4613 if self.peek_keyword(expected) {
4614 self.advance_token();
4615 true
4616 } else {
4617 false
4618 }
4619 }
4620
4621 #[must_use]
4622 pub fn peek_keyword(&self, expected: Keyword) -> bool {
4626 matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4627 }
4628
    /// Consumes `expected` followed by the given token sequence if and only
    /// if the whole sequence matches; returns whether it matched.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4639
    /// Like [`Self::parse_keyword_with_tokens`], but consumes nothing.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4647
4648 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4649 match &self.peek_token_ref().token {
4650 Token::Word(w) if expected == w.keyword => {
4651 for (idx, token) in tokens.iter().enumerate() {
4652 if self.peek_nth_token_ref(idx + 1).token != *token {
4653 return false;
4654 }
4655 }
4656
4657 if consume {
4658 for _ in 0..(tokens.len() + 1) {
4659 self.advance_token();
4660 }
4661 }
4662
4663 true
4664 }
4665 _ => false,
4666 }
4667 }
4668
4669 #[must_use]
4673 pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4674 self.parse_keywords_indexed(keywords).is_some()
4675 }
4676
4677 #[must_use]
4680 fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
4681 let start_index = self.index;
4682 let mut first_keyword_index = None;
4683 for &keyword in keywords {
4684 if !self.parse_keyword(keyword) {
4685 self.index = start_index;
4686 return None;
4687 }
4688 if first_keyword_index.is_none() {
4689 first_keyword_index = Some(self.index.saturating_sub(1));
4690 }
4691 }
4692 first_keyword_index
4693 }
4694
4695 #[must_use]
4698 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4699 for keyword in keywords {
4700 if self.peek_keyword(*keyword) {
4701 return Some(*keyword);
4702 }
4703 }
4704 None
4705 }
4706
4707 #[must_use]
4711 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4712 match &self.peek_token_ref().token {
4713 Token::Word(w) => {
4714 keywords
4715 .iter()
4716 .find(|keyword| **keyword == w.keyword)
4717 .map(|keyword| {
4718 self.advance_token();
4719 *keyword
4720 })
4721 }
4722 _ => None,
4723 }
4724 }
4725
4726 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4729 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4730 Ok(keyword)
4731 } else {
4732 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4733 self.expected_ref(
4734 &format!("one of {}", keywords.join(" or ")),
4735 self.peek_token_ref(),
4736 )
4737 }
4738 }
4739
4740 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4745 if self.parse_keyword(expected) {
4746 Ok(self.get_current_token().clone())
4747 } else {
4748 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4749 }
4750 }
4751
4752 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4758 if self.parse_keyword(expected) {
4759 Ok(())
4760 } else {
4761 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4762 }
4763 }
4764
4765 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4768 for &kw in expected {
4769 self.expect_keyword_is(kw)?;
4770 }
4771 Ok(())
4772 }
4773
4774 #[must_use]
4778 pub fn consume_token(&mut self, expected: &Token) -> bool {
4779 if self.peek_token_ref() == expected {
4780 self.advance_token();
4781 true
4782 } else {
4783 false
4784 }
4785 }
4786
4787 #[must_use]
4791 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4792 let index = self.index;
4793 for token in tokens {
4794 if !self.consume_token(token) {
4795 self.index = index;
4796 return false;
4797 }
4798 }
4799 true
4800 }
4801
4802 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4804 if self.peek_token_ref() == expected {
4805 Ok(self.next_token())
4806 } else {
4807 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4808 }
4809 }
4810
4811 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4812 where
4813 <T as FromStr>::Err: Display,
4814 {
4815 s.parse::<T>().map_err(|e| {
4816 ParserError::ParserError(format!(
4817 "Could not parse '{s}' as {}: {e}{loc}",
4818 core::any::type_name::<T>()
4819 ))
4820 })
4821 }
4822
4823 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4825 let trailing_commas =
4831 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4832
4833 self.parse_comma_separated_with_trailing_commas(
4834 |p| p.parse_select_item(),
4835 trailing_commas,
4836 Self::is_reserved_for_column_alias,
4837 )
4838 }
4839
    /// Parses the comma-separated list of permissions in a `GRANT`
    /// statement.
    ///
    /// When trailing commas are enabled, a comma followed by `ON` or a
    /// closing delimiter is treated as trailing and ends the list.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                match &self.peek_token_ref().token {
                    // e.g. `GRANT a, b, ON ...` — comma before ON is trailing.
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4863
4864 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4866 let trailing_commas = self.dialect.supports_from_trailing_commas();
4867
4868 self.parse_comma_separated_with_trailing_commas(
4869 Parser::parse_table_and_joins,
4870 trailing_commas,
4871 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4872 )
4873 }
4874
4875 fn is_parse_comma_separated_end_with_trailing_commas<R>(
4882 &mut self,
4883 trailing_commas: bool,
4884 is_reserved_keyword: &R,
4885 ) -> bool
4886 where
4887 R: Fn(&Keyword, &mut Parser) -> bool,
4888 {
4889 if !self.consume_token(&Token::Comma) {
4890 true
4891 } else if trailing_commas {
4892 let token = self.next_token().token;
4893 let is_end = match token {
4894 Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4895 Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4896 true
4897 }
4898 _ => false,
4899 };
4900 self.prev_token();
4901
4902 is_end
4903 } else {
4904 false
4905 }
4906 }
4907
    /// List-end check using the parser's configured trailing-comma option
    /// and the default column-alias reservation rule.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4916
    /// Parses one or more items produced by `f`, separated by commas, using
    /// the parser's configured trailing-comma behavior.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4928
4929 fn parse_comma_separated_with_trailing_commas<T, F, R>(
4934 &mut self,
4935 mut f: F,
4936 trailing_commas: bool,
4937 is_reserved_keyword: R,
4938 ) -> Result<Vec<T>, ParserError>
4939 where
4940 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4941 R: Fn(&Keyword, &mut Parser) -> bool,
4942 {
4943 let mut values = vec![];
4944 loop {
4945 values.push(f(self)?);
4946 if self.is_parse_comma_separated_end_with_trailing_commas(
4947 trailing_commas,
4948 &is_reserved_keyword,
4949 ) {
4950 break;
4951 }
4952 }
4953 Ok(values)
4954 }
4955
4956 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4958 where
4959 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4960 {
4961 let mut values = vec![];
4962 loop {
4963 values.push(f(self)?);
4964 if !self.consume_token(&Token::Period) {
4965 break;
4966 }
4967 }
4968 Ok(values)
4969 }
4970
4971 pub fn parse_keyword_separated<T, F>(
4973 &mut self,
4974 keyword: Keyword,
4975 mut f: F,
4976 ) -> Result<Vec<T>, ParserError>
4977 where
4978 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4979 {
4980 let mut values = vec![];
4981 loop {
4982 values.push(f(self)?);
4983 if !self.parse_keyword(keyword) {
4984 break;
4985 }
4986 }
4987 Ok(values)
4988 }
4989
    /// Runs `f` between a required pair of parentheses and returns its
    /// result.
    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.expect_token(&Token::LParen)?;
        let res = f(self)?;
        self.expect_token(&Token::RParen)?;
        Ok(res)
    }
5000
5001 pub fn parse_comma_separated0<T, F>(
5004 &mut self,
5005 f: F,
5006 end_token: Token,
5007 ) -> Result<Vec<T>, ParserError>
5008 where
5009 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
5010 {
5011 if self.peek_token_ref().token == end_token {
5012 return Ok(vec![]);
5013 }
5014
5015 if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
5016 let _ = self.consume_token(&Token::Comma);
5017 return Ok(vec![]);
5018 }
5019
5020 self.parse_comma_separated(f)
5021 }
5022
5023 pub(crate) fn parse_statement_list(
5027 &mut self,
5028 terminal_keywords: &[Keyword],
5029 ) -> Result<Vec<Statement>, ParserError> {
5030 let mut values = vec![];
5031 loop {
5032 match &self.peek_nth_token_ref(0).token {
5033 Token::EOF => break,
5034 Token::Word(w) => {
5035 if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
5036 break;
5037 }
5038 }
5039 _ => {}
5040 }
5041
5042 values.push(self.parse_statement()?);
5043 self.expect_token(&Token::SemiColon)?;
5044 }
5045 Ok(values)
5046 }
5047
    /// Default list-termination predicate: a keyword ends a comma-separated
    /// list when the dialect does not allow it as a column alias.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5054
    /// Runs `f` speculatively: returns `Ok(Some(..))` on success, rewinds
    /// and returns `Ok(None)` on an ordinary parse failure, and propagates
    /// `RecursionLimitExceeded` (retrying after that cannot help).
    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        match self.try_parse(f) {
            Ok(t) => Ok(Some(t)),
            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
            _ => Ok(None),
        }
    }
5068
    /// Runs `f`, restoring the token position on failure so the caller can
    /// try an alternative production; the error itself is still returned.
    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        let index = self.index;
        match f(self) {
            Ok(t) => Ok(t),
            Err(e) => {
                // Rewind so failed speculation leaves no trace in the
                // token stream.
                self.index = index;
                Err(e)
            }
        }
    }
5084
    /// Parses an optional `ALL`, `DISTINCT`, or `DISTINCT ON (...)` set
    /// quantifier. Returns `Ok(None)` when neither keyword is present and
    /// rejects contradictory combinations such as `ALL DISTINCT`.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Capture the location up front for error reporting.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            // parse_one_of_keywords only yields keywords from its argument
            // list, so this arm should be unreachable in practice.
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only `DISTINCT` may carry an `ON (...)` suffix.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        // `DISTINCT ON (col, ...)`; an empty column list `()` is accepted
        // (the `)` is peeked, pushed back, then consumed by expect_token).
        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5123
/// Parse a `CREATE …` statement: first any leading modifiers
/// (`OR REPLACE`, `OR ALTER`, `LOCAL`/`GLOBAL`, `TRANSIENT`,
/// `TEMP[ORARY]`, `PERSISTENT`), then dispatch on the object type.
/// Branch order below is significant — several keywords are consumed
/// as they are tested.
pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
    let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
    let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
    let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
    let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
    let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
    // Fold GLOBAL/LOCAL into a tri-state flag:
    // Some(true) = GLOBAL, Some(false) = LOCAL, None = neither.
    let global: Option<bool> = if global {
        Some(true)
    } else if local {
        Some(false)
    } else {
        None
    };
    let temporary = self
        .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
        .is_some();
    // PERSISTENT is only recognized for DuckDB (used by CREATE SECRET).
    let persistent = dialect_of!(self is DuckDbDialect)
        && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
    let create_view_params = self.parse_create_view_params()?;
    if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
        self.parse_create_snapshot_table().map(Into::into)
    } else if self.parse_keyword(Keyword::TABLE) {
        self.parse_create_table(or_replace, temporary, global, transient)
            .map(Into::into)
    } else if self.peek_keyword(Keyword::MATERIALIZED)
        || self.peek_keyword(Keyword::VIEW)
        || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
        || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
    {
        self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
            .map(Into::into)
    } else if self.parse_keyword(Keyword::POLICY) {
        self.parse_create_policy().map(Into::into)
    } else if self.parse_keyword(Keyword::EXTERNAL) {
        self.parse_create_external_table(or_replace).map(Into::into)
    } else if self.parse_keyword(Keyword::FUNCTION) {
        self.parse_create_function(or_alter, or_replace, temporary)
    } else if self.parse_keyword(Keyword::DOMAIN) {
        self.parse_create_domain().map(Into::into)
    } else if self.parse_keyword(Keyword::TRIGGER) {
        self.parse_create_trigger(temporary, or_alter, or_replace, false)
            .map(Into::into)
    } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
        self.parse_create_trigger(temporary, or_alter, or_replace, true)
            .map(Into::into)
    } else if self.parse_keyword(Keyword::MACRO) {
        self.parse_create_macro(or_replace, temporary)
    } else if self.parse_keyword(Keyword::SECRET) {
        self.parse_create_secret(or_replace, temporary, persistent)
    } else if self.parse_keyword(Keyword::USER) {
        self.parse_create_user(or_replace).map(Into::into)
    } else if or_replace {
        // OR REPLACE was given but no object type above matched; the
        // object types handled below this point do not accept OR REPLACE.
        self.expected_ref(
            "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
            self.peek_token_ref(),
        )
    } else if self.parse_keyword(Keyword::EXTENSION) {
        self.parse_create_extension().map(Into::into)
    } else if self.parse_keyword(Keyword::INDEX) {
        self.parse_create_index(false).map(Into::into)
    } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
        self.parse_create_index(true).map(Into::into)
    } else if self.parse_keyword(Keyword::VIRTUAL) {
        self.parse_create_virtual_table()
    } else if self.parse_keyword(Keyword::SCHEMA) {
        self.parse_create_schema()
    } else if self.parse_keyword(Keyword::DATABASE) {
        self.parse_create_database()
    } else if self.parse_keyword(Keyword::ROLE) {
        self.parse_create_role().map(Into::into)
    } else if self.parse_keyword(Keyword::SEQUENCE) {
        self.parse_create_sequence(temporary)
    } else if self.parse_keyword(Keyword::COLLATION) {
        self.parse_create_collation().map(Into::into)
    } else if self.parse_keyword(Keyword::TYPE) {
        self.parse_create_type()
    } else if self.parse_keyword(Keyword::PROCEDURE) {
        self.parse_create_procedure(or_alter)
    } else if self.parse_keyword(Keyword::CONNECTOR) {
        self.parse_create_connector().map(Into::into)
    } else if self.parse_keyword(Keyword::OPERATOR) {
        // OPERATOR itself is consumed; FAMILY / CLASS refine the form.
        if self.parse_keyword(Keyword::FAMILY) {
            self.parse_create_operator_family().map(Into::into)
        } else if self.parse_keyword(Keyword::CLASS) {
            self.parse_create_operator_class().map(Into::into)
        } else {
            self.parse_create_operator().map(Into::into)
        }
    } else if self.parse_keyword(Keyword::SERVER) {
        self.parse_pg_create_server()
    } else {
        self.expected_ref("an object type after CREATE", self.peek_token_ref())
    }
}
5220
5221 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5222 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5223 let name = self.parse_identifier()?;
5224 let options = self
5225 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5226 .options;
5227 let with_tags = self.parse_keyword(Keyword::WITH);
5228 let tags = if self.parse_keyword(Keyword::TAG) {
5229 self.parse_key_value_options(true, &[])?.options
5230 } else {
5231 vec![]
5232 };
5233 Ok(CreateUser {
5234 or_replace,
5235 if_not_exists,
5236 name,
5237 options: KeyValueOptions {
5238 options,
5239 delimiter: KeyValueOptionsDelimiter::Space,
5240 },
5241 with_tags,
5242 tags: KeyValueOptions {
5243 options: tags,
5244 delimiter: KeyValueOptionsDelimiter::Comma,
5245 },
5246 })
5247 }
5248
/// Parse `CREATE [OR REPLACE] [TEMPORARY | PERSISTENT] SECRET
/// [IF NOT EXISTS] [<name>] [IN <storage>] ( TYPE <type> [, <k> <v>]... )`.
///
/// `temporary` and `persistent` are mutually exclusive; supplying both
/// is rejected below.
pub fn parse_create_secret(
    &mut self,
    or_replace: bool,
    temporary: bool,
    persistent: bool,
) -> Result<Statement, ParserError> {
    let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

    // Both the secret name and the `IN <storage>` clause are optional;
    // the option list in parentheses may follow immediately.
    let mut storage_specifier = None;
    let mut name = None;
    if self.peek_token_ref().token != Token::LParen {
        if self.parse_keyword(Keyword::IN) {
            storage_specifier = self.parse_identifier().ok()
        } else {
            name = self.parse_identifier().ok();
        }

        // If a name was parsed first, an `IN <storage>` clause may
        // still follow before the option list.
        if storage_specifier.is_none()
            && self.peek_token_ref().token != Token::LParen
            && self.parse_keyword(Keyword::IN)
        {
            storage_specifier = self.parse_identifier().ok();
        }
    }

    // The parenthesized list must start with `TYPE <ident>`.
    self.expect_token(&Token::LParen)?;
    self.expect_keyword_is(Keyword::TYPE)?;
    let secret_type = self.parse_identifier()?;

    // Remaining entries are comma-separated `<key> <value>` pairs.
    let mut options = Vec::new();
    if self.consume_token(&Token::Comma) {
        options.append(&mut self.parse_comma_separated(|p| {
            let key = p.parse_identifier()?;
            let value = p.parse_identifier()?;
            Ok(SecretOption { key, value })
        })?);
    }
    self.expect_token(&Token::RParen)?;

    // Fold the two flags into Some(true)=TEMPORARY, Some(false)=PERSISTENT,
    // None=neither; both at once is an error.
    let temp = match (temporary, persistent) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        (false, false) => None,
        _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
    };

    Ok(Statement::CreateSecret {
        or_replace,
        temporary: temp,
        if_not_exists,
        name,
        storage_specifier,
        secret_type,
        options,
    })
}
5307
5308 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5310 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5311 if self.parse_keyword(Keyword::TABLE) {
5312 let table_name = self.parse_object_name(false)?;
5313 if self.peek_token_ref().token != Token::EOF {
5314 if let Token::Word(word) = &self.peek_token_ref().token {
5315 if word.keyword == Keyword::OPTIONS {
5316 options = self.parse_options(Keyword::OPTIONS)?
5317 }
5318 };
5319
5320 if self.peek_token_ref().token != Token::EOF {
5321 let (a, q) = self.parse_as_query()?;
5322 has_as = a;
5323 query = Some(q);
5324 }
5325
5326 Ok(Statement::Cache {
5327 table_flag,
5328 table_name,
5329 has_as,
5330 options,
5331 query,
5332 })
5333 } else {
5334 Ok(Statement::Cache {
5335 table_flag,
5336 table_name,
5337 has_as,
5338 options,
5339 query,
5340 })
5341 }
5342 } else {
5343 table_flag = Some(self.parse_object_name(false)?);
5344 if self.parse_keyword(Keyword::TABLE) {
5345 let table_name = self.parse_object_name(false)?;
5346 if self.peek_token_ref().token != Token::EOF {
5347 if let Token::Word(word) = &self.peek_token_ref().token {
5348 if word.keyword == Keyword::OPTIONS {
5349 options = self.parse_options(Keyword::OPTIONS)?
5350 }
5351 };
5352
5353 if self.peek_token_ref().token != Token::EOF {
5354 let (a, q) = self.parse_as_query()?;
5355 has_as = a;
5356 query = Some(q);
5357 }
5358
5359 Ok(Statement::Cache {
5360 table_flag,
5361 table_name,
5362 has_as,
5363 options,
5364 query,
5365 })
5366 } else {
5367 Ok(Statement::Cache {
5368 table_flag,
5369 table_name,
5370 has_as,
5371 options,
5372 query,
5373 })
5374 }
5375 } else {
5376 if self.peek_token_ref().token == Token::EOF {
5377 self.prev_token();
5378 }
5379 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5380 }
5381 }
5382 }
5383
5384 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5386 match &self.peek_token_ref().token {
5387 Token::Word(word) => match word.keyword {
5388 Keyword::AS => {
5389 self.next_token();
5390 Ok((true, self.parse_query()?))
5391 }
5392 _ => Ok((false, self.parse_query()?)),
5393 },
5394 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5395 }
5396 }
5397
5398 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5400 self.expect_keyword_is(Keyword::TABLE)?;
5401 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5402 let table_name = self.parse_object_name(false)?;
5403 Ok(Statement::UNCache {
5404 table_name,
5405 if_exists,
5406 })
5407 }
5408
5409 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5411 self.expect_keyword_is(Keyword::TABLE)?;
5412 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5413 let table_name = self.parse_object_name(false)?;
5414 self.expect_keyword_is(Keyword::USING)?;
5415 let module_name = self.parse_identifier()?;
5416 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5421 Ok(Statement::CreateVirtualTable {
5422 name: table_name,
5423 if_not_exists,
5424 module_name,
5425 module_args,
5426 })
5427 }
5428
5429 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5431 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5432
5433 let schema_name = self.parse_schema_name()?;
5434
5435 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5436 Some(self.parse_expr()?)
5437 } else {
5438 None
5439 };
5440
5441 let with = if self.peek_keyword(Keyword::WITH) {
5442 Some(self.parse_options(Keyword::WITH)?)
5443 } else {
5444 None
5445 };
5446
5447 let options = if self.peek_keyword(Keyword::OPTIONS) {
5448 Some(self.parse_options(Keyword::OPTIONS)?)
5449 } else {
5450 None
5451 };
5452
5453 let clone = if self.parse_keyword(Keyword::CLONE) {
5454 Some(self.parse_object_name(false)?)
5455 } else {
5456 None
5457 };
5458
5459 Ok(Statement::CreateSchema {
5460 schema_name,
5461 if_not_exists,
5462 with,
5463 options,
5464 default_collate_spec,
5465 clone,
5466 })
5467 }
5468
5469 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5470 if self.parse_keyword(Keyword::AUTHORIZATION) {
5471 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5472 } else {
5473 let name = self.parse_object_name(false)?;
5474
5475 if self.parse_keyword(Keyword::AUTHORIZATION) {
5476 Ok(SchemaName::NamedAuthorization(
5477 name,
5478 self.parse_identifier()?,
5479 ))
5480 } else {
5481 Ok(SchemaName::Simple(name))
5482 }
5483 }
5484 }
5485
/// Parse `CREATE DATABASE [IF NOT EXISTS] <name>` with optional
/// `LOCATION`/`MANAGEDLOCATION` clauses, a `CLONE` clause, and
/// `[DEFAULT] CHARACTER SET|CHARSET` / `[DEFAULT] COLLATE` defaults.
pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
    let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let db_name = self.parse_object_name(false)?;
    let mut location = None;
    let mut managed_location = None;
    // LOCATION and MANAGEDLOCATION may appear in either order; the loop
    // exits on the first keyword that is neither.
    loop {
        match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
            Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
            Some(Keyword::MANAGEDLOCATION) => {
                managed_location = Some(self.parse_literal_string()?)
            }
            _ => break,
        }
    }
    let clone = if self.parse_keyword(Keyword::CLONE) {
        Some(self.parse_object_name(false)?)
    } else {
        None
    };

    let mut default_charset = None;
    let mut default_collation = None;
    // Accept `[DEFAULT] CHARACTER SET|CHARSET [=] <id>` and
    // `[DEFAULT] COLLATE [=] <id>` in any order.
    loop {
        let has_default = self.parse_keyword(Keyword::DEFAULT);
        // NOTE(review): `&&` binds tighter than `||`, so the
        // `default_charset.is_none()` guard applies only to the
        // `CHARACTER SET` spelling, not to `CHARSET` — confirm intended.
        if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
            || self.parse_keyword(Keyword::CHARSET)
        {
            let _ = self.consume_token(&Token::Eq);
            default_charset = Some(self.parse_identifier()?.value);
        } else if self.parse_keyword(Keyword::COLLATE) {
            let _ = self.consume_token(&Token::Eq);
            default_collation = Some(self.parse_identifier()?.value);
        } else if has_default {
            // A DEFAULT that introduced neither clause: push it back
            // before leaving the loop.
            self.prev_token();
            break;
        } else {
            break;
        }
    }

    Ok(Statement::CreateDatabase {
        db_name,
        if_not_exists: ine,
        location,
        managed_location,
        or_replace: false,
        transient: false,
        clone,
        data_retention_time_in_days: None,
        max_data_extension_time_in_days: None,
        external_volume: None,
        catalog: None,
        replace_invalid_characters: None,
        default_ddl_collation: None,
        storage_serialization_policy: None,
        comment: None,
        default_charset,
        default_collation,
        catalog_sync: None,
        catalog_sync_namespace_mode: None,
        catalog_sync_namespace_flatten_delimiter: None,
        with_tags: None,
        with_contacts: None,
    })
}
5560
5561 pub fn parse_optional_create_function_using(
5563 &mut self,
5564 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5565 if !self.parse_keyword(Keyword::USING) {
5566 return Ok(None);
5567 };
5568 let keyword =
5569 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5570
5571 let uri = self.parse_literal_string()?;
5572
5573 match keyword {
5574 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5575 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5576 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5577 _ => self.expected(
5578 "JAR, FILE or ARCHIVE, got {:?}",
5579 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5580 ),
5581 }
5582 }
5583
/// Parse the remainder of `CREATE … FUNCTION`, delegating to a
/// dialect-specific implementation. Branch order matters: Hive is
/// checked before the Postgres/Generic arm, and GenericDialect takes
/// the Postgres-style path.
pub fn parse_create_function(
    &mut self,
    or_alter: bool,
    or_replace: bool,
    temporary: bool,
) -> Result<Statement, ParserError> {
    if dialect_of!(self is HiveDialect) {
        self.parse_hive_create_function(or_replace, temporary)
            .map(Into::into)
    } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
        self.parse_postgres_create_function(or_replace, temporary)
            .map(Into::into)
    } else if dialect_of!(self is DuckDbDialect) {
        // DuckDB's CREATE FUNCTION is handled as a macro definition.
        self.parse_create_macro(or_replace, temporary)
    } else if dialect_of!(self is BigQueryDialect) {
        self.parse_bigquery_create_function(or_replace, temporary)
            .map(Into::into)
    } else if dialect_of!(self is MsSqlDialect) {
        self.parse_mssql_create_function(or_alter, or_replace, temporary)
            .map(Into::into)
    } else {
        // Unsupported dialect: rewind so the error points at FUNCTION.
        self.prev_token();
        self.expected_ref("an object type after CREATE", self.peek_token_ref())
    }
}
5610
/// Parse a Postgres-style `CREATE FUNCTION`: name, parenthesized
/// argument list, optional `RETURNS`, then any number of attribute
/// clauses (`AS`, `LANGUAGE`, volatility, null-call behavior,
/// `PARALLEL`, `SECURITY`, `SET`, `RETURN`) in arbitrary order.
/// Each clause except `SET` may appear at most once.
fn parse_postgres_create_function(
    &mut self,
    or_replace: bool,
    temporary: bool,
) -> Result<CreateFunction, ParserError> {
    let name = self.parse_object_name(false)?;

    // Argument list; `()` yields an empty vec.
    self.expect_token(&Token::LParen)?;
    let args = if Token::RParen != self.peek_token_ref().token {
        self.parse_comma_separated(Parser::parse_function_arg)?
    } else {
        vec![]
    };
    self.expect_token(&Token::RParen)?;

    let return_type = if self.parse_keyword(Keyword::RETURNS) {
        Some(self.parse_function_return_type()?)
    } else {
        None
    };

    // Accumulates the order-independent attribute clauses.
    #[derive(Default)]
    struct Body {
        language: Option<Ident>,
        behavior: Option<FunctionBehavior>,
        function_body: Option<CreateFunctionBody>,
        called_on_null: Option<FunctionCalledOnNull>,
        parallel: Option<FunctionParallel>,
        security: Option<FunctionSecurity>,
    }
    let mut body = Body::default();
    // SET clauses may repeat, so they are collected separately.
    let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
    loop {
        // Rejects a clause that was already supplied once.
        fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
            if field.is_some() {
                return Err(ParserError::ParserError(format!(
                    "{name} specified more than once",
                )));
            }
            Ok(())
        }
        if self.parse_keyword(Keyword::AS) {
            ensure_not_set(&body.function_body, "AS")?;
            body.function_body = Some(self.parse_create_function_body_string()?);
        } else if self.parse_keyword(Keyword::LANGUAGE) {
            ensure_not_set(&body.language, "LANGUAGE")?;
            body.language = Some(self.parse_identifier()?);
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
            body.behavior = Some(FunctionBehavior::Immutable);
        } else if self.parse_keyword(Keyword::STABLE) {
            ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
            body.behavior = Some(FunctionBehavior::Stable);
        } else if self.parse_keyword(Keyword::VOLATILE) {
            ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
            body.behavior = Some(FunctionBehavior::Volatile);
        } else if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            ensure_not_set(
                &body.called_on_null,
                "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
            )?;
            body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            ensure_not_set(
                &body.called_on_null,
                "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
            )?;
            body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
        } else if self.parse_keyword(Keyword::STRICT) {
            ensure_not_set(
                &body.called_on_null,
                "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
            )?;
            body.called_on_null = Some(FunctionCalledOnNull::Strict);
        } else if self.parse_keyword(Keyword::PARALLEL) {
            ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
            if self.parse_keyword(Keyword::UNSAFE) {
                body.parallel = Some(FunctionParallel::Unsafe);
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                body.parallel = Some(FunctionParallel::Restricted);
            } else if self.parse_keyword(Keyword::SAFE) {
                body.parallel = Some(FunctionParallel::Safe);
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            }
        } else if self.parse_keyword(Keyword::SECURITY) {
            ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
            if self.parse_keyword(Keyword::DEFINER) {
                body.security = Some(FunctionSecurity::Definer);
            } else if self.parse_keyword(Keyword::INVOKER) {
                body.security = Some(FunctionSecurity::Invoker);
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            }
        } else if self.parse_keyword(Keyword::SET) {
            // SET <name> { FROM CURRENT | { = | TO } { DEFAULT | <values> } }
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    let values = self.parse_comma_separated(Parser::parse_expr)?;
                    FunctionSetValue::Values(values)
                }
            };
            set_params.push(FunctionDefinitionSetParam { name, value });
        } else if self.parse_keyword(Keyword::RETURN) {
            // RETURN <expr> is an alternative to the AS body.
            ensure_not_set(&body.function_body, "RETURN")?;
            body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
        } else {
            // First unrecognized token ends the attribute list.
            break;
        }
    }

    Ok(CreateFunction {
        // or_alter is never produced by this code path.
        or_alter: false,
        or_replace,
        temporary,
        name,
        args: Some(args),
        return_type,
        behavior: body.behavior,
        called_on_null: body.called_on_null,
        parallel: body.parallel,
        security: body.security,
        set_params,
        language: body.language,
        function_body: body.function_body,
        if_not_exists: false,
        using: None,
        determinism_specifier: None,
        options: None,
        remote_connection: None,
    })
}
5765
5766 fn parse_hive_create_function(
5770 &mut self,
5771 or_replace: bool,
5772 temporary: bool,
5773 ) -> Result<CreateFunction, ParserError> {
5774 let name = self.parse_object_name(false)?;
5775 self.expect_keyword_is(Keyword::AS)?;
5776
5777 let body = self.parse_create_function_body_string()?;
5778 let using = self.parse_optional_create_function_using()?;
5779
5780 Ok(CreateFunction {
5781 or_alter: false,
5782 or_replace,
5783 temporary,
5784 name,
5785 function_body: Some(body),
5786 using,
5787 if_not_exists: false,
5788 args: None,
5789 return_type: None,
5790 behavior: None,
5791 called_on_null: None,
5792 parallel: None,
5793 security: None,
5794 set_params: vec![],
5795 language: None,
5796 determinism_specifier: None,
5797 options: None,
5798 remote_connection: None,
5799 })
5800 }
5801
/// Parse a BigQuery-style `CREATE FUNCTION`:
/// `[IF NOT EXISTS] <name>(<params>) [RETURNS <type>]
/// [[NOT] DETERMINISTIC] [LANGUAGE <lang>]
/// [REMOTE WITH CONNECTION <conn>] [OPTIONS (...)] [AS <expr>]`.
fn parse_bigquery_create_function(
    &mut self,
    or_replace: bool,
    temporary: bool,
) -> Result<CreateFunction, ParserError> {
    let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
    let (name, args) = self.parse_create_function_name_and_params()?;

    let return_type = if self.parse_keyword(Keyword::RETURNS) {
        Some(self.parse_function_return_type()?)
    } else {
        None
    };

    let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
        Some(FunctionDeterminismSpecifier::Deterministic)
    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
        Some(FunctionDeterminismSpecifier::NotDeterministic)
    } else {
        None
    };

    let language = if self.parse_keyword(Keyword::LANGUAGE) {
        Some(self.parse_identifier()?)
    } else {
        None
    };

    let remote_connection =
        if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

    // OPTIONS may precede or follow the AS body; try the "before" spot.
    let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

    // A remote function has no AS body; otherwise one is required.
    let function_body = if remote_connection.is_none() {
        self.expect_keyword_is(Keyword::AS)?;
        let expr = self.parse_expr()?;
        if options.is_none() {
            // No OPTIONS before the body, so look for a trailing
            // OPTIONS — the AS clause came first.
            options = self.maybe_parse_options(Keyword::OPTIONS)?;
            Some(CreateFunctionBody::AsBeforeOptions {
                body: expr,
                link_symbol: None,
            })
        } else {
            Some(CreateFunctionBody::AsAfterOptions(expr))
        }
    } else {
        None
    };

    Ok(CreateFunction {
        or_alter: false,
        or_replace,
        temporary,
        if_not_exists,
        name,
        args: Some(args),
        return_type,
        function_body,
        language,
        determinism_specifier,
        options,
        remote_connection,
        using: None,
        behavior: None,
        called_on_null: None,
        parallel: None,
        security: None,
        set_params: vec![],
    })
}
5881
/// Parse an MsSql-style `CREATE FUNCTION`: parameter list, a `RETURNS`
/// clause (a scalar type or a named `@var TABLE (...)` definition),
/// then a body that is either `BEGIN … END`, `RETURN (<expr>)`, or
/// `RETURN SELECT …`.
fn parse_mssql_create_function(
    &mut self,
    or_alter: bool,
    or_replace: bool,
    temporary: bool,
) -> Result<CreateFunction, ParserError> {
    let (name, args) = self.parse_create_function_name_and_params()?;

    self.expect_keyword(Keyword::RETURNS)?;

    // Attempt `RETURNS <name> TABLE (...)`; maybe_parse rewinds on
    // failure so a plain scalar return type still parses below.
    let return_table = self.maybe_parse(|p| {
        let return_table_name = p.parse_identifier()?;

        // Require TABLE after the name, then step back one token so
        // parse_data_type sees the TABLE keyword itself.
        p.expect_keyword_is(Keyword::TABLE)?;
        p.prev_token();

        // The table type must carry a non-empty column list.
        let table_column_defs = match p.parse_data_type()? {
            DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                table_column_defs
            }
            _ => parser_err!(
                "Expected table column definitions after TABLE keyword",
                p.peek_token_ref().span.start
            )?,
        };

        Ok(DataType::NamedTable {
            name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
            columns: table_column_defs,
        })
    })?;

    let data_type = match return_table {
        Some(table_type) => table_type,
        None => self.parse_data_type()?,
    };
    let return_type = Some(FunctionReturnType::DataType(data_type));

    // AS is optional before the function body.
    let _ = self.parse_keyword(Keyword::AS);

    let function_body = if self.peek_keyword(Keyword::BEGIN) {
        // Multi-statement body delimited by BEGIN … END.
        let begin_token = self.expect_keyword(Keyword::BEGIN)?;
        let statements = self.parse_statement_list(&[Keyword::END])?;
        let end_token = self.expect_keyword(Keyword::END)?;

        Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
            begin_token: AttachedToken(begin_token),
            statements,
            end_token: AttachedToken(end_token),
        }))
    } else if self.parse_keyword(Keyword::RETURN) {
        if self.peek_token_ref().token == Token::LParen {
            // RETURN ( <expression or subquery> )
            Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
        } else if self.peek_keyword(Keyword::SELECT) {
            // RETURN SELECT … (bare select statement)
            let select = self.parse_select()?;
            Some(CreateFunctionBody::AsReturnSelect(select))
        } else {
            parser_err!(
                "Expected a subquery (or bare SELECT statement) after RETURN",
                self.peek_token_ref().span.start
            )?
        }
    } else {
        parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
    };

    Ok(CreateFunction {
        or_alter,
        or_replace,
        temporary,
        if_not_exists: false,
        name,
        args: Some(args),
        return_type,
        function_body,
        language: None,
        determinism_specifier: None,
        options: None,
        remote_connection: None,
        using: None,
        behavior: None,
        called_on_null: None,
        parallel: None,
        security: None,
        set_params: vec![],
    })
}
5972
5973 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
5974 if self.parse_keyword(Keyword::SETOF) {
5975 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
5976 } else {
5977 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
5978 }
5979 }
5980
5981 fn parse_create_function_name_and_params(
5982 &mut self,
5983 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5984 let name = self.parse_object_name(false)?;
5985 let parse_function_param =
5986 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5987 let name = parser.parse_identifier()?;
5988 let data_type = parser.parse_data_type()?;
5989 let default_expr = if parser.consume_token(&Token::Eq) {
5990 Some(parser.parse_expr()?)
5991 } else {
5992 None
5993 };
5994
5995 Ok(OperateFunctionArg {
5996 mode: None,
5997 name: Some(name),
5998 data_type,
5999 default_expr,
6000 })
6001 };
6002 self.expect_token(&Token::LParen)?;
6003 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6004 self.expect_token(&Token::RParen)?;
6005 Ok((name, args))
6006 }
6007
/// Parse one argument of a `CREATE FUNCTION` parameter list:
/// `[IN | OUT | INOUT | VARIADIC] [<name>] <type> [DEFAULT <expr> | = <expr>]`.
///
/// The name is optional, so the first token(s) are parsed as a data
/// type; if a second data type then parses successfully, the first
/// parse is reinterpreted as the argument name.
fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
    let mode = if self.parse_keyword(Keyword::IN) {
        Some(ArgMode::In)
    } else if self.parse_keyword(Keyword::OUT) {
        Some(ArgMode::Out)
    } else if self.parse_keyword(Keyword::INOUT) {
        Some(ArgMode::InOut)
    } else if self.parse_keyword(Keyword::VARIADIC) {
        Some(ArgMode::Variadic)
    } else {
        None
    };

    let mut name = None;
    let mut data_type = self.parse_data_type()?;

    // Position recorded after the first data-type parse; token_at below
    // recovers the token that becomes the name when the second parse
    // succeeds.
    // NOTE(review): this relies on get_current_index/token_at index
    // semantics not visible here — confirm the recovered token is the
    // one consumed as the first data type.
    let data_type_idx = self.get_current_index();

    // DEFAULT must not be mistaken for a type name on the second parse.
    fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
        if parser.peek_keyword(Keyword::DEFAULT) {
            parser_err!(
                "The DEFAULT keyword is not a type",
                parser.peek_token_ref().span.start
            )
        } else {
            parser.parse_data_type()
        }
    }

    if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
        let token = self.token_at(data_type_idx);

        // Only a plain word can serve as an argument name.
        if !matches!(token.token, Token::Word(_)) {
            return self.expected("a name or type", token.clone());
        }

        name = Some(Ident::new(token.to_string()));
        data_type = next_data_type;
    }

    // Optional default value, introduced by DEFAULT or `=`.
    let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
    {
        Some(self.parse_expr()?)
    } else {
        None
    };
    Ok(OperateFunctionArg {
        mode,
        name,
        data_type,
        default_expr,
    })
}
6068
/// Parse one argument of a `CREATE AGGREGATE` signature. Only the `IN`
/// mode is accepted (OUT/INOUT/VARIADIC are rejected up front), and
/// default values are not allowed.
fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
    let mode = if self.parse_keyword(Keyword::IN) {
        Some(ArgMode::In)
    } else {
        if self
            .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
            .is_some()
        {
            return self.expected_ref(
                "IN or argument type in aggregate signature",
                self.peek_token_ref(),
            );
        }
        None
    };

    // Same name/type disambiguation as parse_function_arg: parse a type
    // first, and if a second type parses, reinterpret the first as the
    // argument name.
    let mut name = None;
    let mut data_type = self.parse_data_type()?;
    // NOTE(review): relies on get_current_index/token_at index semantics
    // not visible here — confirm the recovered token is the one consumed
    // as the first data type.
    let data_type_idx = self.get_current_index();

    // Tokens that cannot begin an aggregate argument type terminate the
    // speculative second parse.
    fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
        if parser.peek_keyword(Keyword::DEFAULT)
            || parser.peek_keyword(Keyword::ORDER)
            || parser.peek_token_ref().token == Token::Comma
            || parser.peek_token_ref().token == Token::RParen
        {
            parser_err!(
                "The current token cannot start an aggregate argument type",
                parser.peek_token_ref().span.start
            )
        } else {
            parser.parse_data_type()
        }
    }

    if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
        let token = self.token_at(data_type_idx);
        // Only a plain word can serve as an argument name.
        if !matches!(token.token, Token::Word(_)) {
            return self.expected("a name or type", token.clone());
        }

        name = Some(Ident::new(token.to_string()));
        data_type = next_data_type;
    }

    // Aggregate arguments cannot carry defaults.
    if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
        return self.expected_ref(
            "',' or ')' or ORDER BY after aggregate argument type",
            self.peek_token_ref(),
        );
    }

    Ok(OperateFunctionArg {
        mode,
        name,
        data_type,
        default_expr: None,
    })
}
6131
6132 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6138 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6139 {
6140 self.prev_token();
6141 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6142 }
6143 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6144 let trigger_name = self.parse_object_name(false)?;
6145 let table_name = if self.parse_keyword(Keyword::ON) {
6146 Some(self.parse_object_name(false)?)
6147 } else {
6148 None
6149 };
6150 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6151 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6152 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6153 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6154 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6155 )),
6156 None => None,
6157 };
6158 Ok(DropTrigger {
6159 if_exists,
6160 trigger_name,
6161 table_name,
6162 option,
6163 })
6164 }
6165
6166 pub fn parse_create_trigger(
6168 &mut self,
6169 temporary: bool,
6170 or_alter: bool,
6171 or_replace: bool,
6172 is_constraint: bool,
6173 ) -> Result<CreateTrigger, ParserError> {
6174 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6175 {
6176 self.prev_token();
6177 return self.expected_ref("an object type after CREATE", self.peek_token_ref());
6178 }
6179
6180 let name = self.parse_object_name(false)?;
6181 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
6182
6183 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
6184 self.expect_keyword_is(Keyword::ON)?;
6185 let table_name = self.parse_object_name(false)?;
6186
6187 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
6188 self.parse_object_name(true).ok()
6189 } else {
6190 None
6191 };
6192
6193 let characteristics = self.parse_constraint_characteristics()?;
6194
6195 let mut referencing = vec![];
6196 if self.parse_keyword(Keyword::REFERENCING) {
6197 while let Some(refer) = self.parse_trigger_referencing()? {
6198 referencing.push(refer);
6199 }
6200 }
6201
6202 let trigger_object = if self.parse_keyword(Keyword::FOR) {
6203 let include_each = self.parse_keyword(Keyword::EACH);
6204 let trigger_object =
6205 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
6206 Keyword::ROW => TriggerObject::Row,
6207 Keyword::STATEMENT => TriggerObject::Statement,
6208 unexpected_keyword => return Err(ParserError::ParserError(
6209 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
6210 )),
6211 };
6212
6213 Some(if include_each {
6214 TriggerObjectKind::ForEach(trigger_object)
6215 } else {
6216 TriggerObjectKind::For(trigger_object)
6217 })
6218 } else {
6219 let _ = self.parse_keyword(Keyword::FOR);
6220
6221 None
6222 };
6223
6224 let condition = self
6225 .parse_keyword(Keyword::WHEN)
6226 .then(|| self.parse_expr())
6227 .transpose()?;
6228
6229 let mut exec_body = None;
6230 let mut statements = None;
6231 if self.parse_keyword(Keyword::EXECUTE) {
6232 exec_body = Some(self.parse_trigger_exec_body()?);
6233 } else {
6234 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6235 }
6236
6237 Ok(CreateTrigger {
6238 or_alter,
6239 temporary,
6240 or_replace,
6241 is_constraint,
6242 name,
6243 period,
6244 period_before_table: true,
6245 events,
6246 table_name,
6247 referenced_table_name,
6248 referencing,
6249 trigger_object,
6250 condition,
6251 exec_body,
6252 statements_as: false,
6253 statements,
6254 characteristics,
6255 })
6256 }
6257
6258 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6260 Ok(
6261 match self.expect_one_of_keywords(&[
6262 Keyword::FOR,
6263 Keyword::BEFORE,
6264 Keyword::AFTER,
6265 Keyword::INSTEAD,
6266 ])? {
6267 Keyword::FOR => TriggerPeriod::For,
6268 Keyword::BEFORE => TriggerPeriod::Before,
6269 Keyword::AFTER => TriggerPeriod::After,
6270 Keyword::INSTEAD => self
6271 .expect_keyword_is(Keyword::OF)
6272 .map(|_| TriggerPeriod::InsteadOf)?,
6273 unexpected_keyword => return Err(ParserError::ParserError(
6274 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6275 )),
6276 },
6277 )
6278 }
6279
6280 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6282 Ok(
6283 match self.expect_one_of_keywords(&[
6284 Keyword::INSERT,
6285 Keyword::UPDATE,
6286 Keyword::DELETE,
6287 Keyword::TRUNCATE,
6288 ])? {
6289 Keyword::INSERT => TriggerEvent::Insert,
6290 Keyword::UPDATE => {
6291 if self.parse_keyword(Keyword::OF) {
6292 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6293 TriggerEvent::Update(cols)
6294 } else {
6295 TriggerEvent::Update(vec![])
6296 }
6297 }
6298 Keyword::DELETE => TriggerEvent::Delete,
6299 Keyword::TRUNCATE => TriggerEvent::Truncate,
6300 unexpected_keyword => return Err(ParserError::ParserError(
6301 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6302 )),
6303 },
6304 )
6305 }
6306
6307 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6309 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6310 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6311 TriggerReferencingType::OldTable
6312 }
6313 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6314 TriggerReferencingType::NewTable
6315 }
6316 _ => {
6317 return Ok(None);
6318 }
6319 };
6320
6321 let is_as = self.parse_keyword(Keyword::AS);
6322 let transition_relation_name = self.parse_object_name(false)?;
6323 Ok(Some(TriggerReferencing {
6324 refer_type,
6325 is_as,
6326 transition_relation_name,
6327 }))
6328 }
6329
6330 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6332 Ok(TriggerExecBody {
6333 exec_type: match self
6334 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6335 {
6336 Keyword::FUNCTION => TriggerExecBodyType::Function,
6337 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6338 unexpected_keyword => return Err(ParserError::ParserError(
6339 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
6340 )),
6341 },
6342 func_desc: self.parse_function_desc()?,
6343 })
6344 }
6345
6346 pub fn parse_create_macro(
6348 &mut self,
6349 or_replace: bool,
6350 temporary: bool,
6351 ) -> Result<Statement, ParserError> {
6352 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6353 let name = self.parse_object_name(false)?;
6354 self.expect_token(&Token::LParen)?;
6355 let args = if self.consume_token(&Token::RParen) {
6356 self.prev_token();
6357 None
6358 } else {
6359 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6360 };
6361
6362 self.expect_token(&Token::RParen)?;
6363 self.expect_keyword_is(Keyword::AS)?;
6364
6365 Ok(Statement::CreateMacro {
6366 or_replace,
6367 temporary,
6368 name,
6369 args,
6370 definition: if self.parse_keyword(Keyword::TABLE) {
6371 MacroDefinition::Table(self.parse_query()?)
6372 } else {
6373 MacroDefinition::Expr(self.parse_expr()?)
6374 },
6375 })
6376 } else {
6377 self.prev_token();
6378 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6379 }
6380 }
6381
6382 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6383 let name = self.parse_identifier()?;
6384
6385 let default_expr =
6386 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6387 Some(self.parse_expr()?)
6388 } else {
6389 None
6390 };
6391 Ok(MacroArg { name, default_expr })
6392 }
6393
6394 pub fn parse_create_external_table(
6396 &mut self,
6397 or_replace: bool,
6398 ) -> Result<CreateTable, ParserError> {
6399 self.expect_keyword_is(Keyword::TABLE)?;
6400 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6401 let table_name = self.parse_object_name(false)?;
6402 let (columns, constraints) = self.parse_columns()?;
6403
6404 let hive_distribution = self.parse_hive_distribution()?;
6405 let hive_formats = self.parse_hive_formats()?;
6406
6407 let file_format = if let Some(ref hf) = hive_formats {
6408 if let Some(ref ff) = hf.storage {
6409 match ff {
6410 HiveIOFormat::FileFormat { format } => Some(*format),
6411 _ => None,
6412 }
6413 } else {
6414 None
6415 }
6416 } else {
6417 None
6418 };
6419 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6420 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6421 let table_options = if !table_properties.is_empty() {
6422 CreateTableOptions::TableProperties(table_properties)
6423 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6424 CreateTableOptions::Options(options)
6425 } else {
6426 CreateTableOptions::None
6427 };
6428 Ok(CreateTableBuilder::new(table_name)
6429 .columns(columns)
6430 .constraints(constraints)
6431 .hive_distribution(hive_distribution)
6432 .hive_formats(hive_formats)
6433 .table_options(table_options)
6434 .or_replace(or_replace)
6435 .if_not_exists(if_not_exists)
6436 .external(true)
6437 .file_format(file_format)
6438 .location(location)
6439 .build())
6440 }
6441
6442 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6446 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6447 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6448 let table_name = self.parse_object_name(true)?;
6449
6450 self.expect_keyword_is(Keyword::CLONE)?;
6451 let clone = Some(self.parse_object_name(true)?);
6452
6453 let version =
6454 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6455 {
6456 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6457 } else {
6458 None
6459 };
6460
6461 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6462 CreateTableOptions::Options(options)
6463 } else {
6464 CreateTableOptions::None
6465 };
6466
6467 Ok(CreateTableBuilder::new(table_name)
6468 .snapshot(true)
6469 .if_not_exists(if_not_exists)
6470 .clone_clause(clone)
6471 .version(version)
6472 .table_options(table_options)
6473 .build())
6474 }
6475
6476 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6478 let next_token = self.next_token();
6479 match &next_token.token {
6480 Token::Word(w) => match w.keyword {
6481 Keyword::AVRO => Ok(FileFormat::AVRO),
6482 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6483 Keyword::ORC => Ok(FileFormat::ORC),
6484 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6485 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6486 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6487 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6488 _ => self.expected("fileformat", next_token),
6489 },
6490 _ => self.expected("fileformat", next_token),
6491 }
6492 }
6493
6494 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6495 if self.consume_token(&Token::Eq) {
6496 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6497 } else {
6498 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6499 }
6500 }
6501
6502 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6504 let next_token = self.next_token();
6505 match &next_token.token {
6506 Token::Word(w) => match w.keyword {
6507 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6508 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6509 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6510 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6511 _ => self.expected("fileformat", next_token),
6512 },
6513 _ => self.expected("fileformat", next_token),
6514 }
6515 }
6516
    /// Parses a `CREATE ... VIEW` statement. The `CREATE` keyword and any
    /// leading modifiers reflected in the flag arguments (`OR ALTER`,
    /// `OR REPLACE`, `TEMPORARY`, plus dialect-specific view params) have
    /// already been consumed by the caller.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery permits unquoted hyphenated names (e.g. project-id prefixes).
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` may appear before or after the view name; record
        // which position was used so the AST can round-trip the original order.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        // Options start as None; a non-empty WITH(...) list fills them in.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    // A non-empty OPTIONS(...) list overwrites any WITH options
                    // captured above.
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse-style `TO <table>` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '<text>'` before AS, only in dialects that support it.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift-style `WITH NO SCHEMA BINDING` trailing the defining query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
        })
    }
6611
    /// Parses optional view parameters appearing between CREATE and VIEW:
    /// `ALGORITHM = {UNDEFINED|MERGE|TEMPTABLE}`, `DEFINER = <grantee>` and
    /// `SQL SECURITY {DEFINER|INVOKER}`. Returns `Ok(None)` when none of the
    /// three clauses are present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    _ => {
                        // Defensive fallback: rewind to re-read the offending
                        // token so the error message can include it.
                        // (expect_one_of_keywords should already have rejected
                        // anything else — presumably unreachable.)
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    _ => {
                        // Same defensive fallback as the ALGORITHM arm above.
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only materialize the params struct when at least one clause was seen.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6669
    /// Parses a `CREATE ROLE` statement; `CREATE ROLE` has already been
    /// consumed. Accepts one or more role names, then a dialect-dependent set
    /// of options in any order, erroring on duplicates of the same option.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Optional WITH is consumed and ignored; the keywords that may follow
        // depend on the dialect (MSSQL: AUTHORIZATION only; Postgres: the full
        // role-option list; otherwise none).
        let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Accumulators for each option; `Option`/empty-vec means "not seen yet"
        // and is used below to detect duplicate occurrences.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, for error reporting;
            // falls back to 0:0 if the token index is somehow out of range.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            // Each arm yields Result<(), ParserError>; the trailing `?` after
            // the match propagates duplicate-option errors.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password value.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // IN must be followed by ROLE or GROUP.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Keyword not handled above: stop consuming options.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
6894
6895 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6897 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6898 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6899 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6900 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6901 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6902 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6903 )),
6904 None => {
6905 match self.parse_identifier() {
6906 Ok(ident) => Owner::Ident(ident),
6907 Err(e) => {
6908 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6909 }
6910 }
6911 }
6912 };
6913 Ok(owner)
6914 }
6915
6916 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
6918 let name = self.parse_object_name(false)?;
6919 self.expect_keyword_is(Keyword::AS)?;
6920 let data_type = self.parse_data_type()?;
6921 let collation = if self.parse_keyword(Keyword::COLLATE) {
6922 Some(self.parse_identifier()?)
6923 } else {
6924 None
6925 };
6926 let default = if self.parse_keyword(Keyword::DEFAULT) {
6927 Some(self.parse_expr()?)
6928 } else {
6929 None
6930 };
6931 let mut constraints = Vec::new();
6932 while let Some(constraint) = self.parse_optional_table_constraint()? {
6933 constraints.push(constraint);
6934 }
6935
6936 Ok(CreateDomain {
6937 name,
6938 data_type,
6939 collation,
6940 default,
6941 constraints,
6942 })
6943 }
6944
    /// Parses a `CREATE POLICY name ON table [AS ...] [FOR ...] [TO ...]
    /// [USING (...)] [WITH CHECK (...)]` statement; `CREATE POLICY` has
    /// already been consumed by the caller.
    pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `AS PERMISSIVE|RESTRICTIVE`.
        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
                )),
            })
        } else {
            None
        };

        // Optional `FOR ALL|SELECT|INSERT|UPDATE|DELETE`.
        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
                )),
            })
        } else {
            None
        };

        // Optional `TO <role list>`; each entry parsed like an OWNER target.
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // Optional parenthesized `USING (<expr>)` filter.
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // Optional parenthesized `WITH CHECK (<expr>)` condition.
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
7029
7030 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7040 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7041 let name = self.parse_identifier()?;
7042
7043 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7044 Some(self.parse_literal_string()?)
7045 } else {
7046 None
7047 };
7048
7049 let url = if self.parse_keyword(Keyword::URL) {
7050 Some(self.parse_literal_string()?)
7051 } else {
7052 None
7053 };
7054
7055 let comment = self.parse_optional_inline_comment()?;
7056
7057 let with_dcproperties =
7058 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7059 properties if !properties.is_empty() => Some(properties),
7060 _ => None,
7061 };
7062
7063 Ok(CreateConnector {
7064 name,
7065 if_not_exists,
7066 connector_type,
7067 url,
7068 comment,
7069 with_dcproperties,
7070 })
7071 }
7072
7073 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7079 let mut parts = vec![];
7080 loop {
7081 parts.push(ObjectNamePart::Identifier(Ident::new(
7082 self.next_token().to_string(),
7083 )));
7084 if !self.consume_token(&Token::Period) {
7085 break;
7086 }
7087 }
7088 Ok(ObjectName(parts))
7089 }
7090
    /// Parses a `CREATE OPERATOR name ( FUNCTION = f, ... )` statement;
    /// `CREATE OPERATOR` has already been consumed. Each option may appear at
    /// most once (enforced via the match guards below); FUNCTION/PROCEDURE is
    /// mandatory and checked after the option list is exhausted.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        // Option accumulators; None/empty means "not seen yet" so the guards
        // below can reject duplicates.
        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            // Each arm's guard rejects a repeated option by falling through to
            // the catch-all "Duplicate or unexpected keyword" error.
            match keyword {
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    // Value is either `OPERATOR(<name>)` or a bare operator name.
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    // Same two accepted forms as COMMUTATOR above.
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            // Options are comma-separated; no comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION (or PROCEDURE) is required.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7212
7213 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7217 let name = self.parse_object_name(false)?;
7218 self.expect_keyword(Keyword::USING)?;
7219 let using = self.parse_identifier()?;
7220
7221 Ok(CreateOperatorFamily { name, using })
7222 }
7223
    /// Parses a `CREATE OPERATOR CLASS name [DEFAULT] FOR TYPE t USING m
    /// [FAMILY f] AS item, ...` statement; the `CREATE OPERATOR CLASS` prefix
    /// has already been consumed. Items are OPERATOR, FUNCTION or STORAGE
    /// entries separated by commas.
    pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
        let name = self.parse_object_name(false)?;
        let default = self.parse_keyword(Keyword::DEFAULT);
        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
        let for_type = self.parse_data_type()?;
        self.expect_keyword(Keyword::USING)?;
        let using = self.parse_identifier()?;

        let family = if self.parse_keyword(Keyword::FAMILY) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        self.expect_keyword(Keyword::AS)?;

        let mut items = vec![];
        loop {
            if self.parse_keyword(Keyword::OPERATOR) {
                // `OPERATOR <n> <op> [(left, right)] [FOR SEARCH | FOR ORDER BY f]`
                let strategy_number = self.parse_literal_uint()?;
                let operator_name = self.parse_operator_name()?;

                // Optional parenthesized pair of operand types.
                let op_types = if self.consume_token(&Token::LParen) {
                    let left = self.parse_data_type()?;
                    self.expect_token(&Token::Comma)?;
                    let right = self.parse_data_type()?;
                    self.expect_token(&Token::RParen)?;
                    Some(OperatorArgTypes { left, right })
                } else {
                    None
                };

                // Optional purpose clause.
                let purpose = if self.parse_keyword(Keyword::FOR) {
                    if self.parse_keyword(Keyword::SEARCH) {
                        Some(OperatorPurpose::ForSearch)
                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                        let sort_family = self.parse_object_name(false)?;
                        Some(OperatorPurpose::ForOrderBy { sort_family })
                    } else {
                        return self
                            .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
                    }
                } else {
                    None
                };

                items.push(OperatorClassItem::Operator {
                    strategy_number,
                    operator_name,
                    op_types,
                    purpose,
                });
            } else if self.parse_keyword(Keyword::FUNCTION) {
                // `FUNCTION <n> [(op_types...)] fname [(arg_types...)]`
                let support_number = self.parse_literal_uint()?;

                // Optional, possibly-empty parenthesized list of operand types.
                // NOTE: a non-empty list is detected by peeking past LParen;
                // a bare `()` is handled by the second branch.
                let op_types = if self.consume_token(&Token::LParen)
                    && self.peek_token_ref().token != Token::RParen
                {
                    let mut types = vec![];
                    loop {
                        types.push(self.parse_data_type()?);
                        if !self.consume_token(&Token::Comma) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    Some(types)
                } else if self.consume_token(&Token::LParen) {
                    self.expect_token(&Token::RParen)?;
                    Some(vec![])
                } else {
                    None
                };

                let function_name = self.parse_object_name(false)?;

                // Optional argument-type list after the function name.
                let argument_types = if self.consume_token(&Token::LParen) {
                    let mut types = vec![];
                    loop {
                        if self.peek_token_ref().token == Token::RParen {
                            break;
                        }
                        types.push(self.parse_data_type()?);
                        if !self.consume_token(&Token::Comma) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    types
                } else {
                    vec![]
                };

                items.push(OperatorClassItem::Function {
                    support_number,
                    op_types,
                    function_name,
                    argument_types,
                });
            } else if self.parse_keyword(Keyword::STORAGE) {
                // `STORAGE <data_type>`
                let storage_type = self.parse_data_type()?;
                items.push(OperatorClassItem::Storage { storage_type });
            } else {
                break;
            }

            // Items are comma-separated; no comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(CreateOperatorClass {
            name,
            default,
            for_type,
            using,
            family,
            items,
        })
    }
7352
    /// Parse a `DROP` statement, dispatching on the object-type keyword.
    ///
    /// Simple object types (TABLE, VIEW, INDEX, ...) fall through to the
    /// common tail below; object types with bespoke grammars (FUNCTION,
    /// POLICY, OPERATOR, ...) delegate to dedicated `parse_drop_*` helpers
    /// and return early.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // `DROP TEMPORARY ...` (MySQL/DuckDB) and `DROP PERSISTENT ...`
        // (DuckDB-only) modifiers are consumed up-front; they are only
        // forwarded to the secret path below.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::COLLATION) {
            ObjectType::Collation
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function().map(Into::into);
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy().map(Into::into);
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain().map(Into::into);
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger().map(Into::into);
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR has three sub-forms: FAMILY, CLASS, or a bare operator.
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected_ref(
                "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token_ref(),
            );
        };
        // Common tail for the simple object types.
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Remember the location before the modifiers for error reporting.
        let loc = self.peek_token_ref().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // MySQL `DROP INDEX name ON table` names the table after `ON`.
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
7452
7453 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7454 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7455 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7456 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7457 _ => None,
7458 }
7459 }
7460
7461 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7466 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7467 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7468 let drop_behavior = self.parse_optional_drop_behavior();
7469 Ok(DropFunction {
7470 if_exists,
7471 func_desc,
7472 drop_behavior,
7473 })
7474 }
7475
7476 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7482 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7483 let name = self.parse_identifier()?;
7484 self.expect_keyword_is(Keyword::ON)?;
7485 let table_name = self.parse_object_name(false)?;
7486 let drop_behavior = self.parse_optional_drop_behavior();
7487 Ok(DropPolicy {
7488 if_exists,
7489 name,
7490 table_name,
7491 drop_behavior,
7492 })
7493 }
7494 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7500 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7501 let name = self.parse_identifier()?;
7502 Ok(Statement::DropConnector { if_exists, name })
7503 }
7504
7505 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7509 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7510 let name = self.parse_object_name(false)?;
7511 let drop_behavior = self.parse_optional_drop_behavior();
7512 Ok(DropDomain {
7513 if_exists,
7514 name,
7515 drop_behavior,
7516 })
7517 }
7518
7519 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7524 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7525 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7526 let drop_behavior = self.parse_optional_drop_behavior();
7527 Ok(Statement::DropProcedure {
7528 if_exists,
7529 proc_desc,
7530 drop_behavior,
7531 })
7532 }
7533
7534 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7535 let name = self.parse_object_name(false)?;
7536
7537 let args = if self.consume_token(&Token::LParen) {
7538 if self.consume_token(&Token::RParen) {
7539 Some(vec![])
7540 } else {
7541 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7542 self.expect_token(&Token::RParen)?;
7543 Some(args)
7544 }
7545 } else {
7546 None
7547 };
7548
7549 Ok(FunctionDesc { name, args })
7550 }
7551
7552 fn parse_drop_secret(
7554 &mut self,
7555 temporary: bool,
7556 persistent: bool,
7557 ) -> Result<Statement, ParserError> {
7558 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7559 let name = self.parse_identifier()?;
7560 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7561 self.parse_identifier().ok()
7562 } else {
7563 None
7564 };
7565 let temp = match (temporary, persistent) {
7566 (true, false) => Some(true),
7567 (false, true) => Some(false),
7568 (false, false) => None,
7569 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
7570 };
7571
7572 Ok(Statement::DropSecret {
7573 if_exists,
7574 temporary: temp,
7575 name,
7576 storage_specifier,
7577 })
7578 }
7579
    /// Parse a `DECLARE` statement.
    ///
    /// BigQuery, Snowflake and MsSql have their own grammars and are handled
    /// by dedicated helpers; the fallback below parses the ANSI/PostgreSQL
    /// cursor form:
    /// `DECLARE name [BINARY] [ASENSITIVE | INSENSITIVE] [[NO] SCROLL]
    ///  CURSOR [WITH HOLD | WITHOUT HOLD] FOR <query>`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // Always Some(..) in this form: records whether BINARY appeared.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // `WITH HOLD` / `WITHOUT HOLD`; absent means unspecified (None).
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // Unreachable unless parse_one_of_keywords misbehaves.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7654
    /// Parse a BigQuery `DECLARE` statement:
    /// `DECLARE name[, ...] [data_type] [DEFAULT expr]`.
    /// At least one of the data type or the DEFAULT expression must appear.
    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
        let names = self.parse_comma_separated(Parser::parse_identifier)?;

        // If the next word is DEFAULT, there is no explicit data type.
        let data_type = match &self.peek_token_ref().token {
            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
            _ => Some(self.parse_data_type()?),
        };

        let expr = if data_type.is_some() {
            // With a data type the DEFAULT clause is optional.
            if self.parse_keyword(Keyword::DEFAULT) {
                Some(self.parse_expr()?)
            } else {
                None
            }
        } else {
            // Without a data type the DEFAULT clause is mandatory.
            self.expect_keyword_is(Keyword::DEFAULT)?;
            Some(self.parse_expr()?)
        };

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names,
                data_type,
                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
                declare_type: None,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query: None,
            }],
        })
    }
7697
    /// Parse a Snowflake `DECLARE` block: one or more semicolon-separated
    /// declarations of cursors, result sets, exceptions, or plain variables.
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` binds a query; any other
                    // `CURSOR FOR <expr>` binds an expression.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        // Bare `name RESULTSET;` with no initializer.
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // `name EXCEPTION (code, 'message')` — only parse an
                    // initializer when a parenthesized expression follows.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: `name [data_type] [DEFAULT | := expr]`.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                // After a semicolon, a non-keyword word starts another
                // declaration; anything else ends the DECLARE block and the
                // semicolon is pushed back for the caller.
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
7813
7814 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7826 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7827
7828 Ok(Statement::Declare { stmts })
7829 }
7830
    /// Parse a single MsSql declaration item:
    /// `@name [AS] data_type [= expr]` or `name CURSOR [FOR query]`.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            // Variable names must start with '@'; the only exception is a
            // cursor declaration, whose name is a plain identifier.
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): reports a TokenizerError even though
                // tokenization succeeded — ParserError looks more apt.
                // Confirm before changing: callers may match on the variant.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    // Optional `AS` before the data type.
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // Either a `FOR query` (cursor form) or an optional `= expr`.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
7894
7895 pub fn parse_snowflake_variable_declaration_expression(
7903 &mut self,
7904 ) -> Result<Option<DeclareAssignment>, ParserError> {
7905 Ok(match &self.peek_token_ref().token {
7906 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7907 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7909 }
7910 Token::Assignment => {
7911 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
7913 self.parse_expr()?,
7914 )))
7915 }
7916 _ => None,
7917 })
7918 }
7919
7920 pub fn parse_mssql_variable_declaration_expression(
7927 &mut self,
7928 ) -> Result<Option<DeclareAssignment>, ParserError> {
7929 Ok(match &self.peek_token_ref().token {
7930 Token::Eq => {
7931 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
7933 self.parse_expr()?,
7934 )))
7935 }
7936 _ => None,
7937 })
7938 }
7939
7940 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7942 let direction = if self.parse_keyword(Keyword::NEXT) {
7943 FetchDirection::Next
7944 } else if self.parse_keyword(Keyword::PRIOR) {
7945 FetchDirection::Prior
7946 } else if self.parse_keyword(Keyword::FIRST) {
7947 FetchDirection::First
7948 } else if self.parse_keyword(Keyword::LAST) {
7949 FetchDirection::Last
7950 } else if self.parse_keyword(Keyword::ABSOLUTE) {
7951 FetchDirection::Absolute {
7952 limit: self.parse_number_value()?,
7953 }
7954 } else if self.parse_keyword(Keyword::RELATIVE) {
7955 FetchDirection::Relative {
7956 limit: self.parse_number_value()?,
7957 }
7958 } else if self.parse_keyword(Keyword::FORWARD) {
7959 if self.parse_keyword(Keyword::ALL) {
7960 FetchDirection::ForwardAll
7961 } else {
7962 FetchDirection::Forward {
7963 limit: Some(self.parse_number_value()?),
7965 }
7966 }
7967 } else if self.parse_keyword(Keyword::BACKWARD) {
7968 if self.parse_keyword(Keyword::ALL) {
7969 FetchDirection::BackwardAll
7970 } else {
7971 FetchDirection::Backward {
7972 limit: Some(self.parse_number_value()?),
7974 }
7975 }
7976 } else if self.parse_keyword(Keyword::ALL) {
7977 FetchDirection::All
7978 } else {
7979 FetchDirection::Count {
7980 limit: self.parse_number_value()?,
7981 }
7982 };
7983
7984 let position = if self.peek_keyword(Keyword::FROM) {
7985 self.expect_keyword(Keyword::FROM)?;
7986 FetchPosition::From
7987 } else if self.peek_keyword(Keyword::IN) {
7988 self.expect_keyword(Keyword::IN)?;
7989 FetchPosition::In
7990 } else {
7991 return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
7992 };
7993
7994 let name = self.parse_identifier()?;
7995
7996 let into = if self.parse_keyword(Keyword::INTO) {
7997 Some(self.parse_object_name(false)?)
7998 } else {
7999 None
8000 };
8001
8002 Ok(Statement::Fetch {
8003 name,
8004 direction,
8005 position,
8006 into,
8007 })
8008 }
8009
8010 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8012 let object_type = if self.parse_keyword(Keyword::ALL) {
8013 DiscardObject::ALL
8014 } else if self.parse_keyword(Keyword::PLANS) {
8015 DiscardObject::PLANS
8016 } else if self.parse_keyword(Keyword::SEQUENCES) {
8017 DiscardObject::SEQUENCES
8018 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8019 DiscardObject::TEMP
8020 } else {
8021 return self.expected_ref(
8022 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8023 self.peek_token_ref(),
8024 );
8025 };
8026 Ok(Statement::Discard { object_type })
8027 }
8028
    /// Parse `CREATE [UNIQUE] INDEX ...`; the `CREATE [UNIQUE] INDEX` prefix
    /// has already been consumed and `unique` records whether UNIQUE was
    /// present.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let r#async = self.parse_keyword(Keyword::ASYNC);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional (`CREATE INDEX ON tbl ...`); with
        // IF NOT EXISTS a name must follow.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // USING may appear before ON (here) or after the table name.
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // A USING after the table name takes precedence over one seen
        // before ON.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // PostgreSQL `INCLUDE (col, ...)` covering columns.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // PostgreSQL `NULLS [NOT] DISTINCT`; Some(true) == NULLS DISTINCT.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // `WITH (param, ...)` storage parameters, dialect-gated.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Partial-index predicate: `WHERE <expr>`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing ALGORITHM/LOCK options reuse the ALTER TABLE operation
        // parser.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            r#async,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8122
8123 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8125 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8126 let name = self.parse_identifier()?;
8127
8128 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8129 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8130 Some(self.parse_identifier()?)
8131 } else {
8132 None
8133 };
8134
8135 let version = if self.parse_keyword(Keyword::VERSION) {
8136 Some(self.parse_identifier()?)
8137 } else {
8138 None
8139 };
8140
8141 let cascade = self.parse_keyword(Keyword::CASCADE);
8142
8143 (schema, version, cascade)
8144 } else {
8145 (None, None, false)
8146 };
8147
8148 Ok(CreateExtension {
8149 name,
8150 if_not_exists,
8151 schema,
8152 version,
8153 cascade,
8154 })
8155 }
8156
8157 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8159 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8160 let name = self.parse_object_name(false)?;
8161
8162 let definition = if self.parse_keyword(Keyword::FROM) {
8163 CreateCollationDefinition::From(self.parse_object_name(false)?)
8164 } else if self.consume_token(&Token::LParen) {
8165 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8166 self.expect_token(&Token::RParen)?;
8167 CreateCollationDefinition::Options(options)
8168 } else {
8169 return self.expected_ref(
8170 "FROM or parenthesized option list after CREATE COLLATION name",
8171 self.peek_token_ref(),
8172 );
8173 };
8174
8175 Ok(CreateCollation {
8176 if_not_exists,
8177 name,
8178 definition,
8179 })
8180 }
8181
8182 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8184 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8185 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8186 let cascade_or_restrict =
8187 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8188 Ok(Statement::DropExtension(DropExtension {
8189 names,
8190 if_exists,
8191 cascade_or_restrict: cascade_or_restrict
8192 .map(|k| match k {
8193 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8194 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8195 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8196 })
8197 .transpose()?,
8198 }))
8199 }
8200
8201 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8204 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8205 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8206 let drop_behavior = self.parse_optional_drop_behavior();
8207 Ok(Statement::DropOperator(DropOperator {
8208 if_exists,
8209 operators,
8210 drop_behavior,
8211 }))
8212 }
8213
    /// Parse one operator signature for `DROP OPERATOR`:
    /// `name ( { left_type | NONE } , right_type )`.
    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        // NONE in the left slot denotes a prefix (unary) operator.
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;

        let right_type = self.parse_data_type()?;

        self.expect_token(&Token::RParen)?;

        Ok(DropOperatorSignature {
            name,
            left_type,
            right_type,
        })
    }
8240
8241 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8245 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8246 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8247 self.expect_keyword(Keyword::USING)?;
8248 let using = self.parse_identifier()?;
8249 let drop_behavior = self.parse_optional_drop_behavior();
8250 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8251 if_exists,
8252 names,
8253 using,
8254 drop_behavior,
8255 }))
8256 }
8257
8258 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8262 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8263 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8264 self.expect_keyword(Keyword::USING)?;
8265 let using = self.parse_identifier()?;
8266 let drop_behavior = self.parse_optional_drop_behavior();
8267 Ok(Statement::DropOperatorClass(DropOperatorClass {
8268 if_exists,
8269 names,
8270 using,
8271 drop_behavior,
8272 }))
8273 }
8274
8275 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8279 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8280 self.expect_token(&Token::LParen)?;
8281 let columns =
8282 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8283 self.expect_token(&Token::RParen)?;
8284 Ok(HiveDistributionStyle::PARTITIONED { columns })
8285 } else {
8286 Ok(HiveDistributionStyle::NONE)
8287 }
8288 }
8289
8290 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8294 let token = self.next_token();
8295 match &token.token {
8296 Token::Word(w) => match w.keyword {
8297 Keyword::AUTO => Ok(DistStyle::Auto),
8298 Keyword::EVEN => Ok(DistStyle::Even),
8299 Keyword::KEY => Ok(DistStyle::Key),
8300 Keyword::ALL => Ok(DistStyle::All),
8301 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8302 },
8303 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8304 }
8305 }
8306
    /// Parse Hive storage clauses (`ROW FORMAT`, `STORED AS`, `LOCATION`,
    /// `WITH SERDEPROPERTIES`) in any order; returns `None` when no clause
    /// is present.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    // Either explicit INPUTFORMAT/OUTPUTFORMAT classes or a
                    // named file format.
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Push WITH back so the options parser can consume the
                    // whole `WITH SERDEPROPERTIES (...)` clause itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // WITH belonged to some other clause; stop here.
                        break;
                    }
                }
                None => break,
                _ => break,
            }
        }

        Ok(hive_format)
    }
8362
    /// Parse a Hive `ROW FORMAT` clause (the `ROW` keyword was consumed by
    /// the caller): either `FORMAT SERDE 'class'` or `FORMAT DELIMITED`
    /// followed by any number of delimiter specifications.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED form: collect delimiter clauses until the next
                // tokens no longer match a known pattern.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                // ESCAPED BY may only follow FIELDS
                                // TERMINATED BY.
                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8459
8460 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8461 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8462 Ok(Some(self.parse_identifier()?))
8463 } else {
8464 Ok(None)
8465 }
8466 }
8467
8468 pub fn parse_create_table(
8470 &mut self,
8471 or_replace: bool,
8472 temporary: bool,
8473 global: Option<bool>,
8474 transient: bool,
8475 ) -> Result<CreateTable, ParserError> {
8476 let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
8477 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8478 let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
8479
8480 let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
8491 Some(self.parse_object_name(allow_unquoted_hyphen)?)
8492 } else {
8493 None
8494 };
8495
8496 let on_cluster = self.parse_optional_on_cluster()?;
8498
8499 let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
8500
8501 let clone = if self.parse_keyword(Keyword::CLONE) {
8502 self.parse_object_name(allow_unquoted_hyphen).ok()
8503 } else {
8504 None
8505 };
8506
8507 let (columns, constraints) = self.parse_columns()?;
8509 let comment_after_column_def =
8510 if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
8511 let next_token = self.next_token();
8512 match next_token.token {
8513 Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
8514 _ => self.expected("comment", next_token)?,
8515 }
8516 } else {
8517 None
8518 };
8519
8520 let for_values = if partition_of.is_some() {
8522 if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
8523 Some(self.parse_partition_for_values()?)
8524 } else {
8525 return self.expected_ref(
8526 "FOR VALUES or DEFAULT after PARTITION OF",
8527 self.peek_token_ref(),
8528 );
8529 }
8530 } else {
8531 None
8532 };
8533
8534 let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
8536
8537 let hive_distribution = self.parse_hive_distribution()?;
8538 let clustered_by = self.parse_optional_clustered_by()?;
8539 let hive_formats = self.parse_hive_formats()?;
8540
8541 let create_table_config = self.parse_optional_create_table_config()?;
8542
8543 let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
8546 && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
8547 {
8548 Some(Box::new(self.parse_expr()?))
8549 } else {
8550 None
8551 };
8552
8553 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
8554 if self.consume_token(&Token::LParen) {
8555 let columns = if self.peek_token_ref().token != Token::RParen {
8556 self.parse_comma_separated(|p| p.parse_expr())?
8557 } else {
8558 vec![]
8559 };
8560 self.expect_token(&Token::RParen)?;
8561 Some(OneOrManyWithParens::Many(columns))
8562 } else {
8563 Some(OneOrManyWithParens::One(self.parse_expr()?))
8564 }
8565 } else {
8566 None
8567 };
8568
8569 let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
8570 Some(self.parse_create_table_on_commit()?)
8571 } else {
8572 None
8573 };
8574
8575 let strict = self.parse_keyword(Keyword::STRICT);
8576
8577 let backup = if self.parse_keyword(Keyword::BACKUP) {
8579 let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
8580 Some(keyword == Keyword::YES)
8581 } else {
8582 None
8583 };
8584
8585 let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
8587 Some(self.parse_dist_style()?)
8588 } else {
8589 None
8590 };
8591 let distkey = if self.parse_keyword(Keyword::DISTKEY) {
8592 self.expect_token(&Token::LParen)?;
8593 let expr = self.parse_expr()?;
8594 self.expect_token(&Token::RParen)?;
8595 Some(expr)
8596 } else {
8597 None
8598 };
8599 let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
8600 self.expect_token(&Token::LParen)?;
8601 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
8602 self.expect_token(&Token::RParen)?;
8603 Some(columns)
8604 } else {
8605 None
8606 };
8607
8608 let query = if self.parse_keyword(Keyword::AS) {
8610 Some(self.parse_query()?)
8611 } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
8612 {
8613 self.prev_token();
8615 Some(self.parse_query()?)
8616 } else {
8617 None
8618 };
8619
8620 Ok(CreateTableBuilder::new(table_name)
8621 .temporary(temporary)
8622 .columns(columns)
8623 .constraints(constraints)
8624 .or_replace(or_replace)
8625 .if_not_exists(if_not_exists)
8626 .transient(transient)
8627 .hive_distribution(hive_distribution)
8628 .hive_formats(hive_formats)
8629 .global(global)
8630 .query(query)
8631 .without_rowid(without_rowid)
8632 .like(like)
8633 .clone_clause(clone)
8634 .comment_after_column_def(comment_after_column_def)
8635 .order_by(order_by)
8636 .on_commit(on_commit)
8637 .on_cluster(on_cluster)
8638 .clustered_by(clustered_by)
8639 .partition_by(create_table_config.partition_by)
8640 .cluster_by(create_table_config.cluster_by)
8641 .inherits(create_table_config.inherits)
8642 .partition_of(partition_of)
8643 .for_values(for_values)
8644 .table_options(create_table_config.table_options)
8645 .primary_key(primary_key)
8646 .strict(strict)
8647 .backup(backup)
8648 .diststyle(diststyle)
8649 .distkey(distkey)
8650 .sortkey(sortkey)
8651 .build())
8652 }
8653
    /// Parses an optional `LIKE <table>` / `ILIKE <table>` clause of `CREATE TABLE`,
    /// including the parenthesized form `(LIKE <table> [INCLUDING | EXCLUDING
    /// DEFAULTS])` when the dialect supports it.
    ///
    /// Returns `Ok(None)` when no LIKE clause is present, leaving the token
    /// stream positioned as it was on entry.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                // Optional `INCLUDING DEFAULTS` / `EXCLUDING DEFAULTS` modifier.
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The consumed `(` did not start a LIKE clause (e.g. it opens a
                // column list); push it back and report no LIKE clause.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            // Plain, unparenthesized `LIKE <table>` (or `ILIKE <table>`).
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8691
8692 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8693 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8694 Ok(OnCommit::DeleteRows)
8695 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8696 Ok(OnCommit::PreserveRows)
8697 } else if self.parse_keywords(&[Keyword::DROP]) {
8698 Ok(OnCommit::Drop)
8699 } else {
8700 parser_err!(
8701 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8702 self.peek_token_ref()
8703 )
8704 }
8705 }
8706
    /// Parses the partition bound that follows `PARTITION OF` in a
    /// PostgreSQL-style `CREATE TABLE ... PARTITION OF parent`:
    /// `DEFAULT`, `FOR VALUES IN (...)`, `FOR VALUES FROM (...) TO (...)`,
    /// or `FOR VALUES WITH (MODULUS m, REMAINDER r)`.
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        // `DEFAULT` stands alone; no `FOR VALUES` keywords follow it.
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            // List partitioning: `IN (<expr>, ...)`; the list must be non-empty.
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            // Range partitioning: `FROM (<bound>, ...) TO (<bound>, ...)`.
            // Each bound may be an expression or MINVALUE/MAXVALUE; both lists
            // must be non-empty.
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            // Hash partitioning: `WITH (MODULUS <uint>, REMAINDER <uint>)`.
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
        }
    }
8758
8759 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8761 if self.parse_keyword(Keyword::MINVALUE) {
8762 Ok(PartitionBoundValue::MinValue)
8763 } else if self.parse_keyword(Keyword::MAXVALUE) {
8764 Ok(PartitionBoundValue::MaxValue)
8765 } else {
8766 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8767 }
8768 }
8769
    /// Parses trailing `CREATE TABLE` configuration shared across dialects:
    /// `INHERITS (...)`, one of several option-list styles (`WITH`,
    /// `TBLPROPERTIES`, `OPTIONS`, or plain `key = value`), `PARTITION BY`,
    /// and `CLUSTER BY`.
    ///
    /// Only a single option-list style is retained in `table_options`; a later
    /// match overwrites an earlier one.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // PostgreSQL-style `INHERITS (parent, ...)`.
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // `WITH (...)`; recorded only when at least one option was parsed.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive-style `TBLPROPERTIES (...)`; replaces any WITH options above.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // BigQuery `OPTIONS (...)`; peeked first so nothing is consumed
            // unless the keyword is actually present.
            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain `key = value` options when no other option list
        // matched; Hive is excluded from this fallback.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
8834
    /// Parses one MySQL-style table option such as `ENGINE = InnoDB`,
    /// `COMMENT 'x'`, `TABLESPACE ts STORAGE DISK`, `UNION = (t1, t2)`, or a
    /// generic `<key> [=] <value>` pair (`DEFAULT CHARSET = utf8mb4`, ...).
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start a recognized
    /// option; this is what terminates the loop in `parse_plain_options`.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // `START TRANSACTION` is kept verbatim as an identifier option.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // `COMMENT [=] '<text>'` — whether `=` appeared is preserved so the
        // statement can be round-tripped faithfully.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // `ENGINE [=] <name>[(<params>)]`.
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    // Optional parenthesized engine parameters.
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // `TABLESPACE [=] <name> [STORAGE [=] DISK|MEMORY]`; the name may be a
        // bare word or a single-quoted string.
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // `UNION = (tbl, ...)` (MySQL MERGE tables).
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Remaining options are simple `<key> [=] <value>` pairs. Multi-word
        // keys must be tried before their single-word prefixes (e.g.
        // `DEFAULT CHARSET` before `CHARSET`) so the longest match wins.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option key: signal end of the option list.
            return Ok(None);
        };

        let _ = self.consume_token(&Token::Eq);

        // The value may be a literal, or a bare identifier (e.g.
        // `ROW_FORMAT = DYNAMIC`).
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9027
9028 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9030 let mut options = Vec::new();
9031
9032 while let Some(option) = self.parse_plain_option()? {
9033 options.push(option);
9034 let _ = self.consume_token(&Token::Comma);
9037 }
9038
9039 Ok(options)
9040 }
9041
9042 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9044 let comment = if self.parse_keyword(Keyword::COMMENT) {
9045 let has_eq = self.consume_token(&Token::Eq);
9046 let comment = self.parse_comment_value()?;
9047 Some(if has_eq {
9048 CommentDef::WithEq(comment)
9049 } else {
9050 CommentDef::WithoutEq(comment)
9051 })
9052 } else {
9053 None
9054 };
9055 Ok(comment)
9056 }
9057
9058 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9060 let next_token = self.next_token();
9061 let value = match next_token.token {
9062 Token::SingleQuotedString(str) => str,
9063 Token::DollarQuotedString(str) => str.value,
9064 _ => self.expected("string literal", next_token)?,
9065 };
9066 Ok(value)
9067 }
9068
    /// Parses an optional parenthesized procedure parameter list.
    ///
    /// Returns `Ok(Some(vec![]))` both when no `(` follows and when the list
    /// is the empty `()`.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // A word token starts another parameter definition.
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            // Each parameter must be followed by `,` (more to come) or `)` (end).
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9094
    /// Parses the parenthesized list of column definitions and table
    /// constraints of a `CREATE TABLE`, returning the two kinds separately.
    ///
    /// Returns empty lists when no `(` follows or when the list is the
    /// empty `()`.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // A table constraint is tried first; otherwise a word token starts
            // a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Close the list on `)`; a trailing comma before `)` is accepted
            // only when the dialect or the parser options allow it.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9135
9136 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9138 let mode = if self.parse_keyword(Keyword::IN) {
9139 Some(ArgMode::In)
9140 } else if self.parse_keyword(Keyword::OUT) {
9141 Some(ArgMode::Out)
9142 } else if self.parse_keyword(Keyword::INOUT) {
9143 Some(ArgMode::InOut)
9144 } else {
9145 None
9146 };
9147 let name = self.parse_identifier()?;
9148 let data_type = self.parse_data_type()?;
9149 let default = if self.consume_token(&Token::Eq) {
9150 Some(self.parse_expr()?)
9151 } else {
9152 None
9153 };
9154
9155 Ok(ProcedureParam {
9156 name,
9157 data_type,
9158 mode,
9159 default,
9160 })
9161 }
9162
    /// Parses a column definition (`<name> <type> [options...]`), requiring a
    /// data type (`optional_data_type = false` in the shared implementation).
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9167
    /// Shared implementation for parsing a column definition.
    ///
    /// When `optional_data_type` is true a missing type becomes
    /// `DataType::Unspecified`; SQLite additionally allows omitting the type
    /// whenever the next token starts a column option.
    fn parse_column_def_inner(
        &mut self,
        optional_data_type: bool,
    ) -> Result<ColumnDef, ParserError> {
        let col_name = self.parse_identifier()?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else if optional_data_type {
            self.maybe_parse(|parser| parser.parse_data_type())?
                .unwrap_or(DataType::Unspecified)
        } else {
            self.parse_data_type()?
        };
        let mut options = vec![];
        // Accumulate column options until none matches.
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // Named option: `CONSTRAINT <name> <option>` — the option part
                // is mandatory once the name was parsed.
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected_ref(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token_ref(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                break;
            };
        }
        Ok(ColumnDef {
            name: col_name,
            data_type,
            options,
        })
    }
9205
9206 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9207 if dialect_of!(self is SQLiteDialect) {
9208 match &self.peek_token_ref().token {
9209 Token::Word(word) => matches!(
9210 word.keyword,
9211 Keyword::CONSTRAINT
9212 | Keyword::PRIMARY
9213 | Keyword::NOT
9214 | Keyword::UNIQUE
9215 | Keyword::CHECK
9216 | Keyword::DEFAULT
9217 | Keyword::COLLATE
9218 | Keyword::REFERENCES
9219 | Keyword::GENERATED
9220 | Keyword::AS
9221 ),
9222 _ => true, }
9224 } else {
9225 false
9226 }
9227 }
9228
9229 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9231 if let Some(option) = self.dialect.parse_column_option(self)? {
9232 return option;
9233 }
9234
9235 self.with_state(
9236 ColumnDefinition,
9237 |parser| -> Result<Option<ColumnOption>, ParserError> {
9238 parser.parse_optional_column_option_inner()
9239 },
9240 )
9241 }
9242
    /// Generic column-option parser: tries each known option form in order and
    /// returns `Ok(None)` when nothing matches. Branch order matters — e.g.
    /// `NOT NULL` must be tried before `NULL`, and `PRIMARY KEY` before the
    /// bare `KEY` form.
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // EPHEMERAL's default expression is optional: a following `,` or
            // `)` means none was given.
            if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            // Some dialects allow `UNIQUE KEY`; record whether `KEY` appeared
            // so the statement can be round-tripped.
            let index_type_display =
                if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
                    KeyOrIndexDisplay::Key
                } else {
                    KeyOrIndexDisplay::None
                };
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
            // Bare `KEY` is treated as a primary-key constraint in dialects
            // that support it.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            // MATCH / ON DELETE / ON UPDATE may appear in any order, each at
            // most once.
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None, index_name: None, columns: vec![], foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The CHECK expression is parsed in the normal state so
            // column-definition-specific rules don't apply inside it.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;

            let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                Some(true)
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                Some(false)
            } else {
                None
            };

            Ok(Some(
                CheckConstraint {
                    name: None, expr: Box::new(expr),
                    enforced,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): here (and in the keyword-first branches below) the
            // keyword is consumed before the dialect check; if the dialect test
            // fails, the token is not restored — confirm this ordering is
            // intentional.
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // Step back so parse_options sees the OPTIONS keyword itself.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // Optional `(seed, increment)` arguments to IDENTITY.
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
9478
9479 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9480 let name = self.parse_object_name(false)?;
9481 self.expect_token(&Token::Eq)?;
9482 let value = self.parse_literal_string()?;
9483
9484 Ok(Tag::new(name, value))
9485 }
9486
    /// Parses what follows an already-consumed `GENERATED` keyword in a column
    /// definition: `ALWAYS AS IDENTITY [(...)]`, `BY DEFAULT AS IDENTITY
    /// [(...)]`, or `ALWAYS AS (<expr>) [STORED | VIRTUAL]`.
    ///
    /// Returns `Ok(None)` when none of the recognized forms follows.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            // Optional parenthesized sequence options: the `(` error is
            // deliberately discarded to treat the list as optional.
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            // Same optional sequence-option list as the ALWAYS variant.
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // The generation expression is parsed in the normal state so
                // column-definition-specific rules don't apply inside it.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // PostgreSQL requires STORED on expression-generated columns.
                    self.expected_ref("STORED", self.peek_token_ref())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
9553
9554 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9555 self.expect_token(&Token::LParen)?;
9557 let expr = self.parse_expr()?;
9558 self.expect_token(&Token::RParen)?;
9559
9560 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9561 (
9562 GeneratedAs::ExpStored,
9563 Some(GeneratedExpressionMode::Stored),
9564 )
9565 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9566 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9567 } else {
9568 (GeneratedAs::Always, None)
9569 };
9570
9571 Ok(Some(ColumnOption::Generated {
9572 generated_as: gen_as,
9573 sequence_options: None,
9574 generation_expr: Some(expr),
9575 generation_expr_mode: expr_mode,
9576 generated_keyword: false,
9577 }))
9578 }
9579
    /// Parses an optional Hive-style `CLUSTERED BY (cols) [SORTED BY (...)]
    /// INTO <n> BUCKETS` clause.
    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
        {
            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

            // Optional `SORTED BY (<order-by-expr>, ...)`.
            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
                self.expect_token(&Token::LParen)?;
                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
                self.expect_token(&Token::RParen)?;
                Some(sorted_by_columns)
            } else {
                None
            };

            // The bucket count is mandatory once CLUSTERED BY was seen.
            self.expect_keyword_is(Keyword::INTO)?;
            let num_buckets = self.parse_number_value()?.value;
            self.expect_keyword_is(Keyword::BUCKETS)?;
            Some(ClusteredBy {
                columns,
                sorted_by,
                num_buckets,
            })
        } else {
            None
        };
        Ok(clustered_by)
    }
9609
9610 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9614 if self.parse_keyword(Keyword::RESTRICT) {
9615 Ok(ReferentialAction::Restrict)
9616 } else if self.parse_keyword(Keyword::CASCADE) {
9617 Ok(ReferentialAction::Cascade)
9618 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9619 Ok(ReferentialAction::SetNull)
9620 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9621 Ok(ReferentialAction::NoAction)
9622 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9623 Ok(ReferentialAction::SetDefault)
9624 } else {
9625 self.expected_ref(
9626 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9627 self.peek_token_ref(),
9628 )
9629 }
9630 }
9631
9632 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9634 if self.parse_keyword(Keyword::FULL) {
9635 Ok(ConstraintReferenceMatchKind::Full)
9636 } else if self.parse_keyword(Keyword::PARTIAL) {
9637 Ok(ConstraintReferenceMatchKind::Partial)
9638 } else if self.parse_keyword(Keyword::SIMPLE) {
9639 Ok(ConstraintReferenceMatchKind::Simple)
9640 } else {
9641 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9642 }
9643 }
9644
    /// Parses the tail of a `... USING INDEX <index_name>` constraint:
    /// the index identifier plus any trailing constraint characteristics
    /// (e.g. `DEFERRABLE`, `INITIALLY DEFERRED`).
    ///
    /// `name` is the optional `CONSTRAINT <name>` prefix already consumed
    /// by the caller.
    fn parse_constraint_using_index(
        &mut self,
        name: Option<Ident>,
    ) -> Result<ConstraintUsingIndex, ParserError> {
        let index_name = self.parse_identifier()?;
        let characteristics = self.parse_constraint_characteristics()?;
        Ok(ConstraintUsingIndex {
            name,
            index_name,
            characteristics,
        })
    }
9659
9660 pub fn parse_constraint_characteristics(
9662 &mut self,
9663 ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9664 let mut cc = ConstraintCharacteristics::default();
9665
9666 loop {
9667 if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9668 {
9669 cc.deferrable = Some(false);
9670 } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9671 cc.deferrable = Some(true);
9672 } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9673 if self.parse_keyword(Keyword::DEFERRED) {
9674 cc.initially = Some(DeferrableInitial::Deferred);
9675 } else if self.parse_keyword(Keyword::IMMEDIATE) {
9676 cc.initially = Some(DeferrableInitial::Immediate);
9677 } else {
9678 self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
9679 }
9680 } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
9681 cc.enforced = Some(true);
9682 } else if cc.enforced.is_none()
9683 && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
9684 {
9685 cc.enforced = Some(false);
9686 } else {
9687 break;
9688 }
9689 }
9690
9691 if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
9692 Ok(Some(cc))
9693 } else {
9694 Ok(None)
9695 }
9696 }
9697
    /// Parses an optional table-level constraint, as found in the element
    /// list of `CREATE TABLE` or after `ALTER TABLE ... ADD`:
    ///
    /// `[CONSTRAINT [<name>]] { UNIQUE | PRIMARY KEY | FOREIGN KEY |
    /// CHECK (<expr>) | INDEX | KEY | FULLTEXT | SPATIAL } ...`
    ///
    /// Returns `Ok(None)` (with no tokens consumed) when the next token
    /// does not start a constraint. If a `CONSTRAINT <name>` prefix was
    /// consumed, a constraint body becomes mandatory and its absence is a
    /// parse error.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT [<name>]` prefix. Some dialects accept a
        // bare CONSTRAINT keyword (no name) when a constraint body follows
        // immediately.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // Probe the next token; the catch-all arm rewinds it when it does
        // not introduce a constraint.
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX <name>` attaches an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // MySQL-style `UNIQUE [KEY|INDEX]` display keyword; reject
                // it for dialects other than MySQL/Generic.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                // Postgres `NULLS [NOT] DISTINCT` modifier.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                // Optional index name and `USING <index_type>`, then the
                // column list and trailing index options/characteristics.
                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX <name>` attaches an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order,
                // but each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // MySQL's optional `[NOT] ENFORCED` suffix.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            // MySQL-style free-standing `INDEX`/`KEY` definition. Only
            // matches when no `CONSTRAINT <name>` prefix was given.
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // A following `USING` is the index type, not an index name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            // MySQL `FULLTEXT`/`SPATIAL` index definitions; these do not
            // permit a `CONSTRAINT <name>` prefix.
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // `CONSTRAINT <name>` was consumed, so a constraint
                    // body is mandatory here.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint: push the probed token back.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
9930
9931 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9932 Ok(if self.parse_keyword(Keyword::NULLS) {
9933 let not = self.parse_keyword(Keyword::NOT);
9934 self.expect_keyword_is(Keyword::DISTINCT)?;
9935 if not {
9936 NullsDistinctOption::NotDistinct
9937 } else {
9938 NullsDistinctOption::Distinct
9939 }
9940 } else {
9941 NullsDistinctOption::None
9942 })
9943 }
9944
9945 pub fn maybe_parse_options(
9947 &mut self,
9948 keyword: Keyword,
9949 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9950 if let Token::Word(word) = &self.peek_token_ref().token {
9951 if word.keyword == keyword {
9952 return Ok(Some(self.parse_options(keyword)?));
9953 }
9954 };
9955 Ok(None)
9956 }
9957
9958 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9960 if self.parse_keyword(keyword) {
9961 self.expect_token(&Token::LParen)?;
9962 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9963 self.expect_token(&Token::RParen)?;
9964 Ok(options)
9965 } else {
9966 Ok(vec![])
9967 }
9968 }
9969
9970 pub fn parse_options_with_keywords(
9972 &mut self,
9973 keywords: &[Keyword],
9974 ) -> Result<Vec<SqlOption>, ParserError> {
9975 if self.parse_keywords(keywords) {
9976 self.expect_token(&Token::LParen)?;
9977 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9978 self.expect_token(&Token::RParen)?;
9979 Ok(options)
9980 } else {
9981 Ok(vec![])
9982 }
9983 }
9984
9985 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9987 Ok(if self.parse_keyword(Keyword::BTREE) {
9988 IndexType::BTree
9989 } else if self.parse_keyword(Keyword::HASH) {
9990 IndexType::Hash
9991 } else if self.parse_keyword(Keyword::GIN) {
9992 IndexType::GIN
9993 } else if self.parse_keyword(Keyword::GIST) {
9994 IndexType::GiST
9995 } else if self.parse_keyword(Keyword::SPGIST) {
9996 IndexType::SPGiST
9997 } else if self.parse_keyword(Keyword::BRIN) {
9998 IndexType::BRIN
9999 } else if self.parse_keyword(Keyword::BLOOM) {
10000 IndexType::Bloom
10001 } else {
10002 IndexType::Custom(self.parse_identifier()?)
10003 })
10004 }
10005
10006 pub fn parse_optional_using_then_index_type(
10013 &mut self,
10014 ) -> Result<Option<IndexType>, ParserError> {
10015 if self.parse_keyword(Keyword::USING) {
10016 Ok(Some(self.parse_index_type()?))
10017 } else {
10018 Ok(None)
10019 }
10020 }
10021
10022 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10026 self.maybe_parse(|parser| parser.parse_identifier())
10027 }
10028
10029 #[must_use]
10030 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10032 if self.parse_keyword(Keyword::KEY) {
10033 KeyOrIndexDisplay::Key
10034 } else if self.parse_keyword(Keyword::INDEX) {
10035 KeyOrIndexDisplay::Index
10036 } else {
10037 KeyOrIndexDisplay::None
10038 }
10039 }
10040
10041 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10043 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10044 Ok(Some(IndexOption::Using(index_type)))
10045 } else if self.parse_keyword(Keyword::COMMENT) {
10046 let s = self.parse_literal_string()?;
10047 Ok(Some(IndexOption::Comment(s)))
10048 } else {
10049 Ok(None)
10050 }
10051 }
10052
10053 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10055 let mut options = Vec::new();
10056
10057 loop {
10058 match self.parse_optional_index_option()? {
10059 Some(index_option) => options.push(index_option),
10060 None => return Ok(options),
10061 }
10062 }
10063 }
10064
10065 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10067 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10068
10069 match &self.peek_token_ref().token {
10070 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10071 Ok(SqlOption::Ident(self.parse_identifier()?))
10072 }
10073 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10074 self.parse_option_partition()
10075 }
10076 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10077 self.parse_option_clustered()
10078 }
10079 _ => {
10080 let name = self.parse_identifier()?;
10081 self.expect_token(&Token::Eq)?;
10082 let value = self.parse_expr()?;
10083
10084 Ok(SqlOption::KeyValue { key: name, value })
10085 }
10086 }
10087 }
10088
10089 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
10091 if self.parse_keywords(&[
10092 Keyword::CLUSTERED,
10093 Keyword::COLUMNSTORE,
10094 Keyword::INDEX,
10095 Keyword::ORDER,
10096 ]) {
10097 Ok(SqlOption::Clustered(
10098 TableOptionsClustered::ColumnstoreIndexOrder(
10099 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10100 ),
10101 ))
10102 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
10103 Ok(SqlOption::Clustered(
10104 TableOptionsClustered::ColumnstoreIndex,
10105 ))
10106 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
10107 self.expect_token(&Token::LParen)?;
10108
10109 let columns = self.parse_comma_separated(|p| {
10110 let name = p.parse_identifier()?;
10111 let asc = p.parse_asc_desc();
10112
10113 Ok(ClusteredIndex { name, asc })
10114 })?;
10115
10116 self.expect_token(&Token::RParen)?;
10117
10118 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
10119 } else {
10120 Err(ParserError::ParserError(
10121 "invalid CLUSTERED sequence".to_string(),
10122 ))
10123 }
10124 }
10125
10126 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10128 self.expect_keyword_is(Keyword::PARTITION)?;
10129 self.expect_token(&Token::LParen)?;
10130 let column_name = self.parse_identifier()?;
10131
10132 self.expect_keyword_is(Keyword::RANGE)?;
10133 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10134 Some(PartitionRangeDirection::Left)
10135 } else if self.parse_keyword(Keyword::RIGHT) {
10136 Some(PartitionRangeDirection::Right)
10137 } else {
10138 None
10139 };
10140
10141 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10142 self.expect_token(&Token::LParen)?;
10143
10144 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10145
10146 self.expect_token(&Token::RParen)?;
10147 self.expect_token(&Token::RParen)?;
10148
10149 Ok(SqlOption::Partition {
10150 column_name,
10151 range_direction,
10152 for_values,
10153 })
10154 }
10155
10156 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10158 self.expect_token(&Token::LParen)?;
10159 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10160 self.expect_token(&Token::RParen)?;
10161 Ok(Partition::Partitions(partitions))
10162 }
10163
10164 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10166 self.expect_token(&Token::LParen)?;
10167 self.expect_keyword_is(Keyword::SELECT)?;
10168 let projection = self.parse_projection()?;
10169 let group_by = self.parse_optional_group_by()?;
10170 let order_by = self.parse_optional_order_by()?;
10171 self.expect_token(&Token::RParen)?;
10172 Ok(ProjectionSelect {
10173 projection,
10174 group_by,
10175 order_by,
10176 })
10177 }
10178 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10180 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10181 let name = self.parse_identifier()?;
10182 let query = self.parse_projection_select()?;
10183 Ok(AlterTableOperation::AddProjection {
10184 if_not_exists,
10185 name,
10186 select: query,
10187 })
10188 }
10189
10190 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10194 self.expect_keyword_is(Keyword::ALTER)?;
10195 self.expect_keyword_is(Keyword::SORTKEY)?;
10196 self.expect_token(&Token::LParen)?;
10197 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10198 self.expect_token(&Token::RParen)?;
10199 Ok(AlterTableOperation::AlterSortKey { columns })
10200 }
10201
10202 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10204 let operation = if self.parse_keyword(Keyword::ADD) {
10205 if let Some(constraint) = self.parse_optional_table_constraint()? {
10206 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10207 AlterTableOperation::AddConstraint {
10208 constraint,
10209 not_valid,
10210 }
10211 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10212 && self.parse_keyword(Keyword::PROJECTION)
10213 {
10214 return self.parse_alter_table_add_projection();
10215 } else {
10216 let if_not_exists =
10217 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10218 let mut new_partitions = vec![];
10219 loop {
10220 if self.parse_keyword(Keyword::PARTITION) {
10221 new_partitions.push(self.parse_partition()?);
10222 } else {
10223 break;
10224 }
10225 }
10226 if !new_partitions.is_empty() {
10227 AlterTableOperation::AddPartitions {
10228 if_not_exists,
10229 new_partitions,
10230 }
10231 } else {
10232 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10233
10234 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10235 {
10236 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10237 || if_not_exists
10238 } else {
10239 false
10240 };
10241
10242 let column_def = self.parse_column_def()?;
10243
10244 let column_position = self.parse_column_position()?;
10245
10246 AlterTableOperation::AddColumn {
10247 column_keyword,
10248 if_not_exists,
10249 column_def,
10250 column_position,
10251 }
10252 }
10253 }
10254 } else if self.parse_keyword(Keyword::RENAME) {
10255 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10256 let old_name = self.parse_identifier()?;
10257 self.expect_keyword_is(Keyword::TO)?;
10258 let new_name = self.parse_identifier()?;
10259 AlterTableOperation::RenameConstraint { old_name, new_name }
10260 } else if self.parse_keyword(Keyword::TO) {
10261 let table_name = self.parse_object_name(false)?;
10262 AlterTableOperation::RenameTable {
10263 table_name: RenameTableNameKind::To(table_name),
10264 }
10265 } else if self.parse_keyword(Keyword::AS) {
10266 let table_name = self.parse_object_name(false)?;
10267 AlterTableOperation::RenameTable {
10268 table_name: RenameTableNameKind::As(table_name),
10269 }
10270 } else {
10271 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10273 self.expect_keyword_is(Keyword::TO)?;
10274 let new_column_name = self.parse_identifier()?;
10275 AlterTableOperation::RenameColumn {
10276 old_column_name,
10277 new_column_name,
10278 }
10279 }
10280 } else if self.parse_keyword(Keyword::DISABLE) {
10281 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10282 AlterTableOperation::DisableRowLevelSecurity {}
10283 } else if self.parse_keyword(Keyword::RULE) {
10284 let name = self.parse_identifier()?;
10285 AlterTableOperation::DisableRule { name }
10286 } else if self.parse_keyword(Keyword::TRIGGER) {
10287 let name = self.parse_identifier()?;
10288 AlterTableOperation::DisableTrigger { name }
10289 } else {
10290 return self.expected_ref(
10291 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10292 self.peek_token_ref(),
10293 );
10294 }
10295 } else if self.parse_keyword(Keyword::ENABLE) {
10296 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10297 let name = self.parse_identifier()?;
10298 AlterTableOperation::EnableAlwaysRule { name }
10299 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10300 let name = self.parse_identifier()?;
10301 AlterTableOperation::EnableAlwaysTrigger { name }
10302 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10303 AlterTableOperation::EnableRowLevelSecurity {}
10304 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10305 let name = self.parse_identifier()?;
10306 AlterTableOperation::EnableReplicaRule { name }
10307 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10308 let name = self.parse_identifier()?;
10309 AlterTableOperation::EnableReplicaTrigger { name }
10310 } else if self.parse_keyword(Keyword::RULE) {
10311 let name = self.parse_identifier()?;
10312 AlterTableOperation::EnableRule { name }
10313 } else if self.parse_keyword(Keyword::TRIGGER) {
10314 let name = self.parse_identifier()?;
10315 AlterTableOperation::EnableTrigger { name }
10316 } else {
10317 return self.expected_ref(
10318 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10319 self.peek_token_ref(),
10320 );
10321 }
10322 } else if self.parse_keywords(&[
10323 Keyword::FORCE,
10324 Keyword::ROW,
10325 Keyword::LEVEL,
10326 Keyword::SECURITY,
10327 ]) {
10328 AlterTableOperation::ForceRowLevelSecurity
10329 } else if self.parse_keywords(&[
10330 Keyword::NO,
10331 Keyword::FORCE,
10332 Keyword::ROW,
10333 Keyword::LEVEL,
10334 Keyword::SECURITY,
10335 ]) {
10336 AlterTableOperation::NoForceRowLevelSecurity
10337 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10338 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10339 {
10340 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10341 let name = self.parse_identifier()?;
10342 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10343 Some(self.parse_identifier()?)
10344 } else {
10345 None
10346 };
10347 AlterTableOperation::ClearProjection {
10348 if_exists,
10349 name,
10350 partition,
10351 }
10352 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10353 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10354 {
10355 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10356 let name = self.parse_identifier()?;
10357 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10358 Some(self.parse_identifier()?)
10359 } else {
10360 None
10361 };
10362 AlterTableOperation::MaterializeProjection {
10363 if_exists,
10364 name,
10365 partition,
10366 }
10367 } else if self.parse_keyword(Keyword::DROP) {
10368 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10369 self.expect_token(&Token::LParen)?;
10370 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10371 self.expect_token(&Token::RParen)?;
10372 AlterTableOperation::DropPartitions {
10373 partitions,
10374 if_exists: true,
10375 }
10376 } else if self.parse_keyword(Keyword::PARTITION) {
10377 self.expect_token(&Token::LParen)?;
10378 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10379 self.expect_token(&Token::RParen)?;
10380 AlterTableOperation::DropPartitions {
10381 partitions,
10382 if_exists: false,
10383 }
10384 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10385 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10386 let name = self.parse_identifier()?;
10387 let drop_behavior = self.parse_optional_drop_behavior();
10388 AlterTableOperation::DropConstraint {
10389 if_exists,
10390 name,
10391 drop_behavior,
10392 }
10393 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10394 let drop_behavior = self.parse_optional_drop_behavior();
10395 AlterTableOperation::DropPrimaryKey { drop_behavior }
10396 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10397 let name = self.parse_identifier()?;
10398 let drop_behavior = self.parse_optional_drop_behavior();
10399 AlterTableOperation::DropForeignKey {
10400 name,
10401 drop_behavior,
10402 }
10403 } else if self.parse_keyword(Keyword::INDEX) {
10404 let name = self.parse_identifier()?;
10405 AlterTableOperation::DropIndex { name }
10406 } else if self.parse_keyword(Keyword::PROJECTION)
10407 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10408 {
10409 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10410 let name = self.parse_identifier()?;
10411 AlterTableOperation::DropProjection { if_exists, name }
10412 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10413 AlterTableOperation::DropClusteringKey
10414 } else {
10415 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10417 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10418 self.parse_comma_separated(Parser::parse_identifier)?
10419 } else {
10420 vec![self.parse_identifier()?]
10421 };
10422 let drop_behavior = self.parse_optional_drop_behavior();
10423 AlterTableOperation::DropColumn {
10424 has_column_keyword,
10425 column_names,
10426 if_exists,
10427 drop_behavior,
10428 }
10429 }
10430 } else if self.parse_keyword(Keyword::PARTITION) {
10431 self.expect_token(&Token::LParen)?;
10432 let before = self.parse_comma_separated(Parser::parse_expr)?;
10433 self.expect_token(&Token::RParen)?;
10434 self.expect_keyword_is(Keyword::RENAME)?;
10435 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10436 self.expect_token(&Token::LParen)?;
10437 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10438 self.expect_token(&Token::RParen)?;
10439 AlterTableOperation::RenamePartitions {
10440 old_partitions: before,
10441 new_partitions: renames,
10442 }
10443 } else if self.parse_keyword(Keyword::CHANGE) {
10444 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10446 let new_name = self.parse_identifier()?;
10447 let data_type = self.parse_data_type()?;
10448 let mut options = vec![];
10449 while let Some(option) = self.parse_optional_column_option()? {
10450 options.push(option);
10451 }
10452
10453 let column_position = self.parse_column_position()?;
10454
10455 AlterTableOperation::ChangeColumn {
10456 old_name,
10457 new_name,
10458 data_type,
10459 options,
10460 column_position,
10461 }
10462 } else if self.parse_keyword(Keyword::MODIFY) {
10463 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10465 let data_type = self.parse_data_type()?;
10466 let mut options = vec![];
10467 while let Some(option) = self.parse_optional_column_option()? {
10468 options.push(option);
10469 }
10470
10471 let column_position = self.parse_column_position()?;
10472
10473 AlterTableOperation::ModifyColumn {
10474 col_name,
10475 data_type,
10476 options,
10477 column_position,
10478 }
10479 } else if self.parse_keyword(Keyword::ALTER) {
10480 if self.peek_keyword(Keyword::SORTKEY) {
10481 self.prev_token();
10482 return self.parse_alter_sort_key();
10483 }
10484
10485 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10487 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10488
10489 let op: AlterColumnOperation = if self.parse_keywords(&[
10490 Keyword::SET,
10491 Keyword::NOT,
10492 Keyword::NULL,
10493 ]) {
10494 AlterColumnOperation::SetNotNull {}
10495 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10496 AlterColumnOperation::DropNotNull {}
10497 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10498 AlterColumnOperation::SetDefault {
10499 value: self.parse_expr()?,
10500 }
10501 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10502 AlterColumnOperation::DropDefault {}
10503 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10504 self.parse_set_data_type(true)?
10505 } else if self.parse_keyword(Keyword::TYPE) {
10506 self.parse_set_data_type(false)?
10507 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10508 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10509 Some(GeneratedAs::Always)
10510 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10511 Some(GeneratedAs::ByDefault)
10512 } else {
10513 None
10514 };
10515
10516 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10517
10518 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10519
10520 if self.peek_token_ref().token == Token::LParen {
10521 self.expect_token(&Token::LParen)?;
10522 sequence_options = Some(self.parse_create_sequence_options()?);
10523 self.expect_token(&Token::RParen)?;
10524 }
10525
10526 AlterColumnOperation::AddGenerated {
10527 generated_as,
10528 sequence_options,
10529 }
10530 } else {
10531 let message = if is_postgresql {
10532 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10533 } else {
10534 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10535 };
10536
10537 return self.expected_ref(message, self.peek_token_ref());
10538 };
10539 AlterTableOperation::AlterColumn { column_name, op }
10540 } else if self.parse_keyword(Keyword::SWAP) {
10541 self.expect_keyword_is(Keyword::WITH)?;
10542 let table_name = self.parse_object_name(false)?;
10543 AlterTableOperation::SwapWith { table_name }
10544 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10545 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10546 {
10547 let new_owner = self.parse_owner()?;
10548 AlterTableOperation::OwnerTo { new_owner }
10549 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10550 && self.parse_keyword(Keyword::ATTACH)
10551 {
10552 AlterTableOperation::AttachPartition {
10553 partition: self.parse_part_or_partition()?,
10554 }
10555 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10556 && self.parse_keyword(Keyword::DETACH)
10557 {
10558 AlterTableOperation::DetachPartition {
10559 partition: self.parse_part_or_partition()?,
10560 }
10561 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10562 && self.parse_keyword(Keyword::FREEZE)
10563 {
10564 let partition = self.parse_part_or_partition()?;
10565 let with_name = if self.parse_keyword(Keyword::WITH) {
10566 self.expect_keyword_is(Keyword::NAME)?;
10567 Some(self.parse_identifier()?)
10568 } else {
10569 None
10570 };
10571 AlterTableOperation::FreezePartition {
10572 partition,
10573 with_name,
10574 }
10575 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10576 && self.parse_keyword(Keyword::UNFREEZE)
10577 {
10578 let partition = self.parse_part_or_partition()?;
10579 let with_name = if self.parse_keyword(Keyword::WITH) {
10580 self.expect_keyword_is(Keyword::NAME)?;
10581 Some(self.parse_identifier()?)
10582 } else {
10583 None
10584 };
10585 AlterTableOperation::UnfreezePartition {
10586 partition,
10587 with_name,
10588 }
10589 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10590 self.expect_token(&Token::LParen)?;
10591 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
10592 self.expect_token(&Token::RParen)?;
10593 AlterTableOperation::ClusterBy { exprs }
10594 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
10595 AlterTableOperation::SuspendRecluster
10596 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
10597 AlterTableOperation::ResumeRecluster
10598 } else if self.parse_keyword(Keyword::LOCK) {
10599 let equals = self.consume_token(&Token::Eq);
10600 let lock = match self.parse_one_of_keywords(&[
10601 Keyword::DEFAULT,
10602 Keyword::EXCLUSIVE,
10603 Keyword::NONE,
10604 Keyword::SHARED,
10605 ]) {
10606 Some(Keyword::DEFAULT) => AlterTableLock::Default,
10607 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
10608 Some(Keyword::NONE) => AlterTableLock::None,
10609 Some(Keyword::SHARED) => AlterTableLock::Shared,
10610 _ => self.expected_ref(
10611 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
10612 self.peek_token_ref(),
10613 )?,
10614 };
10615 AlterTableOperation::Lock { equals, lock }
10616 } else if self.parse_keyword(Keyword::ALGORITHM) {
10617 let equals = self.consume_token(&Token::Eq);
10618 let algorithm = match self.parse_one_of_keywords(&[
10619 Keyword::DEFAULT,
10620 Keyword::INSTANT,
10621 Keyword::INPLACE,
10622 Keyword::COPY,
10623 ]) {
10624 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
10625 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
10626 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
10627 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
10628 _ => self.expected_ref(
10629 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
10630 self.peek_token_ref(),
10631 )?,
10632 };
10633 AlterTableOperation::Algorithm { equals, algorithm }
10634 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
10635 let equals = self.consume_token(&Token::Eq);
10636 let value = self.parse_number_value()?;
10637 AlterTableOperation::AutoIncrement { equals, value }
10638 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
10639 let identity = if self.parse_keyword(Keyword::NOTHING) {
10640 ReplicaIdentity::Nothing
10641 } else if self.parse_keyword(Keyword::FULL) {
10642 ReplicaIdentity::Full
10643 } else if self.parse_keyword(Keyword::DEFAULT) {
10644 ReplicaIdentity::Default
10645 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
10646 ReplicaIdentity::Index(self.parse_identifier()?)
10647 } else {
10648 return self.expected_ref(
10649 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
10650 self.peek_token_ref(),
10651 );
10652 };
10653
10654 AlterTableOperation::ReplicaIdentity { identity }
10655 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
10656 let name = self.parse_identifier()?;
10657 AlterTableOperation::ValidateConstraint { name }
10658 } else {
10659 let mut options =
10660 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
10661 if !options.is_empty() {
10662 AlterTableOperation::SetTblProperties {
10663 table_properties: options,
10664 }
10665 } else {
10666 options = self.parse_options(Keyword::SET)?;
10667 if !options.is_empty() {
10668 AlterTableOperation::SetOptionsParens { options }
10669 } else {
10670 return self.expected_ref(
10671 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
10672 self.peek_token_ref(),
10673 );
10674 }
10675 }
10676 };
10677 Ok(operation)
10678 }
10679
10680 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
10681 let data_type = self.parse_data_type()?;
10682 let using = if self.dialect.supports_alter_column_type_using()
10683 && self.parse_keyword(Keyword::USING)
10684 {
10685 Some(self.parse_expr()?)
10686 } else {
10687 None
10688 };
10689 Ok(AlterColumnOperation::SetDataType {
10690 data_type,
10691 using,
10692 had_set,
10693 })
10694 }
10695
10696 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
10697 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
10698 match keyword {
10699 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
10700 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
10701 unexpected_keyword => Err(ParserError::ParserError(
10703 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
10704 )),
10705 }
10706 }
10707
    /// Parses a top-level `ALTER ...` statement, dispatching on the keyword
    /// that names the object being altered (the `ALTER` keyword itself has
    /// already been consumed by the caller).
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        // NOTE: this list and the `match` below must stay in sync.
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // `parse_alter_schema` re-consumes `ALTER SCHEMA` itself, so
                // rewind past both keywords before delegating.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // `ICEBERG` must be followed by `TABLE`; the flag marks the
                // resulting AlterTable as an iceberg table.
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::INDEX => {
                let index_name = self.parse_object_name(false)?;
                // Only `RENAME TO <name>` is supported for ALTER INDEX here.
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else {
                    return self.expected_ref("RENAME after ALTER INDEX", self.peek_token_ref());
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::OPERATOR => {
                // `OPERATOR` may be followed by `FAMILY` or `CLASS`, each
                // with its own grammar; otherwise it is plain ALTER OPERATOR.
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            // Unreachable: `expect_one_of_keywords` only returns keywords
            // from the list above.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
            )),
        }
    }
10779
10780 fn parse_alter_aggregate_signature(
10781 &mut self,
10782 ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
10783 let name = self.parse_object_name(false)?;
10784 self.expect_token(&Token::LParen)?;
10785
10786 if self.consume_token(&Token::Mul) {
10787 self.expect_token(&Token::RParen)?;
10788 return Ok((
10789 FunctionDesc {
10790 name,
10791 args: Some(vec![]),
10792 },
10793 true,
10794 None,
10795 ));
10796 }
10797
10798 let args =
10799 if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
10800 vec![]
10801 } else {
10802 self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
10803 };
10804
10805 let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10806 Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
10807 } else {
10808 None
10809 };
10810
10811 self.expect_token(&Token::RParen)?;
10812 Ok((
10813 FunctionDesc {
10814 name,
10815 args: Some(args),
10816 },
10817 false,
10818 aggregate_order_by,
10819 ))
10820 }
10821
    /// Parses a single `ALTER FUNCTION` action (behavior, security, cost,
    /// parallelism, SET/RESET of a configuration parameter, ...).
    ///
    /// Returns `Ok(None)` when the next tokens do not start any known action,
    /// which lets the caller loop until the action list is exhausted.
    ///
    /// NOTE: branch order matters — multi-keyword prefixes must be tried
    /// before their shorter forms (e.g. `EXTERNAL SECURITY` before bare
    /// `SECURITY`, `NOT LEAKPROOF` before `LEAKPROOF`).
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            // `STRICT` is the shorthand for RETURNS NULL ON NULL INPUT.
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // `NOT` may only introduce `NOT LEAKPROOF` here.
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // `[EXTERNAL] SECURITY { DEFINER | INVOKER }`; the `external`
            // flag preserves whether the noise word was present.
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // `SET name { FROM CURRENT | { = | TO } { DEFAULT | values } }`
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // `=` and `TO` are interchangeable separators.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // `RESET { ALL | config_name }`
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // No recognized action starts here; signal end of the list.
            None
        };

        Ok(action)
    }
10931
10932 fn parse_alter_function_actions(
10933 &mut self,
10934 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
10935 let mut actions = vec![];
10936 while let Some(action) = self.parse_alter_function_action()? {
10937 actions.push(action);
10938 }
10939 if actions.is_empty() {
10940 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
10941 }
10942 let restrict = self.parse_keyword(Keyword::RESTRICT);
10943 Ok((actions, restrict))
10944 }
10945
    /// Parses `ALTER { FUNCTION | AGGREGATE } signature <operation>`; `kind`
    /// selects which signature grammar to use and which operations are
    /// permitted (aggregates only allow RENAME TO / OWNER TO / SET SCHEMA).
    pub fn parse_alter_function(
        &mut self,
        kind: AlterFunctionKind,
    ) -> Result<Statement, ParserError> {
        // Functions and aggregates have different signature grammars; only
        // aggregates can produce a `(*)` star or an ORDER BY argument list.
        let (function, aggregate_star, aggregate_order_by) = match kind {
            AlterFunctionKind::Function => (self.parse_function_desc()?, false, None),
            AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
        };

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterFunctionOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterFunctionOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            // Checked before the generic action list so `SET SCHEMA` is not
            // misread as a `SET <config>` action.
            AlterFunctionOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function) && self.parse_keyword(Keyword::NO) {
            // `[NO] DEPENDS ON EXTENSION <name>` — functions only.
            if !self.parse_keyword(Keyword::DEPENDS) {
                return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
            }
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: true,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function)
            && self.parse_keyword(Keyword::DEPENDS)
        {
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: false,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function) {
            // Fall back to the general action list (behavior, security, ...).
            let (actions, restrict) = self.parse_alter_function_actions()?;
            AlterFunctionOperation::Actions { actions, restrict }
        } else {
            return self.expected_ref(
                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterFunction(AlterFunction {
            kind,
            function,
            aggregate_order_by,
            aggregate_star,
            operation,
        }))
    }
11000
11001 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11003 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11004 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11006 let on_cluster = self.parse_optional_on_cluster()?;
11007 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11008
11009 let mut location = None;
11010 if self.parse_keyword(Keyword::LOCATION) {
11011 location = Some(HiveSetLocation {
11012 has_set: false,
11013 location: self.parse_identifier()?,
11014 });
11015 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11016 location = Some(HiveSetLocation {
11017 has_set: true,
11018 location: self.parse_identifier()?,
11019 });
11020 }
11021
11022 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11023 self.peek_token_ref().clone()
11024 } else {
11025 self.get_current_token().clone()
11026 };
11027
11028 Ok(AlterTable {
11029 name: table_name,
11030 if_exists,
11031 only,
11032 operations,
11033 location,
11034 on_cluster,
11035 table_type: if iceberg {
11036 Some(AlterTableType::Iceberg)
11037 } else {
11038 None
11039 },
11040 end_token: AttachedToken(end_token),
11041 }
11042 .into())
11043 }
11044
11045 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11047 let name = self.parse_object_name(false)?;
11048 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11049
11050 let with_options = self.parse_options(Keyword::WITH)?;
11051
11052 self.expect_keyword_is(Keyword::AS)?;
11053 let query = self.parse_query()?;
11054
11055 Ok(Statement::AlterView {
11056 name,
11057 columns,
11058 query,
11059 with_options,
11060 })
11061 }
11062
11063 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
11065 let name = self.parse_object_name(false)?;
11066
11067 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11068 let new_name = self.parse_identifier()?;
11069 Ok(Statement::AlterType(AlterType {
11070 name,
11071 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
11072 }))
11073 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
11074 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
11075 let new_enum_value = self.parse_identifier()?;
11076 let position = if self.parse_keyword(Keyword::BEFORE) {
11077 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
11078 } else if self.parse_keyword(Keyword::AFTER) {
11079 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
11080 } else {
11081 None
11082 };
11083
11084 Ok(Statement::AlterType(AlterType {
11085 name,
11086 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
11087 if_not_exists,
11088 value: new_enum_value,
11089 position,
11090 }),
11091 }))
11092 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
11093 let existing_enum_value = self.parse_identifier()?;
11094 self.expect_keyword(Keyword::TO)?;
11095 let new_enum_value = self.parse_identifier()?;
11096
11097 Ok(Statement::AlterType(AlterType {
11098 name,
11099 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
11100 from: existing_enum_value,
11101 to: new_enum_value,
11102 }),
11103 }))
11104 } else {
11105 self.expected_ref(
11106 "{RENAME TO | { RENAME | ADD } VALUE}",
11107 self.peek_token_ref(),
11108 )
11109 }
11110 }
11111
11112 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11116 let name = self.parse_object_name(false)?;
11117 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11118 AlterCollationOperation::RenameTo {
11119 new_name: self.parse_identifier()?,
11120 }
11121 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11122 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11123 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11124 AlterCollationOperation::SetSchema {
11125 schema_name: self.parse_object_name(false)?,
11126 }
11127 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11128 AlterCollationOperation::RefreshVersion
11129 } else {
11130 return self.expected_ref(
11131 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11132 self.peek_token_ref(),
11133 );
11134 };
11135
11136 Ok(AlterCollation { name, operation })
11137 }
11138
    /// Parses `ALTER OPERATOR name (left_type, right_type) <operation>`.
    ///
    /// `NONE` as the left type denotes a prefix operator.
    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
        let name = self.parse_operator_name()?;

        // Parenthesized operand signature: `(NONE | type, type)`.
        self.expect_token(&Token::LParen)?;

        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // NOTE: `SET SCHEMA` must be tried before bare `SET (...)`.
        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            // `SET ( option [, ...] )` — comma-separated operator options.
            self.expect_token(&Token::LParen)?;

            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    Keyword::RESTRICT => {
                        // `RESTRICT = { NONE | proc_name }`
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        // `JOIN = { NONE | proc_name }`
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    Keyword::COMMUTATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    Keyword::HASHES => {
                        // Bare flag options take no value.
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // Unreachable: `expect_one_of_keywords` only returns the
                    // keywords listed above.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        })
    }
11248
11249 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11251 let strategy_number = self.parse_literal_uint()?;
11252 let operator_name = self.parse_operator_name()?;
11253
11254 self.expect_token(&Token::LParen)?;
11256 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11257 self.expect_token(&Token::RParen)?;
11258
11259 let purpose = if self.parse_keyword(Keyword::FOR) {
11261 if self.parse_keyword(Keyword::SEARCH) {
11262 Some(OperatorPurpose::ForSearch)
11263 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11264 let sort_family = self.parse_object_name(false)?;
11265 Some(OperatorPurpose::ForOrderBy { sort_family })
11266 } else {
11267 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11268 }
11269 } else {
11270 None
11271 };
11272
11273 Ok(OperatorFamilyItem::Operator {
11274 strategy_number,
11275 operator_name,
11276 op_types,
11277 purpose,
11278 })
11279 }
11280
11281 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11283 let support_number = self.parse_literal_uint()?;
11284
11285 let op_types =
11287 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
11288 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11289 self.expect_token(&Token::RParen)?;
11290 Some(types)
11291 } else if self.consume_token(&Token::LParen) {
11292 self.expect_token(&Token::RParen)?;
11293 Some(vec![])
11294 } else {
11295 None
11296 };
11297
11298 let function_name = self.parse_object_name(false)?;
11299
11300 let argument_types = if self.consume_token(&Token::LParen) {
11302 if self.peek_token_ref().token == Token::RParen {
11303 self.expect_token(&Token::RParen)?;
11304 vec![]
11305 } else {
11306 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11307 self.expect_token(&Token::RParen)?;
11308 types
11309 }
11310 } else {
11311 vec![]
11312 };
11313
11314 Ok(OperatorFamilyItem::Function {
11315 support_number,
11316 op_types,
11317 function_name,
11318 argument_types,
11319 })
11320 }
11321
11322 fn parse_operator_family_drop_operator(
11324 &mut self,
11325 ) -> Result<OperatorFamilyDropItem, ParserError> {
11326 let strategy_number = self.parse_literal_uint()?;
11327
11328 self.expect_token(&Token::LParen)?;
11330 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11331 self.expect_token(&Token::RParen)?;
11332
11333 Ok(OperatorFamilyDropItem::Operator {
11334 strategy_number,
11335 op_types,
11336 })
11337 }
11338
11339 fn parse_operator_family_drop_function(
11341 &mut self,
11342 ) -> Result<OperatorFamilyDropItem, ParserError> {
11343 let support_number = self.parse_literal_uint()?;
11344
11345 self.expect_token(&Token::LParen)?;
11347 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11348 self.expect_token(&Token::RParen)?;
11349
11350 Ok(OperatorFamilyDropItem::Function {
11351 support_number,
11352 op_types,
11353 })
11354 }
11355
11356 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11358 if self.parse_keyword(Keyword::OPERATOR) {
11359 self.parse_operator_family_add_operator()
11360 } else if self.parse_keyword(Keyword::FUNCTION) {
11361 self.parse_operator_family_add_function()
11362 } else {
11363 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11364 }
11365 }
11366
11367 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
11369 if self.parse_keyword(Keyword::OPERATOR) {
11370 self.parse_operator_family_drop_operator()
11371 } else if self.parse_keyword(Keyword::FUNCTION) {
11372 self.parse_operator_family_drop_function()
11373 } else {
11374 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11375 }
11376 }
11377
11378 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
11381 let name = self.parse_object_name(false)?;
11382 self.expect_keyword(Keyword::USING)?;
11383 let using = self.parse_identifier()?;
11384
11385 let operation = if self.parse_keyword(Keyword::ADD) {
11386 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
11387 AlterOperatorFamilyOperation::Add { items }
11388 } else if self.parse_keyword(Keyword::DROP) {
11389 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
11390 AlterOperatorFamilyOperation::Drop { items }
11391 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11392 let new_name = self.parse_object_name(false)?;
11393 AlterOperatorFamilyOperation::RenameTo { new_name }
11394 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11395 let owner = self.parse_owner()?;
11396 AlterOperatorFamilyOperation::OwnerTo(owner)
11397 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11398 let schema_name = self.parse_object_name(false)?;
11399 AlterOperatorFamilyOperation::SetSchema { schema_name }
11400 } else {
11401 return self.expected_ref(
11402 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
11403 self.peek_token_ref(),
11404 );
11405 };
11406
11407 Ok(AlterOperatorFamily {
11408 name,
11409 using,
11410 operation,
11411 })
11412 }
11413
11414 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
11418 let name = self.parse_object_name(false)?;
11419 self.expect_keyword(Keyword::USING)?;
11420 let using = self.parse_identifier()?;
11421
11422 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11423 let new_name = self.parse_object_name(false)?;
11424 AlterOperatorClassOperation::RenameTo { new_name }
11425 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11426 let owner = self.parse_owner()?;
11427 AlterOperatorClassOperation::OwnerTo(owner)
11428 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11429 let schema_name = self.parse_object_name(false)?;
11430 AlterOperatorClassOperation::SetSchema { schema_name }
11431 } else {
11432 return self.expected_ref(
11433 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
11434 self.peek_token_ref(),
11435 );
11436 };
11437
11438 Ok(AlterOperatorClass {
11439 name,
11440 using,
11441 operation,
11442 })
11443 }
11444
    /// Parses a full `ALTER SCHEMA [IF EXISTS] name <operation>` statement.
    ///
    /// Unlike most `parse_alter_*` helpers, this one expects to start at the
    /// `ALTER` keyword itself (callers rewind before delegating here).
    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
            // `parse_options` expects to consume the `OPTIONS` keyword
            // itself, so rewind one token (the `OPTIONS` just matched).
            self.prev_token();
            let options = self.parse_options(Keyword::OPTIONS)?;
            AlterSchemaOperation::SetOptionsParens { options }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
            let collate = self.parse_expr()?;
            AlterSchemaOperation::SetDefaultCollate { collate }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            // The OPTIONS clause after ADD REPLICA is optional.
            let options = if self.peek_keyword(Keyword::OPTIONS) {
                Some(self.parse_options(Keyword::OPTIONS)?)
            } else {
                None
            };
            AlterSchemaOperation::AddReplica { replica, options }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            AlterSchemaOperation::DropReplica { replica }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterSchemaOperation::Rename { name: new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterSchemaOperation::OwnerTo { owner }
        } else {
            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
        };
        Ok(Statement::AlterSchema(AlterSchema {
            name,
            if_exists,
            operations: vec![operation],
        }))
    }
11485
11486 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
11489 let object_name = self.parse_object_name(false)?;
11490 if self.peek_token_ref().token == Token::LParen {
11491 match self.parse_function(object_name)? {
11492 Expr::Function(f) => Ok(Statement::Call(f)),
11493 other => parser_err!(
11494 format!("Expected a simple procedure call but found: {other}"),
11495 self.peek_token_ref().span.start
11496 ),
11497 }
11498 } else {
11499 Ok(Statement::Call(Function {
11500 name: object_name,
11501 uses_odbc_syntax: false,
11502 parameters: FunctionArguments::None,
11503 args: FunctionArguments::None,
11504 over: None,
11505 filter: None,
11506 null_treatment: None,
11507 within_group: vec![],
11508 }))
11509 }
11510 }
11511
11512 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
11514 let source;
11515 if self.consume_token(&Token::LParen) {
11516 source = CopySource::Query(self.parse_query()?);
11517 self.expect_token(&Token::RParen)?;
11518 } else {
11519 let table_name = self.parse_object_name(false)?;
11520 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11521 source = CopySource::Table {
11522 table_name,
11523 columns,
11524 };
11525 }
11526 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
11527 Some(Keyword::FROM) => false,
11528 Some(Keyword::TO) => true,
11529 _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
11530 };
11531 if !to {
11532 if let CopySource::Query(_) = source {
11535 return Err(ParserError::ParserError(
11536 "COPY ... FROM does not support query as a source".to_string(),
11537 ));
11538 }
11539 }
11540 let target = if self.parse_keyword(Keyword::STDIN) {
11541 CopyTarget::Stdin
11542 } else if self.parse_keyword(Keyword::STDOUT) {
11543 CopyTarget::Stdout
11544 } else if self.parse_keyword(Keyword::PROGRAM) {
11545 CopyTarget::Program {
11546 command: self.parse_literal_string()?,
11547 }
11548 } else {
11549 CopyTarget::File {
11550 filename: self.parse_literal_string()?,
11551 }
11552 };
11553 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
11555 if self.consume_token(&Token::LParen) {
11556 options = self.parse_comma_separated(Parser::parse_copy_option)?;
11557 self.expect_token(&Token::RParen)?;
11558 }
11559 let mut legacy_options = vec![];
11560 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
11561 legacy_options.push(opt);
11562 }
11563 let values =
11564 if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
11565 self.expect_token(&Token::SemiColon)?;
11566 self.parse_tsv()
11567 } else {
11568 vec![]
11569 };
11570 Ok(Statement::Copy {
11571 source,
11572 to,
11573 target,
11574 options,
11575 legacy_options,
11576 values,
11577 })
11578 }
11579
11580 fn parse_open(&mut self) -> Result<Statement, ParserError> {
11582 self.expect_keyword(Keyword::OPEN)?;
11583 Ok(Statement::Open(OpenStatement {
11584 cursor_name: self.parse_identifier()?,
11585 }))
11586 }
11587
11588 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
11590 let cursor = if self.parse_keyword(Keyword::ALL) {
11591 CloseCursor::All
11592 } else {
11593 let name = self.parse_identifier()?;
11594
11595 CloseCursor::Specific { name }
11596 };
11597
11598 Ok(Statement::Close { cursor })
11599 }
11600
11601 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
11602 let ret = match self.parse_one_of_keywords(&[
11603 Keyword::FORMAT,
11604 Keyword::FREEZE,
11605 Keyword::DELIMITER,
11606 Keyword::NULL,
11607 Keyword::HEADER,
11608 Keyword::QUOTE,
11609 Keyword::ESCAPE,
11610 Keyword::FORCE_QUOTE,
11611 Keyword::FORCE_NOT_NULL,
11612 Keyword::FORCE_NULL,
11613 Keyword::ENCODING,
11614 ]) {
11615 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
11616 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
11617 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
11618 Some(Keyword::FALSE)
11619 )),
11620 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
11621 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
11622 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
11623 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
11624 Some(Keyword::FALSE)
11625 )),
11626 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
11627 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
11628 Some(Keyword::FORCE_QUOTE) => {
11629 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
11630 }
11631 Some(Keyword::FORCE_NOT_NULL) => {
11632 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
11633 }
11634 Some(Keyword::FORCE_NULL) => {
11635 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
11636 }
11637 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
11638 _ => self.expected_ref("option", self.peek_token_ref())?,
11639 };
11640 Ok(ret)
11641 }
11642
11643 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
11644 if self.parse_keyword(Keyword::FORMAT) {
11646 let _ = self.parse_keyword(Keyword::AS);
11647 }
11648
11649 let ret = match self.parse_one_of_keywords(&[
11650 Keyword::ACCEPTANYDATE,
11651 Keyword::ACCEPTINVCHARS,
11652 Keyword::ADDQUOTES,
11653 Keyword::ALLOWOVERWRITE,
11654 Keyword::BINARY,
11655 Keyword::BLANKSASNULL,
11656 Keyword::BZIP2,
11657 Keyword::CLEANPATH,
11658 Keyword::COMPUPDATE,
11659 Keyword::CREDENTIALS,
11660 Keyword::CSV,
11661 Keyword::DATEFORMAT,
11662 Keyword::DELIMITER,
11663 Keyword::EMPTYASNULL,
11664 Keyword::ENCRYPTED,
11665 Keyword::ESCAPE,
11666 Keyword::EXTENSION,
11667 Keyword::FIXEDWIDTH,
11668 Keyword::GZIP,
11669 Keyword::HEADER,
11670 Keyword::IAM_ROLE,
11671 Keyword::IGNOREHEADER,
11672 Keyword::JSON,
11673 Keyword::MANIFEST,
11674 Keyword::MAXFILESIZE,
11675 Keyword::NULL,
11676 Keyword::PARALLEL,
11677 Keyword::PARQUET,
11678 Keyword::PARTITION,
11679 Keyword::REGION,
11680 Keyword::REMOVEQUOTES,
11681 Keyword::ROWGROUPSIZE,
11682 Keyword::STATUPDATE,
11683 Keyword::TIMEFORMAT,
11684 Keyword::TRUNCATECOLUMNS,
11685 Keyword::ZSTD,
11686 ]) {
11687 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
11688 Some(Keyword::ACCEPTINVCHARS) => {
11689 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
11691 Some(self.parse_literal_string()?)
11692 } else {
11693 None
11694 };
11695 CopyLegacyOption::AcceptInvChars(ch)
11696 }
11697 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
11698 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
11699 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
11700 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
11701 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
11702 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
11703 Some(Keyword::COMPUPDATE) => {
11704 let preset = self.parse_keyword(Keyword::PRESET);
11705 let enabled = match self.parse_one_of_keywords(&[
11706 Keyword::TRUE,
11707 Keyword::FALSE,
11708 Keyword::ON,
11709 Keyword::OFF,
11710 ]) {
11711 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11712 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11713 _ => None,
11714 };
11715 CopyLegacyOption::CompUpdate { preset, enabled }
11716 }
11717 Some(Keyword::CREDENTIALS) => {
11718 CopyLegacyOption::Credentials(self.parse_literal_string()?)
11719 }
11720 Some(Keyword::CSV) => CopyLegacyOption::Csv({
11721 let mut opts = vec![];
11722 while let Some(opt) =
11723 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
11724 {
11725 opts.push(opt);
11726 }
11727 opts
11728 }),
11729 Some(Keyword::DATEFORMAT) => {
11730 let _ = self.parse_keyword(Keyword::AS);
11731 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
11732 Some(self.parse_literal_string()?)
11733 } else {
11734 None
11735 };
11736 CopyLegacyOption::DateFormat(fmt)
11737 }
11738 Some(Keyword::DELIMITER) => {
11739 let _ = self.parse_keyword(Keyword::AS);
11740 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
11741 }
11742 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
11743 Some(Keyword::ENCRYPTED) => {
11744 let auto = self.parse_keyword(Keyword::AUTO);
11745 CopyLegacyOption::Encrypted { auto }
11746 }
11747 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
11748 Some(Keyword::EXTENSION) => {
11749 let ext = self.parse_literal_string()?;
11750 CopyLegacyOption::Extension(ext)
11751 }
11752 Some(Keyword::FIXEDWIDTH) => {
11753 let spec = self.parse_literal_string()?;
11754 CopyLegacyOption::FixedWidth(spec)
11755 }
11756 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
11757 Some(Keyword::HEADER) => CopyLegacyOption::Header,
11758 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
11759 Some(Keyword::IGNOREHEADER) => {
11760 let _ = self.parse_keyword(Keyword::AS);
11761 let num_rows = self.parse_literal_uint()?;
11762 CopyLegacyOption::IgnoreHeader(num_rows)
11763 }
11764 Some(Keyword::JSON) => {
11765 let _ = self.parse_keyword(Keyword::AS);
11766 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
11767 Some(self.parse_literal_string()?)
11768 } else {
11769 None
11770 };
11771 CopyLegacyOption::Json(fmt)
11772 }
11773 Some(Keyword::MANIFEST) => {
11774 let verbose = self.parse_keyword(Keyword::VERBOSE);
11775 CopyLegacyOption::Manifest { verbose }
11776 }
11777 Some(Keyword::MAXFILESIZE) => {
11778 let _ = self.parse_keyword(Keyword::AS);
11779 let size = self.parse_number_value()?;
11780 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
11781 Some(Keyword::MB) => Some(FileSizeUnit::MB),
11782 Some(Keyword::GB) => Some(FileSizeUnit::GB),
11783 _ => None,
11784 };
11785 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
11786 }
11787 Some(Keyword::NULL) => {
11788 let _ = self.parse_keyword(Keyword::AS);
11789 CopyLegacyOption::Null(self.parse_literal_string()?)
11790 }
11791 Some(Keyword::PARALLEL) => {
11792 let enabled = match self.parse_one_of_keywords(&[
11793 Keyword::TRUE,
11794 Keyword::FALSE,
11795 Keyword::ON,
11796 Keyword::OFF,
11797 ]) {
11798 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11799 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11800 _ => None,
11801 };
11802 CopyLegacyOption::Parallel(enabled)
11803 }
11804 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
11805 Some(Keyword::PARTITION) => {
11806 self.expect_keyword(Keyword::BY)?;
11807 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
11808 let include = self.parse_keyword(Keyword::INCLUDE);
11809 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
11810 }
11811 Some(Keyword::REGION) => {
11812 let _ = self.parse_keyword(Keyword::AS);
11813 let region = self.parse_literal_string()?;
11814 CopyLegacyOption::Region(region)
11815 }
11816 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
11817 Some(Keyword::ROWGROUPSIZE) => {
11818 let _ = self.parse_keyword(Keyword::AS);
11819 let file_size = self.parse_file_size()?;
11820 CopyLegacyOption::RowGroupSize(file_size)
11821 }
11822 Some(Keyword::STATUPDATE) => {
11823 let enabled = match self.parse_one_of_keywords(&[
11824 Keyword::TRUE,
11825 Keyword::FALSE,
11826 Keyword::ON,
11827 Keyword::OFF,
11828 ]) {
11829 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11830 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11831 _ => None,
11832 };
11833 CopyLegacyOption::StatUpdate(enabled)
11834 }
11835 Some(Keyword::TIMEFORMAT) => {
11836 let _ = self.parse_keyword(Keyword::AS);
11837 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
11838 Some(self.parse_literal_string()?)
11839 } else {
11840 None
11841 };
11842 CopyLegacyOption::TimeFormat(fmt)
11843 }
11844 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
11845 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
11846 _ => self.expected_ref("option", self.peek_token_ref())?,
11847 };
11848 Ok(ret)
11849 }
11850
11851 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
11852 let size = self.parse_number_value()?;
11853 let unit = self.maybe_parse_file_size_unit();
11854 Ok(FileSize { size, unit })
11855 }
11856
11857 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
11858 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
11859 Some(Keyword::MB) => Some(FileSizeUnit::MB),
11860 Some(Keyword::GB) => Some(FileSizeUnit::GB),
11861 _ => None,
11862 }
11863 }
11864
11865 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
11866 if self.parse_keyword(Keyword::DEFAULT) {
11867 Ok(IamRoleKind::Default)
11868 } else {
11869 let arn = self.parse_literal_string()?;
11870 Ok(IamRoleKind::Arn(arn))
11871 }
11872 }
11873
11874 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
11875 let ret = match self.parse_one_of_keywords(&[
11876 Keyword::HEADER,
11877 Keyword::QUOTE,
11878 Keyword::ESCAPE,
11879 Keyword::FORCE,
11880 ]) {
11881 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
11882 Some(Keyword::QUOTE) => {
11883 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
11885 }
11886 Some(Keyword::ESCAPE) => {
11887 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
11889 }
11890 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
11891 CopyLegacyCsvOption::ForceNotNull(
11892 self.parse_comma_separated(|p| p.parse_identifier())?,
11893 )
11894 }
11895 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
11896 CopyLegacyCsvOption::ForceQuote(
11897 self.parse_comma_separated(|p| p.parse_identifier())?,
11898 )
11899 }
11900 _ => self.expected_ref("csv option", self.peek_token_ref())?,
11901 };
11902 Ok(ret)
11903 }
11904
11905 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
11906 let s = self.parse_literal_string()?;
11907 if s.len() != 1 {
11908 let loc = self
11909 .tokens
11910 .get(self.index - 1)
11911 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
11912 return parser_err!(format!("Expect a char, found {s:?}"), loc);
11913 }
11914 Ok(s.chars().next().unwrap())
11915 }
11916
    /// Parses the inline tab-separated values that follow
    /// `COPY ... FROM STDIN;`, delegating to [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
11922
11923 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
11925 let mut values = vec![];
11926 let mut content = String::new();
11927 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
11928 match t {
11929 Token::Whitespace(Whitespace::Tab) => {
11930 values.push(Some(core::mem::take(&mut content)));
11931 }
11932 Token::Whitespace(Whitespace::Newline) => {
11933 values.push(Some(core::mem::take(&mut content)));
11934 }
11935 Token::Backslash => {
11936 if self.consume_token(&Token::Period) {
11937 return values;
11938 }
11939 if let Token::Word(w) = self.next_token().token {
11940 if w.value == "N" {
11941 values.push(None);
11942 }
11943 }
11944 }
11945 _ => {
11946 content.push_str(&t.to_string());
11947 }
11948 }
11949 }
11950 values
11951 }
11952
    /// Parses a literal value (boolean, NULL, number, one of the many string
    /// literal forms, or a placeholder), returned together with its source span.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper that attaches the consumed token's span to a parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are literals only where the dialect supports them.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a quoted string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Adjacent string literals may be concatenated, dialect permitting.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the identifier (or integer) must
            // follow immediately, with no whitespace skipped.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The resulting span covers the sigil plus the identifier.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12070
    /// Appends any following string literals to `str` when the dialect allows
    /// implicit string-literal concatenation, either unconditionally or only
    /// across a newline.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            // Any directly following string literal is appended.
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // Only literals separated by at least one newline are joined, so
            // track whether a newline has been seen since the last literal.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    // Other whitespace is skipped without satisfying the
                    // newline requirement.
                    Token::Whitespace(_) => {
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        after_newline = false;
                    }
                    _ => break,
                }
            }
        }

        str
    }
12109
12110 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12112 let value_wrapper = self.parse_value()?;
12113 match &value_wrapper.value {
12114 Value::Number(_, _) => Ok(value_wrapper),
12115 Value::Placeholder(_) => Ok(value_wrapper),
12116 _ => {
12117 self.prev_token();
12118 self.expected_ref("literal number", self.peek_token_ref())
12119 }
12120 }
12121 }
12122
12123 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12126 let next_token = self.next_token();
12127 match next_token.token {
12128 Token::Plus => Ok(Expr::UnaryOp {
12129 op: UnaryOperator::Plus,
12130 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12131 }),
12132 Token::Minus => Ok(Expr::UnaryOp {
12133 op: UnaryOperator::Minus,
12134 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12135 }),
12136 _ => {
12137 self.prev_token();
12138 Ok(Expr::Value(self.parse_number_value()?))
12139 }
12140 }
12141 }
12142
12143 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12144 let next_token = self.next_token();
12145 let span = next_token.span;
12146 match next_token.token {
12147 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12148 Value::SingleQuotedString(s.to_string()).with_span(span),
12149 )),
12150 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12151 Value::DoubleQuotedString(s.to_string()).with_span(span),
12152 )),
12153 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12154 Value::HexStringLiteral(s.to_string()).with_span(span),
12155 )),
12156 unexpected => self.expected(
12157 "a string value",
12158 TokenWithSpan {
12159 token: unexpected,
12160 span,
12161 },
12162 ),
12163 }
12164 }
12165
12166 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12168 let next_token = self.next_token();
12169 match next_token.token {
12170 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12171 _ => self.expected("literal int", next_token),
12172 }
12173 }
12174
12175 fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
12178 let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
12179 let peek_token = parser.peek_token();
12180 let span = peek_token.span;
12181 match peek_token.token {
12182 Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
12183 {
12184 parser.next_token();
12185 Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
12186 }
12187 _ => Ok(Expr::Value(
12188 Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
12189 )),
12190 }
12191 };
12192
12193 Ok(CreateFunctionBody::AsBeforeOptions {
12194 body: parse_string_expr(self)?,
12195 link_symbol: if self.consume_token(&Token::Comma) {
12196 Some(parse_string_expr(self)?)
12197 } else {
12198 None
12199 },
12200 })
12201 }
12202
12203 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
12205 let next_token = self.next_token();
12206 match next_token.token {
12207 Token::Word(Word {
12208 value,
12209 keyword: Keyword::NoKeyword,
12210 ..
12211 }) => Ok(value),
12212 Token::SingleQuotedString(s) => Ok(s),
12213 Token::DoubleQuotedString(s) => Ok(s),
12214 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
12215 Ok(s)
12216 }
12217 Token::UnicodeStringLiteral(s) => Ok(s),
12218 _ => self.expected("literal string", next_token),
12219 }
12220 }
12221
12222 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12224 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12225 Some(Keyword::TRUE) => Ok(true),
12226 Some(Keyword::FALSE) => Ok(false),
12227 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12228 }
12229 }
12230
12231 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
12233 let neg = self.parse_keyword(Keyword::NOT);
12234 let normalized_form = self.maybe_parse(|parser| {
12235 match parser.parse_one_of_keywords(&[
12236 Keyword::NFC,
12237 Keyword::NFD,
12238 Keyword::NFKC,
12239 Keyword::NFKD,
12240 ]) {
12241 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
12242 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
12243 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
12244 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
12245 _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
12246 }
12247 })?;
12248 if self.parse_keyword(Keyword::NORMALIZED) {
12249 return Ok(Expr::IsNormalized {
12250 expr: Box::new(expr),
12251 form: normalized_form,
12252 negated: neg,
12253 });
12254 }
12255 self.expected_ref("unicode normalization form", self.peek_token_ref())
12256 }
12257
12258 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12260 self.expect_token(&Token::LParen)?;
12261 let values = self.parse_comma_separated(|parser| {
12262 let name = parser.parse_literal_string()?;
12263 let e = if parser.consume_token(&Token::Eq) {
12264 let value = parser.parse_number()?;
12265 EnumMember::NamedValue(name, value)
12266 } else {
12267 EnumMember::Name(name)
12268 };
12269 Ok(e)
12270 })?;
12271 self.expect_token(&Token::RParen)?;
12272
12273 Ok(values)
12274 }
12275
12276 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
12278 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
12279 if trailing_bracket.0 {
12280 return parser_err!(
12281 format!("unmatched > after parsing data type {ty}"),
12282 self.peek_token_ref()
12283 );
12284 }
12285
12286 Ok(ty)
12287 }
12288
12289 fn parse_data_type_helper(
12290 &mut self,
12291 ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
12292 let dialect = self.dialect;
12293 self.advance_token();
12294 let next_token = self.get_current_token();
12295 let next_token_index = self.get_current_index();
12296
12297 let mut trailing_bracket: MatchedTrailingBracket = false.into();
12298 let mut data = match &next_token.token {
12299 Token::Word(w) => match w.keyword {
12300 Keyword::BOOLEAN => Ok(DataType::Boolean),
12301 Keyword::BOOL => Ok(DataType::Bool),
12302 Keyword::FLOAT => {
12303 let precision = self.parse_exact_number_optional_precision_scale()?;
12304
12305 if self.parse_keyword(Keyword::UNSIGNED) {
12306 Ok(DataType::FloatUnsigned(precision))
12307 } else {
12308 Ok(DataType::Float(precision))
12309 }
12310 }
12311 Keyword::REAL => {
12312 if self.parse_keyword(Keyword::UNSIGNED) {
12313 Ok(DataType::RealUnsigned)
12314 } else {
12315 Ok(DataType::Real)
12316 }
12317 }
12318 Keyword::FLOAT4 => Ok(DataType::Float4),
12319 Keyword::FLOAT32 => Ok(DataType::Float32),
12320 Keyword::FLOAT64 => Ok(DataType::Float64),
12321 Keyword::FLOAT8 => Ok(DataType::Float8),
12322 Keyword::DOUBLE => {
12323 if self.parse_keyword(Keyword::PRECISION) {
12324 if self.parse_keyword(Keyword::UNSIGNED) {
12325 Ok(DataType::DoublePrecisionUnsigned)
12326 } else {
12327 Ok(DataType::DoublePrecision)
12328 }
12329 } else {
12330 let precision = self.parse_exact_number_optional_precision_scale()?;
12331
12332 if self.parse_keyword(Keyword::UNSIGNED) {
12333 Ok(DataType::DoubleUnsigned(precision))
12334 } else {
12335 Ok(DataType::Double(precision))
12336 }
12337 }
12338 }
12339 Keyword::TINYINT => {
12340 let optional_precision = self.parse_optional_precision();
12341 if self.parse_keyword(Keyword::UNSIGNED) {
12342 Ok(DataType::TinyIntUnsigned(optional_precision?))
12343 } else {
12344 if dialect.supports_data_type_signed_suffix() {
12345 let _ = self.parse_keyword(Keyword::SIGNED);
12346 }
12347 Ok(DataType::TinyInt(optional_precision?))
12348 }
12349 }
12350 Keyword::INT2 => {
12351 let optional_precision = self.parse_optional_precision();
12352 if self.parse_keyword(Keyword::UNSIGNED) {
12353 Ok(DataType::Int2Unsigned(optional_precision?))
12354 } else {
12355 Ok(DataType::Int2(optional_precision?))
12356 }
12357 }
12358 Keyword::SMALLINT => {
12359 let optional_precision = self.parse_optional_precision();
12360 if self.parse_keyword(Keyword::UNSIGNED) {
12361 Ok(DataType::SmallIntUnsigned(optional_precision?))
12362 } else {
12363 if dialect.supports_data_type_signed_suffix() {
12364 let _ = self.parse_keyword(Keyword::SIGNED);
12365 }
12366 Ok(DataType::SmallInt(optional_precision?))
12367 }
12368 }
12369 Keyword::MEDIUMINT => {
12370 let optional_precision = self.parse_optional_precision();
12371 if self.parse_keyword(Keyword::UNSIGNED) {
12372 Ok(DataType::MediumIntUnsigned(optional_precision?))
12373 } else {
12374 if dialect.supports_data_type_signed_suffix() {
12375 let _ = self.parse_keyword(Keyword::SIGNED);
12376 }
12377 Ok(DataType::MediumInt(optional_precision?))
12378 }
12379 }
12380 Keyword::INT => {
12381 let optional_precision = self.parse_optional_precision();
12382 if self.parse_keyword(Keyword::UNSIGNED) {
12383 Ok(DataType::IntUnsigned(optional_precision?))
12384 } else {
12385 if dialect.supports_data_type_signed_suffix() {
12386 let _ = self.parse_keyword(Keyword::SIGNED);
12387 }
12388 Ok(DataType::Int(optional_precision?))
12389 }
12390 }
12391 Keyword::INT4 => {
12392 let optional_precision = self.parse_optional_precision();
12393 if self.parse_keyword(Keyword::UNSIGNED) {
12394 Ok(DataType::Int4Unsigned(optional_precision?))
12395 } else {
12396 Ok(DataType::Int4(optional_precision?))
12397 }
12398 }
12399 Keyword::INT8 => {
12400 let optional_precision = self.parse_optional_precision();
12401 if self.parse_keyword(Keyword::UNSIGNED) {
12402 Ok(DataType::Int8Unsigned(optional_precision?))
12403 } else {
12404 Ok(DataType::Int8(optional_precision?))
12405 }
12406 }
12407 Keyword::INT16 => Ok(DataType::Int16),
12408 Keyword::INT32 => Ok(DataType::Int32),
12409 Keyword::INT64 => Ok(DataType::Int64),
12410 Keyword::INT128 => Ok(DataType::Int128),
12411 Keyword::INT256 => Ok(DataType::Int256),
12412 Keyword::INTEGER => {
12413 let optional_precision = self.parse_optional_precision();
12414 if self.parse_keyword(Keyword::UNSIGNED) {
12415 Ok(DataType::IntegerUnsigned(optional_precision?))
12416 } else {
12417 if dialect.supports_data_type_signed_suffix() {
12418 let _ = self.parse_keyword(Keyword::SIGNED);
12419 }
12420 Ok(DataType::Integer(optional_precision?))
12421 }
12422 }
12423 Keyword::BIGINT => {
12424 let optional_precision = self.parse_optional_precision();
12425 if self.parse_keyword(Keyword::UNSIGNED) {
12426 Ok(DataType::BigIntUnsigned(optional_precision?))
12427 } else {
12428 if dialect.supports_data_type_signed_suffix() {
12429 let _ = self.parse_keyword(Keyword::SIGNED);
12430 }
12431 Ok(DataType::BigInt(optional_precision?))
12432 }
12433 }
12434 Keyword::HUGEINT => Ok(DataType::HugeInt),
12435 Keyword::UBIGINT => Ok(DataType::UBigInt),
12436 Keyword::UHUGEINT => Ok(DataType::UHugeInt),
12437 Keyword::USMALLINT => Ok(DataType::USmallInt),
12438 Keyword::UTINYINT => Ok(DataType::UTinyInt),
12439 Keyword::UINT8 => Ok(DataType::UInt8),
12440 Keyword::UINT16 => Ok(DataType::UInt16),
12441 Keyword::UINT32 => Ok(DataType::UInt32),
12442 Keyword::UINT64 => Ok(DataType::UInt64),
12443 Keyword::UINT128 => Ok(DataType::UInt128),
12444 Keyword::UINT256 => Ok(DataType::UInt256),
12445 Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
12446 Keyword::NVARCHAR => {
12447 Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
12448 }
12449 Keyword::CHARACTER => {
12450 if self.parse_keyword(Keyword::VARYING) {
12451 Ok(DataType::CharacterVarying(
12452 self.parse_optional_character_length()?,
12453 ))
12454 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
12455 Ok(DataType::CharacterLargeObject(
12456 self.parse_optional_precision()?,
12457 ))
12458 } else {
12459 Ok(DataType::Character(self.parse_optional_character_length()?))
12460 }
12461 }
12462 Keyword::CHAR => {
12463 if self.parse_keyword(Keyword::VARYING) {
12464 Ok(DataType::CharVarying(
12465 self.parse_optional_character_length()?,
12466 ))
12467 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
12468 Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
12469 } else {
12470 Ok(DataType::Char(self.parse_optional_character_length()?))
12471 }
12472 }
12473 Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
12474 Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
12475 Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
12476 Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
12477 Keyword::TINYBLOB => Ok(DataType::TinyBlob),
12478 Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
12479 Keyword::LONGBLOB => Ok(DataType::LongBlob),
12480 Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
12481 Keyword::BIT => {
12482 if self.parse_keyword(Keyword::VARYING) {
12483 Ok(DataType::BitVarying(self.parse_optional_precision()?))
12484 } else {
12485 Ok(DataType::Bit(self.parse_optional_precision()?))
12486 }
12487 }
12488 Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
12489 Keyword::UUID => Ok(DataType::Uuid),
12490 Keyword::DATE => Ok(DataType::Date),
12491 Keyword::DATE32 => Ok(DataType::Date32),
12492 Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
12493 Keyword::DATETIME64 => {
12494 self.prev_token();
12495 let (precision, time_zone) = self.parse_datetime_64()?;
12496 Ok(DataType::Datetime64(precision, time_zone))
12497 }
12498 Keyword::TIMESTAMP => {
12499 let precision = self.parse_optional_precision()?;
12500 let tz = if self.parse_keyword(Keyword::WITH) {
12501 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
12502 TimezoneInfo::WithTimeZone
12503 } else if self.parse_keyword(Keyword::WITHOUT) {
12504 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
12505 TimezoneInfo::WithoutTimeZone
12506 } else {
12507 TimezoneInfo::None
12508 };
12509 Ok(DataType::Timestamp(precision, tz))
12510 }
12511 Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
12512 self.parse_optional_precision()?,
12513 TimezoneInfo::Tz,
12514 )),
12515 Keyword::TIMESTAMP_NTZ => {
12516 Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
12517 }
12518 Keyword::TIME => {
12519 let precision = self.parse_optional_precision()?;
12520 let tz = if self.parse_keyword(Keyword::WITH) {
12521 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
12522 TimezoneInfo::WithTimeZone
12523 } else if self.parse_keyword(Keyword::WITHOUT) {
12524 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
12525 TimezoneInfo::WithoutTimeZone
12526 } else {
12527 TimezoneInfo::None
12528 };
12529 Ok(DataType::Time(precision, tz))
12530 }
12531 Keyword::TIMETZ => Ok(DataType::Time(
12532 self.parse_optional_precision()?,
12533 TimezoneInfo::Tz,
12534 )),
12535 Keyword::INTERVAL => {
12536 if self.dialect.supports_interval_options() {
12537 let fields = self.maybe_parse_optional_interval_fields()?;
12538 let precision = self.parse_optional_precision()?;
12539 Ok(DataType::Interval { fields, precision })
12540 } else {
12541 Ok(DataType::Interval {
12542 fields: None,
12543 precision: None,
12544 })
12545 }
12546 }
12547 Keyword::JSON => Ok(DataType::JSON),
12548 Keyword::JSONB => Ok(DataType::JSONB),
12549 Keyword::REGCLASS => Ok(DataType::Regclass),
12550 Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
12551 Keyword::FIXEDSTRING => {
12552 self.expect_token(&Token::LParen)?;
12553 let character_length = self.parse_literal_uint()?;
12554 self.expect_token(&Token::RParen)?;
12555 Ok(DataType::FixedString(character_length))
12556 }
12557 Keyword::TEXT => Ok(DataType::Text),
12558 Keyword::TINYTEXT => Ok(DataType::TinyText),
12559 Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
12560 Keyword::LONGTEXT => Ok(DataType::LongText),
12561 Keyword::BYTEA => Ok(DataType::Bytea),
12562 Keyword::NUMERIC => Ok(DataType::Numeric(
12563 self.parse_exact_number_optional_precision_scale()?,
12564 )),
12565 Keyword::DECIMAL => {
12566 let precision = self.parse_exact_number_optional_precision_scale()?;
12567
12568 if self.parse_keyword(Keyword::UNSIGNED) {
12569 Ok(DataType::DecimalUnsigned(precision))
12570 } else {
12571 Ok(DataType::Decimal(precision))
12572 }
12573 }
12574 Keyword::DEC => {
12575 let precision = self.parse_exact_number_optional_precision_scale()?;
12576
12577 if self.parse_keyword(Keyword::UNSIGNED) {
12578 Ok(DataType::DecUnsigned(precision))
12579 } else {
12580 Ok(DataType::Dec(precision))
12581 }
12582 }
12583 Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
12584 self.parse_exact_number_optional_precision_scale()?,
12585 )),
12586 Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
12587 self.parse_exact_number_optional_precision_scale()?,
12588 )),
12589 Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
12590 Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
12591 Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
12592 Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
12593 Keyword::ARRAY => {
12594 if self.dialect.supports_array_typedef_without_element_type() {
12595 Ok(DataType::Array(ArrayElemTypeDef::None))
12596 } else if dialect_of!(self is ClickHouseDialect) {
12597 Ok(self.parse_sub_type(|internal_type| {
12598 DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
12599 })?)
12600 } else {
12601 self.expect_token(&Token::Lt)?;
12602 let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
12603 trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
12604 Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
12605 inside_type,
12606 ))))
12607 }
12608 }
12609 Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
12610 self.prev_token();
12611 let field_defs = self.parse_duckdb_struct_type_def()?;
12612 Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
12613 }
12614 Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
12615 {
12616 self.prev_token();
12617 let (field_defs, _trailing_bracket) =
12618 self.parse_struct_type_def(Self::parse_struct_field_def)?;
12619 trailing_bracket = _trailing_bracket;
12620 Ok(DataType::Struct(
12621 field_defs,
12622 StructBracketKind::AngleBrackets,
12623 ))
12624 }
12625 Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
12626 self.prev_token();
12627 let fields = self.parse_union_type_def()?;
12628 Ok(DataType::Union(fields))
12629 }
12630 Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12631 Ok(self.parse_sub_type(DataType::Nullable)?)
12632 }
12633 Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12634 Ok(self.parse_sub_type(DataType::LowCardinality)?)
12635 }
12636 Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12637 self.prev_token();
12638 let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
12639 Ok(DataType::Map(
12640 Box::new(key_data_type),
12641 Box::new(value_data_type),
12642 ))
12643 }
12644 Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12645 self.expect_token(&Token::LParen)?;
12646 let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
12647 self.expect_token(&Token::RParen)?;
12648 Ok(DataType::Nested(field_defs))
12649 }
12650 Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12651 self.prev_token();
12652 let field_defs = self.parse_click_house_tuple_def()?;
12653 Ok(DataType::Tuple(field_defs))
12654 }
12655 Keyword::TRIGGER => Ok(DataType::Trigger),
12656 Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
12657 let _ = self.parse_keyword(Keyword::TYPE);
12658 Ok(DataType::AnyType)
12659 }
12660 Keyword::TABLE => {
12661 if self.peek_token_ref().token == Token::LParen {
12664 let columns = self.parse_returns_table_columns()?;
12665 Ok(DataType::Table(Some(columns)))
12666 } else {
12667 Ok(DataType::Table(None))
12668 }
12669 }
12670 Keyword::SIGNED => {
12671 if self.parse_keyword(Keyword::INTEGER) {
12672 Ok(DataType::SignedInteger)
12673 } else {
12674 Ok(DataType::Signed)
12675 }
12676 }
12677 Keyword::UNSIGNED => {
12678 if self.parse_keyword(Keyword::INTEGER) {
12679 Ok(DataType::UnsignedInteger)
12680 } else {
12681 Ok(DataType::Unsigned)
12682 }
12683 }
12684 Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
12685 Ok(DataType::TsVector)
12686 }
12687 Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
12688 Ok(DataType::TsQuery)
12689 }
12690 _ => {
12691 self.prev_token();
12692 let type_name = self.parse_object_name(false)?;
12693 if let Some(modifiers) = self.parse_optional_type_modifiers()? {
12694 Ok(DataType::Custom(type_name, modifiers))
12695 } else {
12696 Ok(DataType::Custom(type_name, vec![]))
12697 }
12698 }
12699 },
12700 _ => self.expected_at("a data type name", next_token_index),
12701 }?;
12702
12703 if self.dialect.supports_array_typedef_with_brackets() {
12704 while self.consume_token(&Token::LBracket) {
12705 let size = self.maybe_parse(|p| p.parse_literal_uint())?;
12707 self.expect_token(&Token::RBracket)?;
12708 data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
12709 }
12710 }
12711 Ok((data, trailing_bracket))
12712 }
12713
    /// Parses a single column definition inside the parenthesized column list
    /// of a `RETURNS TABLE (...)` clause; delegates to the general column
    /// definition parser.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
12717
12718 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
12719 self.expect_token(&Token::LParen)?;
12720 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
12721 self.expect_token(&Token::RParen)?;
12722 Ok(columns)
12723 }
12724
12725 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
12727 self.expect_token(&Token::LParen)?;
12728 let mut values = Vec::new();
12729 loop {
12730 let next_token = self.next_token();
12731 match next_token.token {
12732 Token::SingleQuotedString(value) => values.push(value),
12733 _ => self.expected("a string", next_token)?,
12734 }
12735 let next_token = self.next_token();
12736 match next_token.token {
12737 Token::Comma => (),
12738 Token::RParen => break,
12739 _ => self.expected(", or }", next_token)?,
12740 }
12741 }
12742 Ok(values)
12743 }
12744
12745 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
12747 let ident = self.parse_identifier()?;
12748 self.expect_keyword_is(Keyword::AS)?;
12749 let alias = self.parse_identifier()?;
12750 Ok(IdentWithAlias { ident, alias })
12751 }
12752
12753 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
12755 let ident = self.parse_identifier()?;
12756 let _after_as = self.parse_keyword(Keyword::AS);
12757 let alias = self.parse_identifier()?;
12758 Ok(IdentWithAlias { ident, alias })
12759 }
12760
12761 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
12763 self.parse_comma_separated(|parser| {
12764 parser.expect_token(&Token::LParen)?;
12765 let query = parser.parse_query()?;
12766 parser.expect_token(&Token::RParen)?;
12767 Ok(*query)
12768 })
12769 }
12770
    /// Parses a set quantifier for a pipe operator that requires `DISTINCT`,
    /// rejecting `ALL` or an absent quantifier.
    ///
    /// `operator_name` is used only to build the error message.
    fn parse_distinct_required_set_quantifier(
        &mut self,
        operator_name: &str,
    ) -> Result<SetQuantifier, ParserError> {
        // NOTE(review): the quantifier is always parsed as if for INTERSECT,
        // regardless of `operator_name` — presumably parse_set_quantifier
        // behaves identically for all operators using this helper; confirm.
        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
        match quantifier {
            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
            _ => Err(ParserError::ParserError(format!(
                "{operator_name} pipe operator requires DISTINCT modifier",
            ))),
        }
    }
12784
12785 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
12787 if self.parse_keyword(Keyword::AS) {
12788 Ok(Some(self.parse_identifier()?))
12789 } else {
12790 self.maybe_parse(|parser| parser.parse_identifier())
12792 }
12793 }
12794
12795 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
12797 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
12798 parser.dialect.is_select_item_alias(explicit, kw, parser)
12799 }
12800 self.parse_optional_alias_inner(None, validator)
12801 }
12802
12803 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
12807 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
12808 parser.dialect.is_table_factor_alias(explicit, kw, parser)
12809 }
12810 let explicit = self.peek_keyword(Keyword::AS);
12811 match self.parse_optional_alias_inner(None, validator)? {
12812 Some(name) => {
12813 let columns = self.parse_table_alias_column_defs()?;
12814 Ok(Some(TableAlias {
12815 explicit,
12816 name,
12817 columns,
12818 }))
12819 }
12820 None => Ok(None),
12821 }
12822 }
12823
    /// Parses MySQL-style table index hints, e.g.
    /// `USE INDEX (i1), IGNORE KEY FOR ORDER BY (i2)`.
    ///
    /// Zero or more hints are consumed; parsing stops at the first token
    /// that is not USE/IGNORE/FORCE.
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                // Unreachable: parse_one_of_keywords only returns keywords
                // from the list above.
                _ => {
                    return self.expected_ref(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token_ref(),
                    )
                }
            };
            // INDEX or KEY is mandatory after the hint type.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self
                        .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
                }
            };
            // Optional `FOR JOIN | FOR ORDER BY | FOR GROUP BY` clause.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected_ref(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token_ref(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // Parenthesized index-name list; an empty list `()` is allowed.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token_ref().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
12882
12883 pub fn parse_optional_alias(
12887 &mut self,
12888 reserved_kwds: &[Keyword],
12889 ) -> Result<Option<Ident>, ParserError> {
12890 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
12891 false
12892 }
12893 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
12894 }
12895
    /// Core of optional-alias parsing, shared by `parse_optional_alias` and
    /// the `maybe_parse_*_alias` helpers.
    ///
    /// * `reserved_kwds` — when `Some`, a word is accepted as an alias if an
    ///   explicit `AS` was given or the word is not in the list.
    /// * `validator` — dialect hook consulted for words not accepted by the
    ///   reserved-keyword rule.
    ///
    /// Quoted strings are always accepted as aliases. Returns `Ok(None)`
    /// with the token stream untouched when no alias is present — unless an
    /// explicit `AS` was consumed, in which case a missing alias is an error.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Reserved-keyword rule: with explicit AS any word qualifies;
            // otherwise only words outside the reserved list do.
            Token::Word(w)
                if reserved_kwds.is_some()
                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Dialect-specific rule for the remaining words.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Quoted strings are always valid aliases.
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                // No alias found: put the token back and report absence.
                self.prev_token();
                Ok(None) }
        }
    }
12940
    /// Parses an optional `GROUP BY` clause, including `GROUP BY ALL`,
    /// dialect-gated `WITH ROLLUP/CUBE/TOTALS` modifiers, and
    /// `GROUPING SETS (...)`. Returns `Ok(None)` when no GROUP BY follows.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` is represented by `None` expressions.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Any number of `WITH ROLLUP|CUBE|TOTALS` modifiers.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords only returns
                        // keywords from the list above.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            // `GROUPING SETS ((a, b), c, ...)`: each set is either a
            // parenthesized tuple or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
12997
12998 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
13000 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13001 let order_by =
13002 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
13003 let order_by_options = self.parse_order_by_options()?;
13004 OrderBy {
13005 kind: OrderByKind::All(order_by_options),
13006 interpolate: None,
13007 }
13008 } else {
13009 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13010 let interpolate = if self.dialect.supports_interpolate() {
13011 self.parse_interpolations()?
13012 } else {
13013 None
13014 };
13015 OrderBy {
13016 kind: OrderByKind::Expressions(exprs),
13017 interpolate,
13018 }
13019 };
13020 Ok(Some(order_by))
13021 } else {
13022 Ok(None)
13023 }
13024 }
13025
    /// Parses an optional LIMIT/OFFSET clause in its various dialect forms:
    /// `OFFSET n LIMIT m`, `LIMIT m OFFSET n`, MySQL `LIMIT n, m`, and
    /// `LIMIT m BY exprs`. Returns `Ok(None)` when nothing is present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may precede LIMIT.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL `LIMIT <offset>, <limit>` — only when no OFFSET has been
            // parsed yet and a limit expression is actually present.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // `LIMIT n BY expr[, ...]` where the dialect supports it.
            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT (but is not parsed twice).
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit == Some(None)` means LIMIT was present but carried no
        // expression (presumably a keyword form like `LIMIT ALL` — confirm
        // against parse_limit); on its own it produces no clause.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
13077
13078 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13081 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13082 let fn_name = self.parse_object_name(false)?;
13083 self.parse_function_call(fn_name)
13084 .map(TableObject::TableFunction)
13085 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13086 self.parse_parenthesized(|p| p.parse_query())
13087 .map(TableObject::TableQuery)
13088 } else {
13089 self.parse_object_name(false).map(TableObject::TableName)
13090 }
13091 }
13092
    /// Parses a possibly-qualified object name such as `schema.table`.
    /// `in_table_clause` indicates whether the name appears in table
    /// position, which affects BigQuery hyphenated-identifier handling.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13102
    /// Core parser for possibly-qualified object names (`a.b.c`), handling
    /// BigQuery hyphenated table names, double-dot notation (`db..table`),
    /// wildcard parts (when `allow_wildcards`), and dialect-defined
    /// function-valued name parts.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names may contain unquoted hyphens.
            // `end_with_period` signals the part already consumed its
            // trailing period (see parse_unquoted_hyphenated_identifier).
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // A literal `*` name part.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    // NOTE(review): this branch looks unreachable — the same
                    // condition is already handled by the outer `if` above;
                    // confirm before removing.
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // `db..table`: insert an empty part for the elided
                    // middle qualifier.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        // Dialect allows a function call as a name part.
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery: a quoted part may itself contain periods (e.g.
        // `"a.b.c"`); split such parts so the name is uniformly multipart.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13199
13200 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
13202 let mut idents = vec![];
13203 loop {
13204 let token = self.peek_token_ref();
13205 match &token.token {
13206 Token::Word(w) => {
13207 idents.push(w.to_ident(token.span));
13208 }
13209 Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
13210 break
13211 }
13212 _ => {}
13213 }
13214 self.advance_token();
13215 }
13216 Ok(idents)
13217 }
13218
13219 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
13259 let mut idents = vec![];
13260
13261 let next_token = self.next_token();
13263 match next_token.token {
13264 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13265 Token::EOF => {
13266 return Err(ParserError::ParserError(
13267 "Empty input when parsing identifier".to_string(),
13268 ))?
13269 }
13270 token => {
13271 return Err(ParserError::ParserError(format!(
13272 "Unexpected token in identifier: {token}"
13273 )))?
13274 }
13275 };
13276
13277 loop {
13279 match self.next_token().token {
13280 Token::Period => {
13282 let next_token = self.next_token();
13283 match next_token.token {
13284 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13285 Token::EOF => {
13286 return Err(ParserError::ParserError(
13287 "Trailing period in identifier".to_string(),
13288 ))?
13289 }
13290 token => {
13291 return Err(ParserError::ParserError(format!(
13292 "Unexpected token following period in identifier: {token}"
13293 )))?
13294 }
13295 }
13296 }
13297 Token::EOF => break,
13298 token => {
13299 return Err(ParserError::ParserError(format!(
13300 "Unexpected token in identifier: {token}"
13301 )))?;
13302 }
13303 }
13304 }
13305
13306 Ok(idents)
13307 }
13308
13309 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
13311 let next_token = self.next_token();
13312 match next_token.token {
13313 Token::Word(w) => Ok(w.into_ident(next_token.span)),
13314 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
13315 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
13316 _ => self.expected("identifier", next_token),
13317 }
13318 }
13319
    /// Parses an identifier that may contain unquoted hyphens (e.g. BigQuery
    /// project names like `my-project`).
    ///
    /// Returns the identifier plus a flag indicating whether parsing already
    /// consumed a trailing period (this happens when a numeric segment such
    /// as `123.` was lexed as a single Number token and had to be split).
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted words may absorb hyphenated continuations.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        // The continuation must immediately follow the `-`
                        // (no whitespace skipping).
                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // The tokenizer may fuse a trailing period
                                // into the number (e.g. `a-123.b` lexes
                                // `123.` as one Number token): split off the
                                // digits and report the period as consumed.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A purely numeric tail must be followed by
                                // whitespace unless a period continues the
                                // name.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement recorded above.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to the regular identifier parser.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
13397
13398 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
13400 if self.consume_token(&Token::LParen) {
13401 if self.peek_token_ref().token == Token::RParen {
13402 self.next_token();
13403 Ok(vec![])
13404 } else {
13405 let cols = self.parse_comma_separated_with_trailing_commas(
13406 Parser::parse_view_column,
13407 self.dialect.supports_column_definition_trailing_commas(),
13408 Self::is_reserved_for_column_alias,
13409 )?;
13410 self.expect_token(&Token::RParen)?;
13411 Ok(cols)
13412 }
13413 } else {
13414 Ok(vec![])
13415 }
13416 }
13417
13418 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
13420 let name = self.parse_identifier()?;
13421 let options = self.parse_view_column_options()?;
13422 let data_type = if dialect_of!(self is ClickHouseDialect) {
13423 Some(self.parse_data_type()?)
13424 } else {
13425 None
13426 };
13427 Ok(ViewColumnDef {
13428 name,
13429 data_type,
13430 options,
13431 })
13432 }
13433
13434 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
13435 let mut options = Vec::new();
13436 loop {
13437 let option = self.parse_optional_column_option()?;
13438 if let Some(option) = option {
13439 options.push(option);
13440 } else {
13441 break;
13442 }
13443 }
13444 if options.is_empty() {
13445 Ok(None)
13446 } else if self.dialect.supports_space_separated_column_options() {
13447 Ok(Some(ColumnOptions::SpaceSeparated(options)))
13448 } else {
13449 Ok(Some(ColumnOptions::CommaSeparated(options)))
13450 }
13451 }
13452
13453 pub fn parse_parenthesized_column_list(
13456 &mut self,
13457 optional: IsOptional,
13458 allow_empty: bool,
13459 ) -> Result<Vec<Ident>, ParserError> {
13460 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
13461 }
13462
13463 pub fn parse_parenthesized_compound_identifier_list(
13465 &mut self,
13466 optional: IsOptional,
13467 allow_empty: bool,
13468 ) -> Result<Vec<Expr>, ParserError> {
13469 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13470 Ok(Expr::CompoundIdentifier(
13471 p.parse_period_separated(|p| p.parse_identifier())?,
13472 ))
13473 })
13474 }
13475
13476 fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
13479 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
13480 p.parse_create_index_expr()
13481 })
13482 }
13483
13484 pub fn parse_parenthesized_qualified_column_list(
13487 &mut self,
13488 optional: IsOptional,
13489 allow_empty: bool,
13490 ) -> Result<Vec<ObjectName>, ParserError> {
13491 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13492 p.parse_object_name(true)
13493 })
13494 }
13495
13496 fn parse_parenthesized_column_list_inner<F, T>(
13499 &mut self,
13500 optional: IsOptional,
13501 allow_empty: bool,
13502 mut f: F,
13503 ) -> Result<Vec<T>, ParserError>
13504 where
13505 F: FnMut(&mut Parser) -> Result<T, ParserError>,
13506 {
13507 if self.consume_token(&Token::LParen) {
13508 if allow_empty && self.peek_token_ref().token == Token::RParen {
13509 self.next_token();
13510 Ok(vec![])
13511 } else {
13512 let cols = self.parse_comma_separated(|p| f(p))?;
13513 self.expect_token(&Token::RParen)?;
13514 Ok(cols)
13515 }
13516 } else if optional == Optional {
13517 Ok(vec![])
13518 } else {
13519 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
13520 }
13521 }
13522
13523 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
13525 if self.consume_token(&Token::LParen) {
13526 let cols = self.parse_comma_separated(|p| {
13527 let name = p.parse_identifier()?;
13528 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
13529 Ok(TableAliasColumnDef { name, data_type })
13530 })?;
13531 self.expect_token(&Token::RParen)?;
13532 Ok(cols)
13533 } else {
13534 Ok(vec![])
13535 }
13536 }
13537
13538 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
13540 self.expect_token(&Token::LParen)?;
13541 let n = self.parse_literal_uint()?;
13542 self.expect_token(&Token::RParen)?;
13543 Ok(n)
13544 }
13545
13546 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
13548 if self.consume_token(&Token::LParen) {
13549 let n = self.parse_literal_uint()?;
13550 self.expect_token(&Token::RParen)?;
13551 Ok(Some(n))
13552 } else {
13553 Ok(None)
13554 }
13555 }
13556
    /// Parses the optional fields qualifier of an INTERVAL type, e.g.
    /// `YEAR`, `DAY TO SECOND`, `MINUTE TO SECOND`. Returns `Ok(None)` when
    /// no field keyword follows.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            Some(Keyword::YEAR) => {
                // `YEAR TO MONTH` is the only YEAR range accepted here.
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        // Unreachable: expect_one_of_keywords only returns
                        // keywords from the list above.
                        _ => {
                            self.prev_token();
                            self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        // Unreachable: expect_one_of_keywords only returns
                        // keywords from the list above.
                        _ => {
                            self.prev_token();
                            self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            Some(Keyword::MINUTE) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            // MONTH and SECOND never start a `X TO Y` range here.
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            // Unreachable: parse_one_of_keywords only returns keywords from
            // the list above.
            Some(_) => {
                self.prev_token();
                self.expected_ref(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token_ref(),
                )
            }
            None => Ok(None),
        }
    }
13635
13636 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
13644 self.expect_keyword_is(Keyword::DATETIME64)?;
13645 self.expect_token(&Token::LParen)?;
13646 let precision = self.parse_literal_uint()?;
13647 let time_zone = if self.consume_token(&Token::Comma) {
13648 Some(self.parse_literal_string()?)
13649 } else {
13650 None
13651 };
13652 self.expect_token(&Token::RParen)?;
13653 Ok((precision, time_zone))
13654 }
13655
13656 pub fn parse_optional_character_length(
13658 &mut self,
13659 ) -> Result<Option<CharacterLength>, ParserError> {
13660 if self.consume_token(&Token::LParen) {
13661 let character_length = self.parse_character_length()?;
13662 self.expect_token(&Token::RParen)?;
13663 Ok(Some(character_length))
13664 } else {
13665 Ok(None)
13666 }
13667 }
13668
13669 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
13671 if self.consume_token(&Token::LParen) {
13672 let binary_length = self.parse_binary_length()?;
13673 self.expect_token(&Token::RParen)?;
13674 Ok(Some(binary_length))
13675 } else {
13676 Ok(None)
13677 }
13678 }
13679
13680 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
13682 if self.parse_keyword(Keyword::MAX) {
13683 return Ok(CharacterLength::Max);
13684 }
13685 let length = self.parse_literal_uint()?;
13686 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
13687 Some(CharLengthUnits::Characters)
13688 } else if self.parse_keyword(Keyword::OCTETS) {
13689 Some(CharLengthUnits::Octets)
13690 } else {
13691 None
13692 };
13693 Ok(CharacterLength::IntegerLength { length, unit })
13694 }
13695
13696 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
13698 if self.parse_keyword(Keyword::MAX) {
13699 return Ok(BinaryLength::Max);
13700 }
13701 let length = self.parse_literal_uint()?;
13702 Ok(BinaryLength::IntegerLength { length })
13703 }
13704
13705 pub fn parse_optional_precision_scale(
13707 &mut self,
13708 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
13709 if self.consume_token(&Token::LParen) {
13710 let n = self.parse_literal_uint()?;
13711 let scale = if self.consume_token(&Token::Comma) {
13712 Some(self.parse_literal_uint()?)
13713 } else {
13714 None
13715 };
13716 self.expect_token(&Token::RParen)?;
13717 Ok((Some(n), scale))
13718 } else {
13719 Ok((None, None))
13720 }
13721 }
13722
13723 pub fn parse_exact_number_optional_precision_scale(
13725 &mut self,
13726 ) -> Result<ExactNumberInfo, ParserError> {
13727 if self.consume_token(&Token::LParen) {
13728 let precision = self.parse_literal_uint()?;
13729 let scale = if self.consume_token(&Token::Comma) {
13730 Some(self.parse_signed_integer()?)
13731 } else {
13732 None
13733 };
13734
13735 self.expect_token(&Token::RParen)?;
13736
13737 match scale {
13738 None => Ok(ExactNumberInfo::Precision(precision)),
13739 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
13740 }
13741 } else {
13742 Ok(ExactNumberInfo::None)
13743 }
13744 }
13745
13746 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
13748 let is_negative = self.consume_token(&Token::Minus);
13749
13750 if !is_negative {
13751 let _ = self.consume_token(&Token::Plus);
13752 }
13753
13754 let current_token = self.peek_token_ref();
13755 match ¤t_token.token {
13756 Token::Number(s, _) => {
13757 let s = s.clone();
13758 let span_start = current_token.span.start;
13759 self.advance_token();
13760 let value = Self::parse::<i64>(s, span_start)?;
13761 Ok(if is_negative { -value } else { value })
13762 }
13763 _ => self.expected_ref("number", current_token),
13764 }
13765 }
13766
13767 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
13769 if self.consume_token(&Token::LParen) {
13770 let mut modifiers = Vec::new();
13771 loop {
13772 let next_token = self.next_token();
13773 match next_token.token {
13774 Token::Word(w) => modifiers.push(w.to_string()),
13775 Token::Number(n, _) => modifiers.push(n),
13776 Token::SingleQuotedString(s) => modifiers.push(s),
13777
13778 Token::Comma => {
13779 continue;
13780 }
13781 Token::RParen => {
13782 break;
13783 }
13784 _ => self.expected("type modifiers", next_token)?,
13785 }
13786 }
13787
13788 Ok(Some(modifiers))
13789 } else {
13790 Ok(None)
13791 }
13792 }
13793
13794 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
13796 where
13797 F: FnOnce(Box<DataType>) -> DataType,
13798 {
13799 self.expect_token(&Token::LParen)?;
13800 let inside_type = self.parse_data_type()?;
13801 self.expect_token(&Token::RParen)?;
13802 Ok(parent_type(inside_type.into()))
13803 }
13804
13805 fn parse_delete_setexpr_boxed(
13809 &mut self,
13810 delete_token: TokenWithSpan,
13811 ) -> Result<Box<SetExpr>, ParserError> {
13812 Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
13813 }
13814
    /// Parses a `DELETE` statement, starting just after the `DELETE`
    /// keyword (whose token is passed in for span tracking). Handles the
    /// multi-table form `DELETE t1, t2 FROM ...`, dialects where `FROM`
    /// may be omitted entirely, and the optional OUTPUT / USING / WHERE /
    /// RETURNING / ORDER BY / LIMIT clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // `tables` holds target tables named between DELETE and FROM;
        // `with_from_keyword` records whether FROM was written so the
        // statement can be round-tripped faithfully.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            // BigQuery, Oracle, and the generic dialect allow omitting
            // the FROM keyword altogether.
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                // Otherwise, names before FROM are the delete targets.
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        // Optional OUTPUT clause (parsed with backtracking).
        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            // Preserve whether the FROM keyword appeared in the input.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
13879
13880 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
13883 let modifier_keyword =
13884 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
13885
13886 let id = self.parse_literal_uint()?;
13887
13888 let modifier = match modifier_keyword {
13889 Some(Keyword::CONNECTION) => Some(KillType::Connection),
13890 Some(Keyword::QUERY) => Some(KillType::Query),
13891 Some(Keyword::MUTATION) => {
13892 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
13893 Some(KillType::Mutation)
13894 } else {
13895 self.expected_ref(
13896 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
13897 self.peek_token_ref(),
13898 )?
13899 }
13900 }
13901 _ => None,
13902 };
13903
13904 Ok(Statement::Kill { modifier, id })
13905 }
13906
    /// Parses the remainder of an `EXPLAIN` / `DESCRIBE` statement.
    ///
    /// First consumes any leading modifiers (utility options in
    /// parentheses, `QUERY PLAN`, `ESTIMATE`, or `ANALYZE` / `VERBOSE` /
    /// `FORMAT ...`), then tries to parse a full statement to explain.
    /// If no statement follows, falls back to the `DESCRIBE <table>`
    /// form and returns `Statement::ExplainTable`.
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options (e.g. `EXPLAIN (ANALYZE, ...)`)
        // are only attempted for EXPLAIN proper, on supporting dialects.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        // Try to parse a statement to explain; backtracks on failure.
        match self.maybe_parse(|parser| parser.parse_statement())? {
            // Nested EXPLAIN is rejected: it must be the plan root.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            // No statement parsed: treat as `DESCRIBE [TABLE] <name>`.
            _ => {
                // Hive-style EXTENDED / FORMATTED describe modifiers.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
13977
    /// Parses a complete query: an optional `WITH` clause followed by
    /// either a DML body (INSERT / UPDATE / DELETE / MERGE, for
    /// CTE-wrapped DML) or a regular query body plus its trailing
    /// clauses (ORDER BY, LIMIT, SETTINGS, FETCH, FOR/locks, FORMAT,
    /// pipe operators).
    ///
    /// Recursion depth is bounded by `recursion_counter`; the guard is
    /// held for the duration of this call.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            // `get_current_token` must run right after consuming WITH so
            // the recorded token is the WITH keyword itself.
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // DML bodies carry no trailing query clauses, so all remaining
        // fields are empty defaults.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            // Regular query: body first, then trailing clauses in order.
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // Each FOR is either a FOR XML/JSON/BROWSE clause (at most
            // one, ends the loop) or a locking clause (FOR UPDATE etc.).
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // Trailing `FORMAT <ident>` / `FORMAT NULL` (dialect-gated).
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14109
    /// Parses zero or more pipe operators, each introduced by the `|>`
    /// token, returning them in source order. The operator keyword is
    /// validated against the supported set up front, then each arm
    /// consumes that operator's arguments.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    // `LIMIT <expr> [OFFSET <expr>]`.
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // Optional aggregate expressions (absent if GROUP
                    // follows immediately), then an optional GROUP BY
                    // list; both allow aliases and ordering hints.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                // INTERSECT and EXCEPT require an explicit DISTINCT
                // quantifier in pipe syntax.
                Keyword::INTERSECT => {
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    // CALL must be followed by a function invocation;
                    // anything else is rejected.
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // PIVOT(<aggs> FOR <column> IN (<values>)) [alias]
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    // The IN list is ANY [ORDER BY ...], a subquery, or
                    // an explicit value list.
                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // UNPIVOT(<value> FOR <name> IN (<columns>)) [alias]
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Push the join keyword back so parse_joins sees it;
                    // a pipe join must be exactly one join.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                // Defensive: `expect_one_of_keywords` only returns listed
                // keywords, so this arm indicates a list/match mismatch.
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
14328
14329 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
14330 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
14331 {
14332 let key_values = self.parse_comma_separated(|p| {
14333 let key = p.parse_identifier()?;
14334 p.expect_token(&Token::Eq)?;
14335 let value = p.parse_expr()?;
14336 Ok(Setting { key, value })
14337 })?;
14338 Some(key_values)
14339 } else {
14340 None
14341 };
14342 Ok(settings)
14343 }
14344
14345 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
14347 if self.parse_keyword(Keyword::XML) {
14348 Ok(Some(self.parse_for_xml()?))
14349 } else if self.parse_keyword(Keyword::JSON) {
14350 Ok(Some(self.parse_for_json()?))
14351 } else if self.parse_keyword(Keyword::BROWSE) {
14352 Ok(Some(ForClause::Browse))
14353 } else {
14354 Ok(None)
14355 }
14356 }
14357
14358 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
14360 let for_xml = if self.parse_keyword(Keyword::RAW) {
14361 let mut element_name = None;
14362 if self.peek_token_ref().token == Token::LParen {
14363 self.expect_token(&Token::LParen)?;
14364 element_name = Some(self.parse_literal_string()?);
14365 self.expect_token(&Token::RParen)?;
14366 }
14367 ForXml::Raw(element_name)
14368 } else if self.parse_keyword(Keyword::AUTO) {
14369 ForXml::Auto
14370 } else if self.parse_keyword(Keyword::EXPLICIT) {
14371 ForXml::Explicit
14372 } else if self.parse_keyword(Keyword::PATH) {
14373 let mut element_name = None;
14374 if self.peek_token_ref().token == Token::LParen {
14375 self.expect_token(&Token::LParen)?;
14376 element_name = Some(self.parse_literal_string()?);
14377 self.expect_token(&Token::RParen)?;
14378 }
14379 ForXml::Path(element_name)
14380 } else {
14381 return Err(ParserError::ParserError(
14382 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
14383 ));
14384 };
14385 let mut elements = false;
14386 let mut binary_base64 = false;
14387 let mut root = None;
14388 let mut r#type = false;
14389 while self.peek_token_ref().token == Token::Comma {
14390 self.next_token();
14391 if self.parse_keyword(Keyword::ELEMENTS) {
14392 elements = true;
14393 } else if self.parse_keyword(Keyword::BINARY) {
14394 self.expect_keyword_is(Keyword::BASE64)?;
14395 binary_base64 = true;
14396 } else if self.parse_keyword(Keyword::ROOT) {
14397 self.expect_token(&Token::LParen)?;
14398 root = Some(self.parse_literal_string()?);
14399 self.expect_token(&Token::RParen)?;
14400 } else if self.parse_keyword(Keyword::TYPE) {
14401 r#type = true;
14402 }
14403 }
14404 Ok(ForClause::Xml {
14405 for_xml,
14406 elements,
14407 binary_base64,
14408 root,
14409 r#type,
14410 })
14411 }
14412
14413 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
14415 let for_json = if self.parse_keyword(Keyword::AUTO) {
14416 ForJson::Auto
14417 } else if self.parse_keyword(Keyword::PATH) {
14418 ForJson::Path
14419 } else {
14420 return Err(ParserError::ParserError(
14421 "Expected FOR JSON [AUTO | PATH ]".to_string(),
14422 ));
14423 };
14424 let mut root = None;
14425 let mut include_null_values = false;
14426 let mut without_array_wrapper = false;
14427 while self.peek_token_ref().token == Token::Comma {
14428 self.next_token();
14429 if self.parse_keyword(Keyword::ROOT) {
14430 self.expect_token(&Token::LParen)?;
14431 root = Some(self.parse_literal_string()?);
14432 self.expect_token(&Token::RParen)?;
14433 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
14434 include_null_values = true;
14435 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
14436 without_array_wrapper = true;
14437 }
14438 }
14439 Ok(ForClause::Json {
14440 for_json,
14441 root,
14442 include_null_values,
14443 without_array_wrapper,
14444 })
14445 }
14446
    /// Parses a single CTE in a `WITH` clause:
    /// `<name> [(<columns>)] [AS] [NOT MATERIALIZED] (<query>) [FROM <id>]`.
    ///
    /// When the dialect allows omitting `AS`, first tries (with
    /// backtracking) the short form `<name> (<query>)`.
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // Short form without AS: `<name> (<query>) [FROM <id>]`.
        // `maybe_parse` backtracks if the parenthesized query fails.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Long form: an optional column list, then AS (optional only on
        // supporting dialects).
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // Postgres-only `[NOT] MATERIALIZED` hint between AS and `(`.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        // The closing paren token is kept for span tracking.
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        if self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
14521
14522 pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
14531 let expr = if self.peek_keyword(Keyword::SELECT)
14534 || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
14535 {
14536 SetExpr::Select(self.parse_select().map(Box::new)?)
14537 } else if self.consume_token(&Token::LParen) {
14538 let subquery = self.parse_query()?;
14540 self.expect_token(&Token::RParen)?;
14541 SetExpr::Query(subquery)
14542 } else if self.parse_keyword(Keyword::VALUES) {
14543 let is_mysql = dialect_of!(self is MySqlDialect);
14544 SetExpr::Values(self.parse_values(is_mysql, false)?)
14545 } else if self.parse_keyword(Keyword::VALUE) {
14546 let is_mysql = dialect_of!(self is MySqlDialect);
14547 SetExpr::Values(self.parse_values(is_mysql, true)?)
14548 } else if self.parse_keyword(Keyword::TABLE) {
14549 SetExpr::Table(Box::new(self.parse_as_table()?))
14550 } else {
14551 return self.expected_ref(
14552 "SELECT, VALUES, or a subquery in the query body",
14553 self.peek_token_ref(),
14554 );
14555 };
14556
14557 self.parse_remaining_set_exprs(expr, precedence)
14558 }
14559
14560 fn parse_remaining_set_exprs(
14564 &mut self,
14565 mut expr: SetExpr,
14566 precedence: u8,
14567 ) -> Result<Box<SetExpr>, ParserError> {
14568 loop {
14569 let op = self.parse_set_operator(&self.peek_token().token);
14571 let next_precedence = match op {
14572 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
14574 10
14575 }
14576 Some(SetOperator::Intersect) => 20,
14578 None => break,
14580 };
14581 if precedence >= next_precedence {
14582 break;
14583 }
14584 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
14586 expr = SetExpr::SetOperation {
14587 left: Box::new(expr),
14588 op: op.unwrap(),
14589 set_quantifier,
14590 right: self.parse_query_body(next_precedence)?,
14591 };
14592 }
14593
14594 Ok(expr.into())
14595 }
14596
14597 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
14599 match token {
14600 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
14601 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
14602 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
14603 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
14604 _ => None,
14605 }
14606 }
14607
14608 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
14610 match op {
14611 Some(
14612 SetOperator::Except
14613 | SetOperator::Intersect
14614 | SetOperator::Union
14615 | SetOperator::Minus,
14616 ) => {
14617 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
14618 SetQuantifier::DistinctByName
14619 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
14620 SetQuantifier::ByName
14621 } else if self.parse_keyword(Keyword::ALL) {
14622 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
14623 SetQuantifier::AllByName
14624 } else {
14625 SetQuantifier::All
14626 }
14627 } else if self.parse_keyword(Keyword::DISTINCT) {
14628 SetQuantifier::Distinct
14629 } else {
14630 SetQuantifier::None
14631 }
14632 }
14633 _ => SetQuantifier::None,
14634 }
14635 }
14636
    /// Parses the body of a `SELECT` statement: projection, FROM, WHERE,
    /// GROUP BY, HAVING, WINDOW/QUALIFY and the dialect-gated clauses in
    /// between. Set operators and ORDER BY are handled by the caller.
    ///
    /// For dialects with FROM-first support, also accepts `FROM ... SELECT ...`
    /// and a bare `FROM ...` with no SELECT at all.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        // FROM clause captured when it precedes SELECT (FROM-first flavor).
        let mut from_first = None;

        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                // Bare `FROM t`: synthesize an empty SELECT; the FROM token
                // stands in for the missing SELECT token.
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        // Hint comments must be scanned before any regular token is consumed.
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // BigQuery `AS VALUE` / `AS STRUCT` value-table modifiers.
        let value_table_mode = self.parse_value_table_mode()?;

        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // TOP may appear before or after DISTINCT depending on the dialect;
        // only one of the two TOP parses below can fire.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // Distinctness already parsed among the select modifiers wins.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM t` with no projection at all.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Reuse the FROM-first clause if present; `from_first` is shadowed
        // here as a bool recording which flavor was seen.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Zero or more LATERAL VIEW clauses.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                // Column aliases stop where the next clause keyword begins.
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        // Dialect-gated PREWHERE clause.
        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // Hierarchical-query clauses (START WITH / CONNECT BY).
        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; record which came
        // first so the statement round-trips faithfully.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
14864
14865 fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
14874 let supports_hints = self.dialect.supports_comment_optimizer_hint();
14875 if !supports_hints {
14876 return Ok(vec![]);
14877 }
14878 let mut hints = vec![];
14879 loop {
14880 let t = self.peek_nth_token_no_skip_ref(0);
14881 let Token::Whitespace(ws) = &t.token else {
14882 break;
14883 };
14884 match ws {
14885 Whitespace::SingleLineComment { comment, prefix } => {
14886 if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
14887 hints.push(OptimizerHint {
14888 prefix: hint_prefix,
14889 text,
14890 style: OptimizerHintStyle::SingleLine {
14891 prefix: prefix.clone(),
14892 },
14893 });
14894 }
14895 self.next_token_no_skip();
14896 }
14897 Whitespace::MultiLineComment(comment) => {
14898 if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
14899 hints.push(OptimizerHint {
14900 prefix: hint_prefix,
14901 text,
14902 style: OptimizerHintStyle::MultiLine,
14903 });
14904 }
14905 self.next_token_no_skip();
14906 }
14907 Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
14908 self.next_token_no_skip();
14909 }
14910 }
14911 }
14912 Ok(hints)
14913 }
14914
14915 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
14918 let (before_plus, text) = comment.split_once('+')?;
14919 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
14920 Some((before_plus.to_string(), text.to_string()))
14921 } else {
14922 None
14923 }
14924 }
14925
    /// Parses MySQL-style SELECT modifiers (HIGH_PRIORITY, STRAIGHT_JOIN and
    /// the SQL_* flags) together with ALL/DISTINCT/DISTINCTROW, which may be
    /// interleaved with them.
    ///
    /// Returns the collected modifiers (`None` when no flag was set) plus the
    /// distinctness parsed here; the caller prefers this distinctness over a
    /// later ALL/DISTINCT parse.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    // Rewind so parse_all_or_distinct sees the keyword itself.
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                Keyword::DISTINCTROW if distinct.is_none() => {
                    // Treated as a synonym for DISTINCT.
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                _ => {
                    // A repeated ALL/DISTINCT/DISTINCTROW falls through to
                    // here (the guards above reject it): rewind and report
                    // the token as unexpected.
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Collapse an all-default modifier set to None.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
14987
14988 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
14989 if !dialect_of!(self is BigQueryDialect) {
14990 return Ok(None);
14991 }
14992
14993 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
14994 Some(ValueTableMode::DistinctAsValue)
14995 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
14996 Some(ValueTableMode::DistinctAsStruct)
14997 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
14998 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
14999 {
15000 Some(ValueTableMode::AsValue)
15001 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
15002 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
15003 {
15004 Some(ValueTableMode::AsStruct)
15005 } else if self.parse_keyword(Keyword::AS) {
15006 self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
15007 } else {
15008 None
15009 };
15010
15011 Ok(mode)
15012 }
15013
15014 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15018 where
15019 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15020 {
15021 let current_state = self.state;
15022 self.state = state;
15023 let res = f(self);
15024 self.state = current_state;
15025 res
15026 }
15027
15028 pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
15030 let mut clauses = Vec::with_capacity(2);
15031 loop {
15032 if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
15033 clauses.push(ConnectByKind::StartWith {
15034 start_token: self.token_at(idx).clone().into(),
15035 condition: self.parse_expr()?.into(),
15036 });
15037 } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
15038 {
15039 clauses.push(ConnectByKind::ConnectBy {
15040 connect_token: self.token_at(idx).clone().into(),
15041 nocycle: self.parse_keyword(Keyword::NOCYCLE),
15042 relationships: self.with_state(ParserState::ConnectBy, |parser| {
15043 parser.parse_comma_separated(Parser::parse_expr)
15044 })?,
15045 });
15046 } else {
15047 break;
15048 }
15049 }
15050 Ok(clauses)
15051 }
15052
15053 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15055 let token1 = self.next_token();
15056 let token2 = self.next_token();
15057 let token3 = self.next_token();
15058
15059 let table_name;
15060 let schema_name;
15061 if token2 == Token::Period {
15062 match token1.token {
15063 Token::Word(w) => {
15064 schema_name = w.value;
15065 }
15066 _ => {
15067 return self.expected("Schema name", token1);
15068 }
15069 }
15070 match token3.token {
15071 Token::Word(w) => {
15072 table_name = w.value;
15073 }
15074 _ => {
15075 return self.expected("Table name", token3);
15076 }
15077 }
15078 Ok(Table {
15079 table_name: Some(table_name),
15080 schema_name: Some(schema_name),
15081 })
15082 } else {
15083 match token1.token {
15084 Token::Word(w) => {
15085 table_name = w.value;
15086 }
15087 _ => {
15088 return self.expected("Table name", token1);
15089 }
15090 }
15091 Ok(Table {
15092 table_name: Some(table_name),
15093 schema_name: None,
15094 })
15095 }
15096 }
15097
15098 fn parse_set_role(
15100 &mut self,
15101 modifier: Option<ContextModifier>,
15102 ) -> Result<Statement, ParserError> {
15103 self.expect_keyword_is(Keyword::ROLE)?;
15104
15105 let role_name = if self.parse_keyword(Keyword::NONE) {
15106 None
15107 } else {
15108 Some(self.parse_identifier()?)
15109 };
15110 Ok(Statement::Set(Set::SetRole {
15111 context_modifier: modifier,
15112 role_name,
15113 }))
15114 }
15115
15116 fn parse_set_values(
15117 &mut self,
15118 parenthesized_assignment: bool,
15119 ) -> Result<Vec<Expr>, ParserError> {
15120 let mut values = vec![];
15121
15122 if parenthesized_assignment {
15123 self.expect_token(&Token::LParen)?;
15124 }
15125
15126 loop {
15127 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15128 expr
15129 } else if let Ok(expr) = self.parse_expr() {
15130 expr
15131 } else {
15132 self.expected_ref("variable value", self.peek_token_ref())?
15133 };
15134
15135 values.push(value);
15136 if self.consume_token(&Token::Comma) {
15137 continue;
15138 }
15139
15140 if parenthesized_assignment {
15141 self.expect_token(&Token::RParen)?;
15142 }
15143 return Ok(values);
15144 }
15145 }
15146
15147 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15148 let modifier =
15149 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15150
15151 Self::keyword_to_modifier(modifier)
15152 }
15153
15154 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15156 let scope = self.parse_context_modifier();
15157
15158 let name = if self.dialect.supports_parenthesized_set_variables()
15159 && self.consume_token(&Token::LParen)
15160 {
15161 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15165 } else {
15166 self.parse_object_name(false)?
15167 };
15168
15169 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15170 return self.expected_ref("assignment operator", self.peek_token_ref());
15171 }
15172
15173 let value = self.parse_expr()?;
15174
15175 Ok(SetAssignment { scope, name, value })
15176 }
15177
    /// Parses the body of a `SET` statement (the `SET` keyword itself has
    /// already been consumed).
    ///
    /// Dispatches between the many SET forms: `SET ROLE`, `SET TIME ZONE`,
    /// `SET NAMES`, transaction characteristics, session authorization,
    /// comma-separated assignments, parenthesized variable lists, and
    /// dialect-specific session parameters.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // `SET HIVEVAR:name = value` form.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // HIVEVAR is not combined with a SESSION/LOCAL/GLOBAL scope.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET [scope] ROLE ...` — tried speculatively so we can backtrack.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // With an operator this is an ordinary assignment to the
                // TIMEZONE variable.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // Operator-less `SET TIME ZONE <expr>` form.
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // `SET NAMES DEFAULT` or `SET NAMES <charset> [COLLATE <coll>]`.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET CHARACTERISTICS AS TRANSACTION <modes>` (session-wide).
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT <id>` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET <scope> AUTHORIZATION {DEFAULT | <user>}` — the scope
            // modifier is mandatory for this form.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            if scope.is_some() {
                // Rewind one token so the per-assignment parser can re-read
                // the scope keyword itself.
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten to SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a parenthesized variable list or a single dotted name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        if self.dialect.supports_set_stmt_without_operator() {
            // No `=`/`TO` operator: rewind and re-parse the name as a
            // session parameter (e.g. `SET STATISTICS IO ON`).
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
15347
    /// Parses session parameters that follow `SET` without an `=`/`TO`
    /// operator, e.g. `SET STATISTICS IO ON`, `SET IDENTITY_INSERT t ON`,
    /// `SET OFFSETS ... ON`. Anything unrecognized is captured as a generic
    /// name/value session parameter.
    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::STATISTICS) {
            let topic = match self.parse_one_of_keywords(&[
                Keyword::IO,
                Keyword::PROFILE,
                Keyword::TIME,
                Keyword::XML,
            ]) {
                Some(Keyword::IO) => SessionParamStatsTopic::IO,
                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
                _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
            };
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
                    topic,
                    value,
                }))
                .into(),
            )
        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
            let obj = self.parse_object_name(false)?;
            let value = self.parse_session_param_value()?;
            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
                SetSessionParamIdentityInsert { obj, value },
            ))
            .into())
        } else if self.parse_keyword(Keyword::OFFSETS) {
            // OFFSETS takes a comma-separated list of keyword names.
            let keywords = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match &next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("SQL keyword", next_token),
                }
            })?;
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
                    keywords,
                    value,
                }))
                .into(),
            )
        } else {
            // Generic fallback: one or more parameter names, then an
            // expression stored in its string form.
            let names = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("Session param name", next_token),
                }
            })?;
            let value = self.parse_expr()?.to_string();
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
                    names,
                    value,
                }))
                .into(),
            )
        }
    }
15412
15413 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
15414 if self.parse_keyword(Keyword::ON) {
15415 Ok(SessionParamValue::On)
15416 } else if self.parse_keyword(Keyword::OFF) {
15417 Ok(SessionParamValue::Off)
15418 } else {
15419 self.expected_ref("ON or OFF", self.peek_token_ref())
15420 }
15421 }
15422
    /// Parses a `SHOW` statement (the `SHOW` keyword itself has already been
    /// consumed).
    ///
    /// A run of optional leading modifiers (TERSE, EXTENDED, FULL, SESSION,
    /// GLOBAL, EXTERNAL) is consumed first; the next keyword then selects the
    /// concrete SHOW variant. Anything unrecognized falls through to
    /// `SHOW <variable>`.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // EXTENDED/FULL only combine with the variants handled above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): VARIABLES is consumed even when the dialect check
            // fails, so other dialects fall through with the keyword already
            // eaten — confirm this is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): same keyword-consumption caveat as VARIABLES.
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <identifier...>`.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
15484
15485 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
15486 Ok(Statement::ShowCharset(ShowCharset {
15488 is_shorthand,
15489 filter: self.parse_show_statement_filter()?,
15490 }))
15491 }
15492
15493 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
15494 let history = self.parse_keyword(Keyword::HISTORY);
15495 let show_options = self.parse_show_stmt_options()?;
15496 Ok(Statement::ShowDatabases {
15497 terse,
15498 history,
15499 show_options,
15500 })
15501 }
15502
15503 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
15504 let history = self.parse_keyword(Keyword::HISTORY);
15505 let show_options = self.parse_show_stmt_options()?;
15506 Ok(Statement::ShowSchemas {
15507 terse,
15508 history,
15509 show_options,
15510 })
15511 }
15512
15513 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
15515 let obj_type = match self.expect_one_of_keywords(&[
15516 Keyword::TABLE,
15517 Keyword::TRIGGER,
15518 Keyword::FUNCTION,
15519 Keyword::PROCEDURE,
15520 Keyword::EVENT,
15521 Keyword::VIEW,
15522 ])? {
15523 Keyword::TABLE => Ok(ShowCreateObject::Table),
15524 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
15525 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
15526 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
15527 Keyword::EVENT => Ok(ShowCreateObject::Event),
15528 Keyword::VIEW => Ok(ShowCreateObject::View),
15529 keyword => Err(ParserError::ParserError(format!(
15530 "Unable to map keyword to ShowCreateObject: {keyword:?}"
15531 ))),
15532 }?;
15533
15534 let obj_name = self.parse_object_name(false)?;
15535
15536 Ok(Statement::ShowCreate { obj_type, obj_name })
15537 }
15538
15539 pub fn parse_show_columns(
15541 &mut self,
15542 extended: bool,
15543 full: bool,
15544 ) -> Result<Statement, ParserError> {
15545 let show_options = self.parse_show_stmt_options()?;
15546 Ok(Statement::ShowColumns {
15547 extended,
15548 full,
15549 show_options,
15550 })
15551 }
15552
15553 fn parse_show_tables(
15554 &mut self,
15555 terse: bool,
15556 extended: bool,
15557 full: bool,
15558 external: bool,
15559 ) -> Result<Statement, ParserError> {
15560 let history = !external && self.parse_keyword(Keyword::HISTORY);
15561 let show_options = self.parse_show_stmt_options()?;
15562 Ok(Statement::ShowTables {
15563 terse,
15564 history,
15565 extended,
15566 full,
15567 external,
15568 show_options,
15569 })
15570 }
15571
15572 fn parse_show_views(
15573 &mut self,
15574 terse: bool,
15575 materialized: bool,
15576 ) -> Result<Statement, ParserError> {
15577 let show_options = self.parse_show_stmt_options()?;
15578 Ok(Statement::ShowViews {
15579 materialized,
15580 terse,
15581 show_options,
15582 })
15583 }
15584
15585 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
15587 let filter = self.parse_show_statement_filter()?;
15588 Ok(Statement::ShowFunctions { filter })
15589 }
15590
15591 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
15593 let filter = self.parse_show_statement_filter()?;
15594 Ok(Statement::ShowCollation { filter })
15595 }
15596
15597 pub fn parse_show_statement_filter(
15599 &mut self,
15600 ) -> Result<Option<ShowStatementFilter>, ParserError> {
15601 if self.parse_keyword(Keyword::LIKE) {
15602 Ok(Some(ShowStatementFilter::Like(
15603 self.parse_literal_string()?,
15604 )))
15605 } else if self.parse_keyword(Keyword::ILIKE) {
15606 Ok(Some(ShowStatementFilter::ILike(
15607 self.parse_literal_string()?,
15608 )))
15609 } else if self.parse_keyword(Keyword::WHERE) {
15610 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
15611 } else {
15612 self.maybe_parse(|parser| -> Result<String, ParserError> {
15613 parser.parse_literal_string()
15614 })?
15615 .map_or(Ok(None), |filter| {
15616 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
15617 })
15618 }
15619 }
15620
15621 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
15623 let parsed_keyword = if dialect_of!(self is HiveDialect) {
15625 if self.parse_keyword(Keyword::DEFAULT) {
15627 return Ok(Statement::Use(Use::Default));
15628 }
15629 None } else if dialect_of!(self is DatabricksDialect) {
15631 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
15632 } else if dialect_of!(self is SnowflakeDialect) {
15633 self.parse_one_of_keywords(&[
15634 Keyword::DATABASE,
15635 Keyword::SCHEMA,
15636 Keyword::WAREHOUSE,
15637 Keyword::ROLE,
15638 Keyword::SECONDARY,
15639 ])
15640 } else {
15641 None };
15643
15644 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
15645 self.parse_secondary_roles()?
15646 } else {
15647 let obj_name = self.parse_object_name(false)?;
15648 match parsed_keyword {
15649 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
15650 Some(Keyword::DATABASE) => Use::Database(obj_name),
15651 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
15652 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
15653 Some(Keyword::ROLE) => Use::Role(obj_name),
15654 _ => Use::Object(obj_name),
15655 }
15656 };
15657
15658 Ok(Statement::Use(result))
15659 }
15660
15661 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
15662 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
15663 if self.parse_keyword(Keyword::NONE) {
15664 Ok(Use::SecondaryRoles(SecondaryRoles::None))
15665 } else if self.parse_keyword(Keyword::ALL) {
15666 Ok(Use::SecondaryRoles(SecondaryRoles::All))
15667 } else {
15668 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
15669 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
15670 }
15671 }
15672
15673 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
15675 let relation = self.parse_table_factor()?;
15676 let joins = self.parse_joins()?;
15680 Ok(TableWithJoins { relation, joins })
15681 }
15682
    /// Parses zero or more JOIN clauses following a table factor.
    ///
    /// Handles CROSS JOIN / CROSS APPLY, OUTER APPLY, ASOF JOIN, NATURAL
    /// joins and the INNER/LEFT/RIGHT/FULL/SEMI/ANTI/STRAIGHT_JOIN family,
    /// stopping at the first token that cannot start a join.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix before the join keyword.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow a constraint on CROSS JOIN.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // OUTER here can only begin OUTER APPLY.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // ASOF JOIN requires a MATCH_CONDITION before the constraint.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) the keyword that decides the
                // join type; a non-keyword means no further joins.
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a JoinOperator variant *constructor*
                // (fn(JoinConstraint) -> JoinOperator); the constraint is
                // supplied once it has been parsed below.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the LEFT/RIGHT keyword itself.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        let _ = self.next_token();
                        // OUTER is optional in FULL [OUTER] JOIN.
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // Bare OUTER JOIN without LEFT/RIGHT/FULL is invalid.
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        // NATURAL must be followed by a join keyword.
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: end of the join list.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    // Fold the following joins into a nested join attached to
                    // this relation for dialects that require it.
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
15851
15852 fn peek_parens_less_nested_join(&self) -> bool {
15853 matches!(
15854 self.peek_token_ref().token,
15855 Token::Word(Word {
15856 keyword: Keyword::JOIN
15857 | Keyword::INNER
15858 | Keyword::LEFT
15859 | Keyword::RIGHT
15860 | Keyword::FULL,
15861 ..
15862 })
15863 )
15864 }
15865
    /// Parse a table factor: the base relation of a `FROM` item — a named
    /// table, derived table (subquery), table function, `UNNEST`,
    /// `JSON_TABLE`/`OPENJSON`/`XMLTABLE`, `SEMANTIC_VIEW`, stage reference,
    /// or parenthesized join — optionally followed by PIVOT/UNPIVOT and
    /// MATCH_RECOGNIZE transformations.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        // Table factors nest via parentheses and joins; bound the depth.
        let _guard = self.recursion_counter.try_decrease()?;
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL must be followed by a subquery or a table function.
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                // LATERAL <function>(<args>) [<alias>]
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // TABLE(<expr>) table function.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // A '(' may start a derived table (subquery) or a nested join;
            // speculatively try the subquery first.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                // A derived table may be followed by PIVOT/UNPIVOT clauses.
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            // Not a subquery: parse the parenthesized content as a join tree.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // (table [joins...]): a genuine nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // ((a JOIN b)): extra parentheses around an existing nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if self.dialect.supports_parens_around_table_factor() {
                // (<table factor>): dialects that allow redundant parentheses
                // unwrap the inner factor and attach any alias written after ')'.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            // An alias both inside and outside the parens is ambiguous.
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            // Move the outer alias onto the inner factor.
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                // Dialect forbids bare parens around a single table factor.
                self.expected_ref("joined table", self.peek_token_ref())
            }
        } else if self.dialect.supports_values_as_table_factor()
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // VALUES (...) used directly as a table factor: wrap it in a
            // derived-table query body.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
                sample: None,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // UNNEST(<expr>[, ...]) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // WITH OFFSET is optional; a failed expect simply means absent.
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // JSON_TABLE(<expr>, <path> COLUMNS (<defs>)) [alias]
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Back up so the helper sees the '(' it expects.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            // Back up so the helper sees the '(' it expects.
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else if self.peek_token_ref().token == Token::AtSign {
            // @stage reference (Snowflake-style).
            self.parse_snowflake_stage_table_factor()
        } else {
            // Plain table name with its many optional suffixes.
            let name = self.parse_object_name(true)?;

            // PartiQL-style JSON path immediately after the name.
            let json_path = match &self.peek_token_ref().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            // MySQL PARTITION (p1, p2, ...) selection.
            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Temporal/versioned-table clause, if the dialect supports it.
            let version = self.maybe_parse_table_version()?;

            // Table-valued function invocation: name(args).
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Some dialects place TABLESAMPLE before the alias, others after.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            // MySQL-style index hints (USE/FORCE/IGNORE INDEX ...).
            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // MSSQL-style WITH (<hints>); a bare WITH is pushed back untouched.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            // Sample clause after the alias for the remaining dialects.
            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Optional chain of PIVOT/UNPIVOT transformations.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            // Optional MATCH_RECOGNIZE transformation.
            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
16202
16203 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16208 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16210
16211 let args = if self.consume_token(&Token::LParen) {
16213 Some(self.parse_table_function_args()?)
16214 } else {
16215 None
16216 };
16217
16218 let alias = self.maybe_parse_table_alias()?;
16219
16220 Ok(TableFactor::Table {
16221 name,
16222 alias,
16223 args,
16224 with_hints: vec![],
16225 version: None,
16226 partitions: vec![],
16227 with_ordinality: false,
16228 json_path: None,
16229 sample: None,
16230 index_hints: vec![],
16231 })
16232 }
16233
16234 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16235 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16236 TableSampleModifier::TableSample
16237 } else if self.parse_keyword(Keyword::SAMPLE) {
16238 TableSampleModifier::Sample
16239 } else {
16240 return Ok(None);
16241 };
16242 self.parse_table_sample(modifier).map(Some)
16243 }
16244
    /// Parse the body of a TABLESAMPLE/SAMPLE clause. The introducing keyword
    /// has already been consumed and is passed as `modifier`.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method name.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        let parenthesized = self.consume_token(&Token::LParen);

        // Either a Hive-style BUCKET clause or a quantity with optional unit.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            // BUCKET <n> OUT OF <m> [ON <expr>]
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    // Not an expression: accept a bare word (e.g. a byte
                    // length like 100M) as a placeholder value.
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            // Optional unit following the quantity.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        // Close the parenthesis opened before the quantity/bucket, if any.
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic seed: REPEATABLE(<n>) or SEED(<n>).
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional OFFSET expression.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
16339
16340 fn parse_table_sample_seed(
16341 &mut self,
16342 modifier: TableSampleSeedModifier,
16343 ) -> Result<TableSampleSeed, ParserError> {
16344 self.expect_token(&Token::LParen)?;
16345 let value = self.parse_number_value()?;
16346 self.expect_token(&Token::RParen)?;
16347 Ok(TableSampleSeed { modifier, value })
16348 }
16349
16350 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16353 self.expect_token(&Token::LParen)?;
16354 let json_expr = self.parse_expr()?;
16355 let json_path = if self.consume_token(&Token::Comma) {
16356 Some(self.parse_value()?)
16357 } else {
16358 None
16359 };
16360 self.expect_token(&Token::RParen)?;
16361 let columns = if self.parse_keyword(Keyword::WITH) {
16362 self.expect_token(&Token::LParen)?;
16363 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
16364 self.expect_token(&Token::RParen)?;
16365 columns
16366 } else {
16367 Vec::new()
16368 };
16369 let alias = self.maybe_parse_table_alias()?;
16370 Ok(TableFactor::OpenJsonTable {
16371 json_expr,
16372 json_path,
16373 columns,
16374 alias,
16375 })
16376 }
16377
16378 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16379 self.expect_token(&Token::LParen)?;
16380 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
16381 self.expect_token(&Token::LParen)?;
16382 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
16383 self.expect_token(&Token::RParen)?;
16384 self.expect_token(&Token::Comma)?;
16385 namespaces
16386 } else {
16387 vec![]
16388 };
16389 let row_expression = self.parse_expr()?;
16390 let passing = self.parse_xml_passing_clause()?;
16391 self.expect_keyword_is(Keyword::COLUMNS)?;
16392 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
16393 self.expect_token(&Token::RParen)?;
16394 let alias = self.maybe_parse_table_alias()?;
16395 Ok(TableFactor::XmlTable {
16396 namespaces,
16397 row_expression,
16398 passing,
16399 columns,
16400 alias,
16401 })
16402 }
16403
16404 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
16405 let uri = self.parse_expr()?;
16406 self.expect_keyword_is(Keyword::AS)?;
16407 let name = self.parse_identifier()?;
16408 Ok(XmlNamespaceDefinition { uri, name })
16409 }
16410
16411 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
16412 let name = self.parse_identifier()?;
16413
16414 let option = if self.parse_keyword(Keyword::FOR) {
16415 self.expect_keyword(Keyword::ORDINALITY)?;
16416 XmlTableColumnOption::ForOrdinality
16417 } else {
16418 let r#type = self.parse_data_type()?;
16419 let mut path = None;
16420 let mut default = None;
16421
16422 if self.parse_keyword(Keyword::PATH) {
16423 path = Some(self.parse_expr()?);
16424 }
16425
16426 if self.parse_keyword(Keyword::DEFAULT) {
16427 default = Some(self.parse_expr()?);
16428 }
16429
16430 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
16431 if !not_null {
16432 let _ = self.parse_keyword(Keyword::NULL);
16434 }
16435
16436 XmlTableColumnOption::NamedInfo {
16437 r#type,
16438 path,
16439 default,
16440 nullable: !not_null,
16441 }
16442 };
16443 Ok(XmlTableColumn { name, option })
16444 }
16445
16446 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
16447 let mut arguments = vec![];
16448 if self.parse_keyword(Keyword::PASSING) {
16449 loop {
16450 let by_value =
16451 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
16452 let expr = self.parse_expr()?;
16453 let alias = if self.parse_keyword(Keyword::AS) {
16454 Some(self.parse_identifier()?)
16455 } else {
16456 None
16457 };
16458 arguments.push(XmlPassingArgument {
16459 expr,
16460 alias,
16461 by_value,
16462 });
16463 if !self.consume_token(&Token::Comma) {
16464 break;
16465 }
16466 }
16467 }
16468 Ok(XmlPassingClause { arguments })
16469 }
16470
16471 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16473 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
16474 self.expect_token(&Token::LParen)?;
16475
16476 let name = self.parse_object_name(true)?;
16477
16478 let mut dimensions = Vec::new();
16480 let mut metrics = Vec::new();
16481 let mut facts = Vec::new();
16482 let mut where_clause = None;
16483
16484 while self.peek_token_ref().token != Token::RParen {
16485 if self.parse_keyword(Keyword::DIMENSIONS) {
16486 if !dimensions.is_empty() {
16487 return Err(ParserError::ParserError(
16488 "DIMENSIONS clause can only be specified once".to_string(),
16489 ));
16490 }
16491 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16492 } else if self.parse_keyword(Keyword::METRICS) {
16493 if !metrics.is_empty() {
16494 return Err(ParserError::ParserError(
16495 "METRICS clause can only be specified once".to_string(),
16496 ));
16497 }
16498 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16499 } else if self.parse_keyword(Keyword::FACTS) {
16500 if !facts.is_empty() {
16501 return Err(ParserError::ParserError(
16502 "FACTS clause can only be specified once".to_string(),
16503 ));
16504 }
16505 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16506 } else if self.parse_keyword(Keyword::WHERE) {
16507 if where_clause.is_some() {
16508 return Err(ParserError::ParserError(
16509 "WHERE clause can only be specified once".to_string(),
16510 ));
16511 }
16512 where_clause = Some(self.parse_expr()?);
16513 } else {
16514 let tok = self.peek_token_ref();
16515 return parser_err!(
16516 format!(
16517 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
16518 tok.token
16519 ),
16520 tok.span.start
16521 )?;
16522 }
16523 }
16524
16525 self.expect_token(&Token::RParen)?;
16526
16527 let alias = self.maybe_parse_table_alias()?;
16528
16529 Ok(TableFactor::SemanticView {
16530 name,
16531 dimensions,
16532 metrics,
16533 facts,
16534 where_clause,
16535 alias,
16536 })
16537 }
16538
    /// Parse the parenthesized body of a `MATCH_RECOGNIZE(...)` clause applied
    /// to `table` (the MATCH_RECOGNIZE keyword has already been consumed).
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional PARTITION BY <exprs>.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional ORDER BY <exprs>.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional MEASURES <expr> [AS] <alias>, ... — AS is optional.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional ONE ROW PER MATCH | ALL ROWS PER MATCH [empty-match mode].
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional AFTER MATCH SKIP <option>; a bad option is an error.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory PATTERN (<row pattern>).
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory DEFINE <symbol> AS <expr>, ...
        self.expect_keyword_is(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
16643
    /// Parse a primary (non-quantified) MATCH_RECOGNIZE row-pattern element.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // '^' anchors the pattern at the partition start.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // '$' (tokenized as a placeholder) anchors at the partition end.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // {- symbol -}: rows matched by the symbol are excluded.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // PERMUTE(sym, ...): only the unquoted word PERMUTE is recognized.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // Parenthesized sub-pattern group.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: put the token back and read a plain symbol name.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
16682
    /// Parse a base pattern followed by any number of repetition quantifiers
    /// (`*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, `{n,m}`).
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // '?' is tokenized as a placeholder.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    // Braced quantifier: {,m}, {n,m}, {n,} or {n}.
                    let token = self.next_token();
                    match token.token {
                        // {,m}: upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // {n,m} or {n,}: lower bound with optional upper bound.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // {n}: exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // No quantifier follows: put the token back and stop.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            // Each quantifier wraps everything parsed so far.
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
16737
16738 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
16739 let mut patterns = vec![self.parse_repetition_pattern()?];
16740 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
16741 patterns.push(self.parse_repetition_pattern()?);
16742 }
16743 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
16744 Ok([pattern]) => Ok(pattern),
16745 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
16746 }
16747 }
16748
16749 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
16750 let pattern = self.parse_concat_pattern()?;
16751 if self.consume_token(&Token::Pipe) {
16752 match self.parse_pattern()? {
16753 MatchRecognizePattern::Alternation(mut patterns) => {
16755 patterns.insert(0, pattern);
16756 Ok(MatchRecognizePattern::Alternation(patterns))
16757 }
16758 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
16759 }
16760 } else {
16761 Ok(pattern)
16762 }
16763 }
16764
16765 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
16767 if self.dialect.supports_table_versioning() {
16768 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
16769 {
16770 let expr = self.parse_expr()?;
16771 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
16772 } else if self.peek_keyword(Keyword::CHANGES) {
16773 return self.parse_table_version_changes().map(Some);
16774 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
16775 let func_name = self.parse_object_name(true)?;
16776 let func = self.parse_function(func_name)?;
16777 return Ok(Some(TableVersion::Function(func)));
16778 } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
16779 let expr = self.parse_expr()?;
16780 return Ok(Some(TableVersion::TimestampAsOf(expr)));
16781 } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
16782 let expr = Expr::Value(self.parse_number_value()?);
16783 return Ok(Some(TableVersion::VersionAsOf(expr)));
16784 }
16785 }
16786 Ok(None)
16787 }
16788
16789 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
16800 let changes_name = self.parse_object_name(true)?;
16801 let changes = self.parse_function(changes_name)?;
16802 let at_name = self.parse_object_name(true)?;
16803 let at = self.parse_function(at_name)?;
16804 let end = if self.peek_keyword(Keyword::END) {
16805 let end_name = self.parse_object_name(true)?;
16806 Some(self.parse_function(end_name)?)
16807 } else {
16808 None
16809 };
16810 Ok(TableVersion::Changes { changes, at, end })
16811 }
16812
    /// Parse one column definition inside `JSON_TABLE(... COLUMNS (...))`:
    /// a NESTED column group, a FOR ORDINALITY counter, or a typed column
    /// with a PATH and optional ON EMPTY / ON ERROR handling.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // NESTED [PATH] '<path>' COLUMNS (<defs>) — PATH is optional.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            // <name> FOR ORDINALITY: a row-numbering column.
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        // <type> EXISTS PATH tests for presence instead of extracting a value.
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?;
        // Any number of `<handling> ON EMPTY` / `<handling> ON ERROR` clauses
        // may follow, in either order; a later clause overwrites its slot.
        let mut on_empty = None;
        let mut on_error = None;
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
16856
16857 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
16865 let name = self.parse_identifier()?;
16866 let r#type = self.parse_data_type()?;
16867 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
16868 self.next_token();
16869 Some(path)
16870 } else {
16871 None
16872 };
16873 let as_json = self.parse_keyword(Keyword::AS);
16874 if as_json {
16875 self.expect_keyword_is(Keyword::JSON)?;
16876 }
16877 Ok(OpenJsonTableColumn {
16878 name,
16879 r#type,
16880 path,
16881 as_json,
16882 })
16883 }
16884
16885 fn parse_json_table_column_error_handling(
16886 &mut self,
16887 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
16888 let res = if self.parse_keyword(Keyword::NULL) {
16889 JsonTableColumnErrorHandling::Null
16890 } else if self.parse_keyword(Keyword::ERROR) {
16891 JsonTableColumnErrorHandling::Error
16892 } else if self.parse_keyword(Keyword::DEFAULT) {
16893 JsonTableColumnErrorHandling::Default(self.parse_value()?)
16894 } else {
16895 return Ok(None);
16896 };
16897 self.expect_keyword_is(Keyword::ON)?;
16898 Ok(Some(res))
16899 }
16900
16901 pub fn parse_derived_table_factor(
16903 &mut self,
16904 lateral: IsLateral,
16905 ) -> Result<TableFactor, ParserError> {
16906 let subquery = self.parse_query()?;
16907 self.expect_token(&Token::RParen)?;
16908 let alias = self.maybe_parse_table_alias()?;
16909
16910 let sample = self
16912 .maybe_parse_table_sample()?
16913 .map(TableSampleKind::AfterTableAlias);
16914
16915 Ok(TableFactor::Derived {
16916 lateral: match lateral {
16917 Lateral => true,
16918 NotLateral => false,
16919 },
16920 subquery,
16921 alias,
16922 sample,
16923 })
16924 }
16925
16926 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
16949 let expr = self.parse_expr()?;
16950 let alias = if self.parse_keyword(Keyword::AS) {
16951 Some(self.parse_identifier()?)
16952 } else {
16953 None
16954 };
16955
16956 Ok(ExprWithAlias { expr, alias })
16957 }
16958
16959 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
16963 let expr = self.parse_expr()?;
16964 let alias = self.parse_identifier_optional_alias()?;
16965 Ok(ExprWithAlias { expr, alias })
16966 }
16967
    /// Parses one aggregate function call in a PIVOT clause, with an optional
    /// alias (e.g. `SUM(amount) AS total`).
    fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
        let function_name = match self.next_token().token {
            Token::Word(w) => Ok(w.value),
            _ => self.expected_ref("a function identifier", self.peek_token_ref()),
        }?;
        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
        let alias = {
            // `FOR` terminates the aggregate list in PIVOT syntax, so it must
            // not be taken as an implicit alias; every other keyword is
            // deferred to the dialect's select-item alias rules.
            fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
                kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
            }
            self.parse_optional_alias_inner(None, validator)?
        };
        Ok(ExprWithAlias { expr, alias })
    }
16984
16985 pub fn parse_pivot_table_factor(
16987 &mut self,
16988 table: TableFactor,
16989 ) -> Result<TableFactor, ParserError> {
16990 self.expect_token(&Token::LParen)?;
16991 let aggregate_functions =
16992 self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
16993 self.expect_keyword_is(Keyword::FOR)?;
16994 let value_column = if self.peek_token_ref().token == Token::LParen {
16995 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
16996 p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
16997 })?
16998 } else {
16999 vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
17000 };
17001 self.expect_keyword_is(Keyword::IN)?;
17002
17003 self.expect_token(&Token::LParen)?;
17004 let value_source = if self.parse_keyword(Keyword::ANY) {
17005 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17006 self.parse_comma_separated(Parser::parse_order_by_expr)?
17007 } else {
17008 vec![]
17009 };
17010 PivotValueSource::Any(order_by)
17011 } else if self.peek_sub_query() {
17012 PivotValueSource::Subquery(self.parse_query()?)
17013 } else {
17014 PivotValueSource::List(
17015 self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
17016 )
17017 };
17018 self.expect_token(&Token::RParen)?;
17019
17020 let default_on_null =
17021 if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
17022 self.expect_token(&Token::LParen)?;
17023 let expr = self.parse_expr()?;
17024 self.expect_token(&Token::RParen)?;
17025 Some(expr)
17026 } else {
17027 None
17028 };
17029
17030 self.expect_token(&Token::RParen)?;
17031 let alias = self.maybe_parse_table_alias()?;
17032 Ok(TableFactor::Pivot {
17033 table: Box::new(table),
17034 aggregate_functions,
17035 value_column,
17036 value_source,
17037 default_on_null,
17038 alias,
17039 })
17040 }
17041
17042 pub fn parse_unpivot_table_factor(
17044 &mut self,
17045 table: TableFactor,
17046 ) -> Result<TableFactor, ParserError> {
17047 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17048 self.expect_keyword_is(Keyword::NULLS)?;
17049 Some(NullInclusion::IncludeNulls)
17050 } else if self.parse_keyword(Keyword::EXCLUDE) {
17051 self.expect_keyword_is(Keyword::NULLS)?;
17052 Some(NullInclusion::ExcludeNulls)
17053 } else {
17054 None
17055 };
17056 self.expect_token(&Token::LParen)?;
17057 let value = self.parse_expr()?;
17058 self.expect_keyword_is(Keyword::FOR)?;
17059 let name = self.parse_identifier()?;
17060 self.expect_keyword_is(Keyword::IN)?;
17061 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17062 p.parse_expr_with_alias()
17063 })?;
17064 self.expect_token(&Token::RParen)?;
17065 let alias = self.maybe_parse_table_alias()?;
17066 Ok(TableFactor::Unpivot {
17067 table: Box::new(table),
17068 value,
17069 null_inclusion,
17070 name,
17071 columns,
17072 alias,
17073 })
17074 }
17075
17076 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17078 if natural {
17079 Ok(JoinConstraint::Natural)
17080 } else if self.parse_keyword(Keyword::ON) {
17081 let constraint = self.parse_expr()?;
17082 Ok(JoinConstraint::On(constraint))
17083 } else if self.parse_keyword(Keyword::USING) {
17084 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17085 Ok(JoinConstraint::Using(columns))
17086 } else {
17087 Ok(JoinConstraint::None)
17088 }
17090 }
17091
    /// Parses a `GRANT` statement (everything after the `GRANT` keyword):
    /// privileges, objects, grantees, and the optional trailing clauses.
    pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;

        self.expect_keyword_is(Keyword::TO)?;
        let grantees = self.parse_grantees()?;

        let with_grant_option =
            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);

        // Optional `COPY CURRENT GRANTS` / `REVOKE CURRENT GRANTS` clause.
        let current_grants =
            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
                Some(CurrentGrantsKind::CopyCurrentGrants)
            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
                Some(CurrentGrantsKind::RevokeCurrentGrants)
            } else {
                None
            };

        // Optional `AS <grantor>` clause.
        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        // Optional `GRANTED BY <grantor>` clause.
        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        Ok(Grant {
            privileges,
            objects,
            grantees,
            with_grant_option,
            as_grantor,
            granted_by,
            current_grants,
        })
    }
17133
    /// Parses the comma-separated grantee list of a GRANT/REVOKE/DENY
    /// statement. An optional grantee-type prefix (ROLE, USER, SHARE, ...)
    /// "sticks": once seen it applies to subsequent grantees until replaced.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            // Try to read a type prefix; if none is present, keep the type
            // carried over from the previous iteration.
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                grantee_type.clone() };

            // Some dialects treat certain of these words as plain identifiers
            // rather than type prefixes; in that case the keyword we just
            // consumed is really the grantee's name, so push the token back
            // and keep the previous type.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC has no associated name.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // A `namespace:ident` grantee is folded into a single
                    // identifier containing the colon.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17201
17202 pub fn parse_grant_deny_revoke_privileges_objects(
17204 &mut self,
17205 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17206 let privileges = if self.parse_keyword(Keyword::ALL) {
17207 Privileges::All {
17208 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17209 }
17210 } else {
17211 let actions = self.parse_actions_list()?;
17212 Privileges::Actions(actions)
17213 };
17214
17215 let objects = if self.parse_keyword(Keyword::ON) {
17216 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17217 Some(GrantObjects::AllTablesInSchema {
17218 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17219 })
17220 } else if self.parse_keywords(&[
17221 Keyword::ALL,
17222 Keyword::EXTERNAL,
17223 Keyword::TABLES,
17224 Keyword::IN,
17225 Keyword::SCHEMA,
17226 ]) {
17227 Some(GrantObjects::AllExternalTablesInSchema {
17228 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17229 })
17230 } else if self.parse_keywords(&[
17231 Keyword::ALL,
17232 Keyword::VIEWS,
17233 Keyword::IN,
17234 Keyword::SCHEMA,
17235 ]) {
17236 Some(GrantObjects::AllViewsInSchema {
17237 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17238 })
17239 } else if self.parse_keywords(&[
17240 Keyword::ALL,
17241 Keyword::MATERIALIZED,
17242 Keyword::VIEWS,
17243 Keyword::IN,
17244 Keyword::SCHEMA,
17245 ]) {
17246 Some(GrantObjects::AllMaterializedViewsInSchema {
17247 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17248 })
17249 } else if self.parse_keywords(&[
17250 Keyword::ALL,
17251 Keyword::FUNCTIONS,
17252 Keyword::IN,
17253 Keyword::SCHEMA,
17254 ]) {
17255 Some(GrantObjects::AllFunctionsInSchema {
17256 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17257 })
17258 } else if self.parse_keywords(&[
17259 Keyword::FUTURE,
17260 Keyword::SCHEMAS,
17261 Keyword::IN,
17262 Keyword::DATABASE,
17263 ]) {
17264 Some(GrantObjects::FutureSchemasInDatabase {
17265 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17266 })
17267 } else if self.parse_keywords(&[
17268 Keyword::FUTURE,
17269 Keyword::TABLES,
17270 Keyword::IN,
17271 Keyword::SCHEMA,
17272 ]) {
17273 Some(GrantObjects::FutureTablesInSchema {
17274 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17275 })
17276 } else if self.parse_keywords(&[
17277 Keyword::FUTURE,
17278 Keyword::EXTERNAL,
17279 Keyword::TABLES,
17280 Keyword::IN,
17281 Keyword::SCHEMA,
17282 ]) {
17283 Some(GrantObjects::FutureExternalTablesInSchema {
17284 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17285 })
17286 } else if self.parse_keywords(&[
17287 Keyword::FUTURE,
17288 Keyword::VIEWS,
17289 Keyword::IN,
17290 Keyword::SCHEMA,
17291 ]) {
17292 Some(GrantObjects::FutureViewsInSchema {
17293 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17294 })
17295 } else if self.parse_keywords(&[
17296 Keyword::FUTURE,
17297 Keyword::MATERIALIZED,
17298 Keyword::VIEWS,
17299 Keyword::IN,
17300 Keyword::SCHEMA,
17301 ]) {
17302 Some(GrantObjects::FutureMaterializedViewsInSchema {
17303 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17304 })
17305 } else if self.parse_keywords(&[
17306 Keyword::ALL,
17307 Keyword::SEQUENCES,
17308 Keyword::IN,
17309 Keyword::SCHEMA,
17310 ]) {
17311 Some(GrantObjects::AllSequencesInSchema {
17312 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17313 })
17314 } else if self.parse_keywords(&[
17315 Keyword::FUTURE,
17316 Keyword::SEQUENCES,
17317 Keyword::IN,
17318 Keyword::SCHEMA,
17319 ]) {
17320 Some(GrantObjects::FutureSequencesInSchema {
17321 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17322 })
17323 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
17324 Some(GrantObjects::ResourceMonitors(
17325 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17326 ))
17327 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
17328 Some(GrantObjects::ComputePools(
17329 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17330 ))
17331 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
17332 Some(GrantObjects::FailoverGroup(
17333 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17334 ))
17335 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
17336 Some(GrantObjects::ReplicationGroup(
17337 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17338 ))
17339 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
17340 Some(GrantObjects::ExternalVolumes(
17341 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17342 ))
17343 } else {
17344 let object_type = self.parse_one_of_keywords(&[
17345 Keyword::SEQUENCE,
17346 Keyword::DATABASE,
17347 Keyword::SCHEMA,
17348 Keyword::TABLE,
17349 Keyword::VIEW,
17350 Keyword::WAREHOUSE,
17351 Keyword::INTEGRATION,
17352 Keyword::VIEW,
17353 Keyword::WAREHOUSE,
17354 Keyword::INTEGRATION,
17355 Keyword::USER,
17356 Keyword::CONNECTION,
17357 Keyword::PROCEDURE,
17358 Keyword::FUNCTION,
17359 ]);
17360 let objects =
17361 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
17362 match object_type {
17363 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
17364 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
17365 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
17366 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
17367 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
17368 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
17369 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
17370 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
17371 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
17372 if let Some(name) = objects?.first() {
17373 self.parse_grant_procedure_or_function(name, &kw)?
17374 } else {
17375 self.expected_ref("procedure or function name", self.peek_token_ref())?
17376 }
17377 }
17378 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
17379 Some(unexpected_keyword) => return Err(ParserError::ParserError(
17380 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
17381 )),
17382 }
17383 }
17384 } else {
17385 None
17386 };
17387
17388 Ok((privileges, objects))
17389 }
17390
17391 fn parse_grant_procedure_or_function(
17392 &mut self,
17393 name: &ObjectName,
17394 kw: &Option<Keyword>,
17395 ) -> Result<Option<GrantObjects>, ParserError> {
17396 let arg_types = if self.consume_token(&Token::LParen) {
17397 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
17398 self.expect_token(&Token::RParen)?;
17399 list
17400 } else {
17401 vec![]
17402 };
17403 match kw {
17404 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
17405 name: name.clone(),
17406 arg_types,
17407 })),
17408 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
17409 name: name.clone(),
17410 arg_types,
17411 })),
17412 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
17413 }
17414 }
17415
    /// Parses a single privilege in a GRANT/REVOKE/DENY action list.
    ///
    /// Multi-word privileges are matched before single-word ones so that e.g.
    /// `READ SESSION` is not consumed as a bare `READ`; ordering of the
    /// branches is therefore significant.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Helper for privileges that accept an optional column list,
        // e.g. `SELECT (a, b)`. An empty/absent list maps to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges first.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-word privileges (some take a sub-type or a column list).
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
17536
    /// Parses the optional object type following the `CREATE` privilege
    /// keyword (e.g. `GRANT CREATE SCHEMA`). Returns `None`, consuming
    /// nothing, when no recognized object type follows. Multi-word types
    /// are matched before single-word ones.
    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
        // Multi-word object types first.
        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
            Some(ActionCreateObjectType::ApplicationPackage)
        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
            Some(ActionCreateObjectType::ComputePool)
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
            Some(ActionCreateObjectType::DataExchangeListing)
        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
            Some(ActionCreateObjectType::ExternalVolume)
        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
            Some(ActionCreateObjectType::FailoverGroup)
        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
            Some(ActionCreateObjectType::NetworkPolicy)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
            Some(ActionCreateObjectType::OrganiationListing)
        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
            Some(ActionCreateObjectType::ReplicationGroup)
        }
        // Single-word object types.
        else if self.parse_keyword(Keyword::ACCOUNT) {
            Some(ActionCreateObjectType::Account)
        } else if self.parse_keyword(Keyword::APPLICATION) {
            Some(ActionCreateObjectType::Application)
        } else if self.parse_keyword(Keyword::DATABASE) {
            Some(ActionCreateObjectType::Database)
        } else if self.parse_keyword(Keyword::INTEGRATION) {
            Some(ActionCreateObjectType::Integration)
        } else if self.parse_keyword(Keyword::ROLE) {
            Some(ActionCreateObjectType::Role)
        } else if self.parse_keyword(Keyword::SCHEMA) {
            Some(ActionCreateObjectType::Schema)
        } else if self.parse_keyword(Keyword::SHARE) {
            Some(ActionCreateObjectType::Share)
        } else if self.parse_keyword(Keyword::USER) {
            Some(ActionCreateObjectType::User)
        } else if self.parse_keyword(Keyword::WAREHOUSE) {
            Some(ActionCreateObjectType::Warehouse)
        } else {
            None
        }
    }
17579
    /// Parses the mandatory target of the `APPLY` privilege
    /// (e.g. `GRANT APPLY MASKING POLICY`); errors when none matches.
    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AggregationPolicy)
        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AuthenticationPolicy)
        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
            Ok(ActionApplyType::JoinPolicy)
        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
            Ok(ActionApplyType::MaskingPolicy)
        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
            Ok(ActionApplyType::PackagesPolicy)
        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
            Ok(ActionApplyType::PasswordPolicy)
        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
            Ok(ActionApplyType::ProjectionPolicy)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
            Ok(ActionApplyType::RowAccessPolicy)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
            Ok(ActionApplyType::SessionPolicy)
        } else if self.parse_keyword(Keyword::TAG) {
            Ok(ActionApplyType::Tag)
        } else {
            self.expected_ref("GRANT APPLY type", self.peek_token_ref())
        }
    }
17605
    /// Parses the optional object type following the `EXEC`/`EXECUTE`
    /// privilege keyword. Multi-word types (`DATA METRIC FUNCTION`,
    /// `MANAGED ALERT`, `MANAGED TASK`) are matched before the bare
    /// `ALERT`/`TASK` forms; returns `None`, consuming nothing, otherwise.
    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
            Some(ActionExecuteObjectType::DataMetricFunction)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
            Some(ActionExecuteObjectType::ManagedAlert)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
            Some(ActionExecuteObjectType::ManagedTask)
        } else if self.parse_keyword(Keyword::ALERT) {
            Some(ActionExecuteObjectType::Alert)
        } else if self.parse_keyword(Keyword::TASK) {
            Some(ActionExecuteObjectType::Task)
        } else {
            None
        }
    }
17621
    /// Parses the mandatory target of the `MANAGE` privilege
    /// (e.g. `GRANT MANAGE GRANTS`); errors when none matches.
    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::AccountSupportCases)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
            Ok(ActionManageType::EventSharing)
        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
            Ok(ActionManageType::ListingAutoFulfillment)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::OrganizationSupportCases)
        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::UserSupportCases)
        } else if self.parse_keyword(Keyword::GRANTS) {
            Ok(ActionManageType::Grants)
        } else if self.parse_keyword(Keyword::WAREHOUSES) {
            Ok(ActionManageType::Warehouses)
        } else {
            self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
        }
    }
17641
    /// Parses the optional target of the `MODIFY` privilege. Returns `None`,
    /// consuming nothing, when no recognized target follows. The `SESSION ...`
    /// forms start with a distinct first keyword, so checking the shorter
    /// `LOG LEVEL` / `TRACE LEVEL` variants first is safe.
    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
            Some(ActionModifyType::LogLevel)
        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
            Some(ActionModifyType::TraceLevel)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
            Some(ActionModifyType::SessionLogLevel)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
            Some(ActionModifyType::SessionTraceLevel)
        } else {
            None
        }
    }
17655
17656 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
17657 if self.parse_keyword(Keyword::EXECUTION) {
17658 Some(ActionMonitorType::Execution)
17659 } else if self.parse_keyword(Keyword::SECURITY) {
17660 Some(ActionMonitorType::Security)
17661 } else if self.parse_keyword(Keyword::USAGE) {
17662 Some(ActionMonitorType::Usage)
17663 } else {
17664 None
17665 }
17666 }
17667
    /// Parses a grantee name: either a (possibly qualified) object name or,
    /// in dialects that support it, a `user@host` pair.
    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
        let mut name = self.parse_object_name(false)?;
        if self.dialect.supports_user_host_grantee()
            && name.0.len() == 1
            && name.0[0].as_ident().is_some()
            && self.consume_token(&Token::AtSign)
        {
            // Unwraps are safe: length 1 and as_ident() were checked above.
            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
            let host = self.parse_identifier()?;
            Ok(GranteeName::UserHost { user, host })
        } else {
            Ok(GranteeName::ObjectName(name))
        }
    }
17683
17684 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
17686 self.expect_keyword(Keyword::DENY)?;
17687
17688 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17689 let objects = match objects {
17690 Some(o) => o,
17691 None => {
17692 return parser_err!(
17693 "DENY statements must specify an object",
17694 self.peek_token_ref().span.start
17695 )
17696 }
17697 };
17698
17699 self.expect_keyword_is(Keyword::TO)?;
17700 let grantees = self.parse_grantees()?;
17701 let cascade = self.parse_cascade_option();
17702 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
17703 Some(self.parse_identifier()?)
17704 } else {
17705 None
17706 };
17707
17708 Ok(Statement::Deny(DenyStatement {
17709 privileges,
17710 objects,
17711 grantees,
17712 cascade,
17713 granted_by,
17714 }))
17715 }
17716
17717 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
17719 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17720
17721 self.expect_keyword_is(Keyword::FROM)?;
17722 let grantees = self.parse_grantees()?;
17723
17724 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17725 Some(self.parse_identifier()?)
17726 } else {
17727 None
17728 };
17729
17730 let cascade = self.parse_cascade_option();
17731
17732 Ok(Revoke {
17733 privileges,
17734 objects,
17735 grantees,
17736 granted_by,
17737 cascade,
17738 })
17739 }
17740
17741 pub fn parse_replace(
17743 &mut self,
17744 replace_token: TokenWithSpan,
17745 ) -> Result<Statement, ParserError> {
17746 if !dialect_of!(self is MySqlDialect | GenericDialect) {
17747 return parser_err!(
17748 "Unsupported statement REPLACE",
17749 self.peek_token_ref().span.start
17750 );
17751 }
17752
17753 let mut insert = self.parse_insert(replace_token)?;
17754 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
17755 *replace_into = true;
17756 }
17757
17758 Ok(insert)
17759 }
17760
17761 fn parse_insert_setexpr_boxed(
17765 &mut self,
17766 insert_token: TokenWithSpan,
17767 ) -> Result<Box<SetExpr>, ParserError> {
17768 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
17769 }
17770
    /// Parses the body of an `INSERT` (or `REPLACE`) statement, after the
    /// leading `INSERT`/`REPLACE` token has already been consumed by the
    /// caller (which passes that token in as `insert_token`).
    ///
    /// Also recognizes the `INSERT OVERWRITE [LOCAL] DIRECTORY ...` form,
    /// which produces a [`Statement::Directory`] instead of an `Insert`.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Optional `OR REPLACE|ROLLBACK|ABORT|FAIL|IGNORE` conflict clause.
        let or = self.parse_conflict_clause();
        // MySQL insert priority modifiers; only attempted for MySQL/Generic.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // `REPLACE INTO` is flagged by the caller (after this returns), not here.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // `INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>' [STORED AS <fmt>] <query>`.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Regular `INSERT [TABLE] <target>` form.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional table alias; skipped when the next tokens start a
            // subquery or the DEFAULT/VALUES keywords, which would otherwise
            // be misread as an alias.
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    // Bare identifier alias; `maybe_parse` rewinds on failure.
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            // `DEFAULT VALUES` takes no columns, partition list, or source.
            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                (vec![], None, vec![], None, None, vec![])
            } else {
                // Column list / PARTITION only when not immediately followed
                // by a `(SELECT ...` subquery (whose `(` would be ambiguous).
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after the PARTITION clause.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                // The insert source: absent before FORMAT/SETTINGS, a SET
                // assignment list for dialects supporting `INSERT ... SET`,
                // otherwise a full query (VALUES/SELECT/...).
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // Dialect-gated `SETTINGS ...` and `FORMAT ...` clauses.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias (col_aliases)` for the inserted row.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ... DO NOTHING|UPDATE` or `ON DUPLICATE KEY UPDATE`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    // `ON DUPLICATE KEY UPDATE <assignments>`.
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
17997
17998 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18002 let ident = self.parse_identifier()?;
18003 let values = self
18004 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18005 .unwrap_or_default();
18006
18007 Ok(InputFormatClause { ident, values })
18008 }
18009
18010 fn peek_subquery_start(&mut self) -> bool {
18013 matches!(
18014 self.peek_tokens_ref(),
18015 [
18016 TokenWithSpan {
18017 token: Token::LParen,
18018 ..
18019 },
18020 TokenWithSpan {
18021 token: Token::Word(Word {
18022 keyword: Keyword::SELECT,
18023 ..
18024 }),
18025 ..
18026 },
18027 ]
18028 )
18029 }
18030
18031 fn peek_subquery_or_cte_start(&mut self) -> bool {
18035 matches!(
18036 self.peek_tokens_ref(),
18037 [
18038 TokenWithSpan {
18039 token: Token::LParen,
18040 ..
18041 },
18042 TokenWithSpan {
18043 token: Token::Word(Word {
18044 keyword: Keyword::SELECT | Keyword::WITH,
18045 ..
18046 }),
18047 ..
18048 },
18049 ]
18050 )
18051 }
18052
18053 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18054 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18055 Some(SqliteOnConflict::Replace)
18056 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18057 Some(SqliteOnConflict::Rollback)
18058 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18059 Some(SqliteOnConflict::Abort)
18060 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18061 Some(SqliteOnConflict::Fail)
18062 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18063 Some(SqliteOnConflict::Ignore)
18064 } else if self.parse_keyword(Keyword::REPLACE) {
18065 Some(SqliteOnConflict::Replace)
18066 } else {
18067 None
18068 }
18069 }
18070
18071 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18073 if self.parse_keyword(Keyword::PARTITION) {
18074 self.expect_token(&Token::LParen)?;
18075 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18076 self.expect_token(&Token::RParen)?;
18077 Ok(partition_cols)
18078 } else {
18079 Ok(None)
18080 }
18081 }
18082
18083 pub fn parse_load_data_table_format(
18085 &mut self,
18086 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18087 if self.parse_keyword(Keyword::INPUTFORMAT) {
18088 let input_format = self.parse_expr()?;
18089 self.expect_keyword_is(Keyword::SERDE)?;
18090 let serde = self.parse_expr()?;
18091 Ok(Some(HiveLoadDataFormat {
18092 input_format,
18093 serde,
18094 }))
18095 } else {
18096 Ok(None)
18097 }
18098 }
18099
18100 fn parse_update_setexpr_boxed(
18104 &mut self,
18105 update_token: TokenWithSpan,
18106 ) -> Result<Box<SetExpr>, ParserError> {
18107 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18108 }
18109
    /// Parses an `UPDATE` statement after the `UPDATE` token has been
    /// consumed by the caller (which passes it in as `update_token`).
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Optional `OR <conflict action>` clause.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // A `FROM` clause may appear before `SET`; remember which position
        // it occupied so the AST can round-trip the original syntax.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        let output = self.maybe_parse_output_clause()?;

        // Only look for FROM after SET when it did not already appear before.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // Dialect-gated `ORDER BY` on UPDATE.
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18171
18172 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18174 let target = self.parse_assignment_target()?;
18175 self.expect_token(&Token::Eq)?;
18176 let value = self.parse_expr()?;
18177 Ok(Assignment { target, value })
18178 }
18179
18180 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18182 if self.consume_token(&Token::LParen) {
18183 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18184 self.expect_token(&Token::RParen)?;
18185 Ok(AssignmentTarget::Tuple(columns))
18186 } else {
18187 let column = self.parse_object_name(false)?;
18188 Ok(AssignmentTarget::ColumnName(column))
18189 }
18190 }
18191
    /// Parses a single function-call argument: either a named argument
    /// (`name <op> expr`) or an unnamed wildcard expression.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // Speculatively try the named-argument form first; `maybe_parse`
        // rewinds the token stream when the closure fails.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // The argument name may itself be an arbitrary expression.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // The argument name must be a plain identifier.
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Fall back to an unnamed argument.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            // A `*` argument may carry trailing wildcard options in dialects
            // that support them.
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                // Only keep the options wrapper when at least one option was
                // actually present; otherwise degrade to a plain wildcard.
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18241
18242 fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
18243 if self.parse_keyword(Keyword::VALUE) {
18244 return Ok(FunctionArgOperator::Value);
18245 }
18246 let tok = self.next_token();
18247 match tok.token {
18248 Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
18249 Ok(FunctionArgOperator::RightArrow)
18250 }
18251 Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
18252 Ok(FunctionArgOperator::Equals)
18253 }
18254 Token::Assignment
18255 if self
18256 .dialect
18257 .supports_named_fn_args_with_assignment_operator() =>
18258 {
18259 Ok(FunctionArgOperator::Assignment)
18260 }
18261 Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
18262 Ok(FunctionArgOperator::Colon)
18263 }
18264 _ => {
18265 self.prev_token();
18266 self.expected("argument operator", tok)
18267 }
18268 }
18269 }
18270
18271 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
18273 if self.consume_token(&Token::RParen) {
18274 Ok(vec![])
18275 } else {
18276 let args = self.parse_comma_separated(Parser::parse_function_args)?;
18277 self.expect_token(&Token::RParen)?;
18278 Ok(args)
18279 }
18280 }
18281
    /// Parses the argument list of a table-valued function call, positioned
    /// just after the opening `(`; consumes the closing `)`. The list may be
    /// terminated by a `SETTINGS` clause.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        let settings = loop {
            // A SETTINGS clause may appear in any argument position and
            // terminates the argument list.
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            // Stop when no list-continuing comma follows.
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
18302
    /// Parses a parenthesized function argument list, positioned just after
    /// the opening `(`; consumes the closing `)`.
    ///
    /// Besides the arguments themselves, this collects the clauses various
    /// dialects allow inside the parentheses: JSON null handling, RETURNING,
    /// IGNORE/RESPECT NULLS, ORDER BY, LIMIT, HAVING bounds, SEPARATOR, and
    /// ON OVERFLOW. Clause parse order here fixes the accepted syntax.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON null-handling / RETURNING may appear before the arguments
        // (they are probed again after the argument list below).
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list.
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        // Optional leading ALL/DISTINCT, then the arguments proper.
        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // `IGNORE NULLS` / `RESPECT NULLS` inside the argument list.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // `HAVING MIN|MAX <expr>` bound.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // Unreachable: expect_one_of_keywords only returns MIN or MAX.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // `SEPARATOR <value>` (GROUP_CONCAT-style calls).
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // The JSON clauses may also follow the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
18396
18397 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
18398 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
18399 Some(JsonNullClause::AbsentOnNull)
18400 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
18401 Some(JsonNullClause::NullOnNull)
18402 } else {
18403 None
18404 }
18405 }
18406
18407 fn maybe_parse_json_returning_clause(
18408 &mut self,
18409 ) -> Result<Option<JsonReturningClause>, ParserError> {
18410 if self.parse_keyword(Keyword::RETURNING) {
18411 let data_type = self.parse_data_type()?;
18412 Ok(Some(JsonReturningClause { data_type }))
18413 } else {
18414 Ok(None)
18415 }
18416 }
18417
18418 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
18419 let loc = self.peek_token_ref().span.start;
18420 match (
18421 self.parse_keyword(Keyword::ALL),
18422 self.parse_keyword(Keyword::DISTINCT),
18423 ) {
18424 (true, false) => Ok(Some(DuplicateTreatment::All)),
18425 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
18426 (false, false) => Ok(None),
18427 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
18428 }
18429 }
18430
    /// Parses one item of a `SELECT` projection list.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Some dialects reserve keyword operators that may prefix a select
        // item; the keyword is retained as an identifier prefix.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `qualifier.*` with optional trailing wildcard options.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*` with optional trailing wildcard options.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // A bare unquoted `from` here indicates a missing projection
            // expression; reject it with a targeted error message.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // `alias = expr` assignment-style aliasing, for dialects where
            // supports_eq_alias_assignment() is true.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                let Expr::Identifier(alias) = *left else {
                    // Unreachable: the guard above already checked the variant.
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — wildcard expansion over an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (a, b, ...)` — multi-column alias.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
18505
18506 pub fn parse_wildcard_additional_options(
18510 &mut self,
18511 wildcard_token: TokenWithSpan,
18512 ) -> Result<WildcardAdditionalOptions, ParserError> {
18513 let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
18514 self.parse_optional_select_item_ilike()?
18515 } else {
18516 None
18517 };
18518 let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
18519 {
18520 self.parse_optional_select_item_exclude()?
18521 } else {
18522 None
18523 };
18524 let opt_except = if self.dialect.supports_select_wildcard_except() {
18525 self.parse_optional_select_item_except()?
18526 } else {
18527 None
18528 };
18529 let opt_replace = if self.dialect.supports_select_wildcard_replace() {
18530 self.parse_optional_select_item_replace()?
18531 } else {
18532 None
18533 };
18534 let opt_rename = if self.dialect.supports_select_wildcard_rename() {
18535 self.parse_optional_select_item_rename()?
18536 } else {
18537 None
18538 };
18539
18540 let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
18541 self.maybe_parse_select_item_alias()?
18542 } else {
18543 None
18544 };
18545
18546 Ok(WildcardAdditionalOptions {
18547 wildcard_token: wildcard_token.into(),
18548 opt_ilike,
18549 opt_exclude,
18550 opt_except,
18551 opt_rename,
18552 opt_replace,
18553 opt_alias,
18554 })
18555 }
18556
18557 pub fn parse_optional_select_item_ilike(
18561 &mut self,
18562 ) -> Result<Option<IlikeSelectItem>, ParserError> {
18563 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
18564 let next_token = self.next_token();
18565 let pattern = match next_token.token {
18566 Token::SingleQuotedString(s) => s,
18567 _ => return self.expected("ilike pattern", next_token),
18568 };
18569 Some(IlikeSelectItem { pattern })
18570 } else {
18571 None
18572 };
18573 Ok(opt_ilike)
18574 }
18575
18576 pub fn parse_optional_select_item_exclude(
18580 &mut self,
18581 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
18582 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
18583 if self.consume_token(&Token::LParen) {
18584 let columns =
18585 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
18586 self.expect_token(&Token::RParen)?;
18587 Some(ExcludeSelectItem::Multiple(columns))
18588 } else {
18589 let column = self.parse_object_name(false)?;
18590 Some(ExcludeSelectItem::Single(column))
18591 }
18592 } else {
18593 None
18594 };
18595
18596 Ok(opt_exclude)
18597 }
18598
18599 pub fn parse_optional_select_item_except(
18603 &mut self,
18604 ) -> Result<Option<ExceptSelectItem>, ParserError> {
18605 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
18606 if self.peek_token_ref().token == Token::LParen {
18607 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
18608 match &idents[..] {
18609 [] => {
18610 return self.expected_ref(
18611 "at least one column should be parsed by the expect clause",
18612 self.peek_token_ref(),
18613 )?;
18614 }
18615 [first, idents @ ..] => Some(ExceptSelectItem {
18616 first_element: first.clone(),
18617 additional_elements: idents.to_vec(),
18618 }),
18619 }
18620 } else {
18621 let ident = self.parse_identifier()?;
18623 Some(ExceptSelectItem {
18624 first_element: ident,
18625 additional_elements: vec![],
18626 })
18627 }
18628 } else {
18629 None
18630 };
18631
18632 Ok(opt_except)
18633 }
18634
18635 pub fn parse_optional_select_item_rename(
18637 &mut self,
18638 ) -> Result<Option<RenameSelectItem>, ParserError> {
18639 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
18640 if self.consume_token(&Token::LParen) {
18641 let idents =
18642 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
18643 self.expect_token(&Token::RParen)?;
18644 Some(RenameSelectItem::Multiple(idents))
18645 } else {
18646 let ident = self.parse_identifier_with_alias()?;
18647 Some(RenameSelectItem::Single(ident))
18648 }
18649 } else {
18650 None
18651 };
18652
18653 Ok(opt_rename)
18654 }
18655
18656 pub fn parse_optional_select_item_replace(
18658 &mut self,
18659 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
18660 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
18661 if self.consume_token(&Token::LParen) {
18662 let items = self.parse_comma_separated(|parser| {
18663 Ok(Box::new(parser.parse_replace_elements()?))
18664 })?;
18665 self.expect_token(&Token::RParen)?;
18666 Some(ReplaceSelectItem { items })
18667 } else {
18668 let tok = self.next_token();
18669 return self.expected("( after REPLACE but", tok);
18670 }
18671 } else {
18672 None
18673 };
18674
18675 Ok(opt_replace)
18676 }
18677 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
18679 let expr = self.parse_expr()?;
18680 let as_keyword = self.parse_keyword(Keyword::AS);
18681 let ident = self.parse_identifier()?;
18682 Ok(ReplaceSelectElement {
18683 expr,
18684 column_name: ident,
18685 as_keyword,
18686 })
18687 }
18688
18689 pub fn parse_asc_desc(&mut self) -> Option<bool> {
18692 if self.parse_keyword(Keyword::ASC) {
18693 Some(true)
18694 } else if self.parse_keyword(Keyword::DESC) {
18695 Some(false)
18696 } else {
18697 None
18698 }
18699 }
18700
18701 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
18703 self.parse_order_by_expr_inner(false)
18704 .map(|(order_by, _)| order_by)
18705 }
18706
18707 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
18709 self.parse_order_by_expr_inner(true)
18710 .map(|(column, operator_class)| IndexColumn {
18711 column,
18712 operator_class,
18713 })
18714 }
18715
    /// Shared implementation of [`Self::parse_order_by_expr`] and
    /// [`Self::parse_create_index_expr`]. When `with_operator_class` is true,
    /// an optional operator class name may follow the expression; otherwise
    /// the second tuple element is always `None`.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // Avoid mistaking a following ASC/DESC/NULLS/WITH keyword for an
            // operator class name.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // Dialect-gated `WITH FILL [FROM ...] [TO ...] [STEP ...]` clause.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
18756
18757 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
18758 let asc = self.parse_asc_desc();
18759
18760 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
18761 Some(true)
18762 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
18763 Some(false)
18764 } else {
18765 None
18766 };
18767
18768 Ok(OrderByOptions { asc, nulls_first })
18769 }
18770
18771 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
18775 let from = if self.parse_keyword(Keyword::FROM) {
18776 Some(self.parse_expr()?)
18777 } else {
18778 None
18779 };
18780
18781 let to = if self.parse_keyword(Keyword::TO) {
18782 Some(self.parse_expr()?)
18783 } else {
18784 None
18785 };
18786
18787 let step = if self.parse_keyword(Keyword::STEP) {
18788 Some(self.parse_expr()?)
18789 } else {
18790 None
18791 };
18792
18793 Ok(WithFill { from, to, step })
18794 }
18795
18796 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
18799 if !self.parse_keyword(Keyword::INTERPOLATE) {
18800 return Ok(None);
18801 }
18802
18803 if self.consume_token(&Token::LParen) {
18804 let interpolations =
18805 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
18806 self.expect_token(&Token::RParen)?;
18807 return Ok(Some(Interpolate {
18809 exprs: Some(interpolations),
18810 }));
18811 }
18812
18813 Ok(Some(Interpolate { exprs: None }))
18815 }
18816
18817 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
18819 let column = self.parse_identifier()?;
18820 let expr = if self.parse_keyword(Keyword::AS) {
18821 Some(self.parse_expr()?)
18822 } else {
18823 None
18824 };
18825 Ok(InterpolateExpr { column, expr })
18826 }
18827
18828 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
18831 let quantity = if self.consume_token(&Token::LParen) {
18832 let quantity = self.parse_expr()?;
18833 self.expect_token(&Token::RParen)?;
18834 Some(TopQuantity::Expr(quantity))
18835 } else {
18836 let next_token = self.next_token();
18837 let quantity = match next_token.token {
18838 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
18839 _ => self.expected("literal int", next_token)?,
18840 };
18841 Some(TopQuantity::Constant(quantity))
18842 };
18843
18844 let percent = self.parse_keyword(Keyword::PERCENT);
18845
18846 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
18847
18848 Ok(Top {
18849 with_ties,
18850 percent,
18851 quantity,
18852 })
18853 }
18854
18855 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18857 if self.parse_keyword(Keyword::ALL) {
18858 Ok(None)
18859 } else {
18860 Ok(Some(self.parse_expr()?))
18861 }
18862 }
18863
18864 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
18866 let value = self.parse_expr()?;
18867 let rows = if self.parse_keyword(Keyword::ROW) {
18868 OffsetRows::Row
18869 } else if self.parse_keyword(Keyword::ROWS) {
18870 OffsetRows::Rows
18871 } else {
18872 OffsetRows::None
18873 };
18874 Ok(Offset { value, rows })
18875 }
18876
18877 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
18879 let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
18880
18881 let (quantity, percent) = if self
18882 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
18883 .is_some()
18884 {
18885 (None, false)
18886 } else {
18887 let quantity = Expr::Value(self.parse_value()?);
18888 let percent = self.parse_keyword(Keyword::PERCENT);
18889 let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
18890 (Some(quantity), percent)
18891 };
18892
18893 let with_ties = if self.parse_keyword(Keyword::ONLY) {
18894 false
18895 } else {
18896 self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
18897 };
18898
18899 Ok(Fetch {
18900 with_ties,
18901 percent,
18902 quantity,
18903 })
18904 }
18905
18906 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
18908 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
18909 Keyword::UPDATE => LockType::Update,
18910 Keyword::SHARE => LockType::Share,
18911 unexpected_keyword => return Err(ParserError::ParserError(
18912 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
18913 )),
18914 };
18915 let of = if self.parse_keyword(Keyword::OF) {
18916 Some(self.parse_object_name(false)?)
18917 } else {
18918 None
18919 };
18920 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
18921 Some(NonBlock::Nowait)
18922 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
18923 Some(NonBlock::SkipLocked)
18924 } else {
18925 None
18926 };
18927 Ok(LockClause {
18928 lock_type,
18929 of,
18930 nonblock,
18931 })
18932 }
18933
    /// Parses a `LOCK [TABLE] <targets> [IN <mode> MODE] [NOWAIT]` statement,
    /// starting at the `LOCK` keyword itself.
    pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
        self.expect_keyword(Keyword::LOCK)?;

        // Explicitly reject the `LOCK TABLES` spelling here; presumably that
        // form is handled elsewhere (TODO confirm against callers).
        if self.peek_keyword(Keyword::TABLES) {
            return self.expected_ref("TABLE or a table name", self.peek_token_ref());
        }

        // The TABLE keyword is optional.
        let _ = self.parse_keyword(Keyword::TABLE);
        let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
        // Optional `IN <lock mode> MODE` clause.
        let lock_mode = if self.parse_keyword(Keyword::IN) {
            let lock_mode = self.parse_lock_table_mode()?;
            self.expect_keyword(Keyword::MODE)?;
            Some(lock_mode)
        } else {
            None
        };
        // Optional trailing NOWAIT flag.
        let nowait = self.parse_keyword(Keyword::NOWAIT);

        Ok(Lock {
            tables,
            lock_mode,
            nowait,
        })
    }
18959
18960 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
18961 let only = self.parse_keyword(Keyword::ONLY);
18962 let name = self.parse_object_name(false)?;
18963 let has_asterisk = self.consume_token(&Token::Mul);
18964
18965 Ok(LockTableTarget {
18966 name,
18967 only,
18968 has_asterisk,
18969 })
18970 }
18971
18972 fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
18973 if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
18974 Ok(LockTableMode::AccessShare)
18975 } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
18976 Ok(LockTableMode::AccessExclusive)
18977 } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
18978 Ok(LockTableMode::RowShare)
18979 } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
18980 Ok(LockTableMode::RowExclusive)
18981 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
18982 Ok(LockTableMode::ShareUpdateExclusive)
18983 } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
18984 Ok(LockTableMode::ShareRowExclusive)
18985 } else if self.parse_keyword(Keyword::SHARE) {
18986 Ok(LockTableMode::Share)
18987 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
18988 Ok(LockTableMode::Exclusive)
18989 } else {
18990 self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
18991 }
18992 }
18993
    /// Parses the row list of a `VALUES` clause.
    ///
    /// * `allow_empty` — when true, an empty row `()` is accepted.
    /// * `value_keyword` — recorded on the AST node as-is (true when the
    ///   caller saw `VALUE` instead of `VALUES`); not inspected here.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        // Becomes true if any row uses the explicit `ROW(...)` spelling.
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            // Each row may optionally be introduced by the ROW keyword.
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Empty row: consume the `)` and yield no expressions.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
19023
19024 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19026 self.expect_keyword_is(Keyword::TRANSACTION)?;
19027 Ok(Statement::StartTransaction {
19028 modes: self.parse_transaction_modes()?,
19029 begin: false,
19030 transaction: Some(BeginTransactionKind::Transaction),
19031 modifier: None,
19032 statements: vec![],
19033 exception: None,
19034 has_end_keyword: false,
19035 })
19036 }
19037
19038 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19040 if !self.dialect.supports_start_transaction_modifier() {
19041 None
19042 } else if self.parse_keyword(Keyword::DEFERRED) {
19043 Some(TransactionModifier::Deferred)
19044 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19045 Some(TransactionModifier::Immediate)
19046 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19047 Some(TransactionModifier::Exclusive)
19048 } else if self.parse_keyword(Keyword::TRY) {
19049 Some(TransactionModifier::Try)
19050 } else if self.parse_keyword(Keyword::CATCH) {
19051 Some(TransactionModifier::Catch)
19052 } else {
19053 None
19054 }
19055 }
19056
    /// Parses the remainder of a `BEGIN [<modifier>] [TRANSACTION|WORK|TRAN]
    /// [<modes>]` statement (the `BEGIN` keyword is assumed to be consumed
    /// already by the caller).
    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
        // Optional dialect-specific modifier; None when unsupported or absent.
        let modifier = self.parse_transaction_modifier();
        // Record which (optional) keyword introduced the transaction so the
        // original spelling can be reproduced.
        let transaction =
            match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
            {
                Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
                Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
                Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
                _ => None,
            };
        Ok(Statement::StartTransaction {
            modes: self.parse_transaction_modes()?,
            begin: true,
            transaction,
            modifier,
            statements: vec![],
            exception: None,
            has_end_keyword: false,
        })
    }
19078
    /// Parses the body of a procedural `BEGIN <statements>
    /// [EXCEPTION WHEN <idents> THEN <statements> ...] END` block
    /// (the opening `BEGIN` is assumed to be consumed already).
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        // Main block statements, up to EXCEPTION or END.
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler: `WHEN <ident> [OR <ident>]... THEN <statements>`.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // One or more error identifiers, terminated by THEN.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // OR between identifiers is optional; maybe_parse
                    // backtracks silently when it is absent.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                // Handler body runs until the next WHEN or the closing END.
                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19124
19125 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19127 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19128 None
19129 } else if self.parse_keyword(Keyword::TRY) {
19130 Some(TransactionModifier::Try)
19131 } else if self.parse_keyword(Keyword::CATCH) {
19132 Some(TransactionModifier::Catch)
19133 } else {
19134 None
19135 };
19136 Ok(Statement::Commit {
19137 chain: self.parse_commit_rollback_chain()?,
19138 end: true,
19139 modifier,
19140 })
19141 }
19142
    /// Parses a (possibly empty) list of transaction modes:
    /// `ISOLATION LEVEL <level>`, `READ ONLY`, or `READ WRITE`, with
    /// optional comma separators between modes.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        // After a comma the next mode is mandatory; otherwise the list may
        // simply end (including ending empty before the first mode).
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    // `ISOLATION LEVEL` was consumed, so a level is required.
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            // A trailing comma commits us to parsing another mode.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19181
19182 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19184 Ok(Statement::Commit {
19185 chain: self.parse_commit_rollback_chain()?,
19186 end: false,
19187 modifier: None,
19188 })
19189 }
19190
19191 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19193 let chain = self.parse_commit_rollback_chain()?;
19194 let savepoint = self.parse_rollback_savepoint()?;
19195
19196 Ok(Statement::Rollback { chain, savepoint })
19197 }
19198
    /// Parses the trailer shared by COMMIT/ROLLBACK/END: an ignored
    /// `TRANSACTION`/`WORK`/`TRAN` keyword followed by an optional
    /// `AND [NO] CHAIN`. Returns true only for `AND CHAIN`.
    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
        // The transaction-introducing keyword is purely decorative here.
        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
        if self.parse_keyword(Keyword::AND) {
            // `AND CHAIN` => true, `AND NO CHAIN` => false.
            let chain = !self.parse_keyword(Keyword::NO);
            self.expect_keyword_is(Keyword::CHAIN)?;
            Ok(chain)
        } else {
            Ok(false)
        }
    }
19210
19211 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19213 if self.parse_keyword(Keyword::TO) {
19214 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19215 let savepoint = self.parse_identifier()?;
19216
19217 Ok(Some(savepoint))
19218 } else {
19219 Ok(None)
19220 }
19221 }
19222
    /// Parses the remainder of a `RAISERROR(message, severity, state
    /// [, args...]) [WITH option, ...]` statement.
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        // The first three arguments are mandatory: message, severity, state.
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Any further comma-separated expressions are substitution arguments.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        // Optional `WITH LOG | NOWAIT | SETERROR` option list.
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
19250
    /// Parses a single RAISERROR `WITH` option: `LOG`, `NOWAIT`, or `SETERROR`.
    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
            Keyword::LOG => Ok(RaisErrorOption::Log),
            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
            // Defensive: expect_one_of_keywords only yields listed keywords,
            // so this arm should be unreachable.
            _ => self.expected_ref(
                "LOG, NOWAIT OR SETERROR raiserror option",
                self.peek_token_ref(),
            ),
        }
    }
19263
    /// Parses a `THROW [error_number, message, state]` statement. The three
    /// arguments are all-or-nothing: if the first expression parses, the
    /// remaining two are required.
    pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
        self.expect_keyword_is(Keyword::THROW)?;

        // maybe_parse backtracks cleanly when no expression follows (the
        // bare `THROW` form).
        let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
        let (message, state) = if error_number.is_some() {
            self.expect_token(&Token::Comma)?;
            let message = Box::new(self.parse_expr()?);
            self.expect_token(&Token::Comma)?;
            let state = Box::new(self.parse_expr()?);
            (Some(message), Some(state))
        } else {
            (None, None)
        };

        Ok(ThrowStatement {
            error_number,
            message,
            state,
        })
    }
19287
19288 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
19290 let prepare = self.parse_keyword(Keyword::PREPARE);
19291 let name = self.parse_identifier()?;
19292 Ok(Statement::Deallocate { name, prepare })
19293 }
19294
    /// Parses the remainder of an `EXECUTE [IMMEDIATE] [<name>]
    /// [(<params>) | <params>] [INTO ...] [USING ...] [OUTPUT] [DEFAULT]`
    /// statement.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        // `EXECUTE IMMEDIATE` (dialect-gated) executes a statement
        // expression instead of a named prepared statement.
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // No name when IMMEDIATE, or when the parameter list starts right
        // away with an opening parenthesis.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Pick the token that terminates the parameter list: `)` when
        // parenthesized; otherwise EOF, a following clause keyword, or `;`.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        // Possibly-empty comma-separated parameter list, up to end_token.
        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        // Optional `INTO ident, ...` output variables.
        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        // Optional `USING expr [AS alias], ...` bind arguments.
        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        // Optional trailing OUTPUT and DEFAULT flags.
        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
19354
19355 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
19357 let name = self.parse_identifier()?;
19358
19359 let mut data_types = vec![];
19360 if self.consume_token(&Token::LParen) {
19361 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
19362 self.expect_token(&Token::RParen)?;
19363 }
19364
19365 self.expect_keyword_is(Keyword::AS)?;
19366 let statement = Box::new(self.parse_statement()?);
19367 Ok(Statement::Prepare {
19368 name,
19369 data_types,
19370 statement,
19371 })
19372 }
19373
    /// Parses an `UNLOAD ('<query>' | (<query>)) TO <target>
    /// [IAM_ROLE ...] [WITH (...)] [<legacy options>...]` statement,
    /// starting at the `UNLOAD` keyword.
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::UNLOAD)?;
        self.expect_token(&Token::LParen)?;
        // The payload is either a quoted query string or an inline query;
        // exactly one of `query` / `query_text` ends up Some.
        let (query, query_text) =
            if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                (None, Some(self.parse_literal_string()?))
            } else {
                (Some(self.parse_query()?), None)
            };
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;
        // Optional `IAM_ROLE ...` authorization clause.
        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
            Some(self.parse_iam_role_kind()?)
        } else {
            None
        };
        let with = self.parse_options(Keyword::WITH)?;
        // Trailing copy-style legacy options, consumed greedily until one
        // fails to parse (maybe_parse backtracks on failure).
        let mut options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            options.push(opt);
        }
        Ok(Statement::Unload {
            query,
            query_text,
            to,
            auth,
            with,
            options,
        })
    }
19407
19408 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
19409 let temporary = self
19410 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
19411 .is_some();
19412 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
19413 let table = self.parse_keyword(Keyword::TABLE);
19414 let name = self.parse_object_name(false)?;
19415
19416 Ok(SelectInto {
19417 temporary,
19418 unlogged,
19419 table,
19420 name,
19421 })
19422 }
19423
    /// Parses a PRAGMA value, restricted to string, number, or `?`
    /// placeholder literals; any other value kind is rejected.
    fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let v = self.parse_value()?;
        match &v.value {
            Value::SingleQuotedString(_) => Ok(v),
            Value::DoubleQuotedString(_) => Ok(v),
            Value::Number(_, _) => Ok(v),
            Value::Placeholder(_) => Ok(v),
            _ => {
                // Step back one token so the error points at the offending
                // value rather than the token after it.
                self.prev_token();
                self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
            }
        }
    }
19437
19438 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
19440 let name = self.parse_object_name(false)?;
19441 if self.consume_token(&Token::LParen) {
19442 let value = self.parse_pragma_value()?;
19443 self.expect_token(&Token::RParen)?;
19444 Ok(Statement::Pragma {
19445 name,
19446 value: Some(value),
19447 is_eq: false,
19448 })
19449 } else if self.consume_token(&Token::Eq) {
19450 Ok(Statement::Pragma {
19451 name,
19452 value: Some(self.parse_pragma_value()?),
19453 is_eq: true,
19454 })
19455 } else {
19456 Ok(Statement::Pragma {
19457 name,
19458 value: None,
19459 is_eq: false,
19460 })
19461 }
19462 }
19463
19464 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
19466 let extension_name = self.parse_identifier()?;
19467
19468 Ok(Statement::Install { extension_name })
19469 }
19470
    /// Parses what follows the `LOAD` keyword. Two forms are supported:
    /// `LOAD <extension>` for dialects with load-extension support, and
    /// `LOAD DATA [LOCAL] INPATH '<path>' [OVERWRITE] INTO TABLE <name>
    /// [PARTITION ...] [<table format>]` for dialects with load-data support.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            // NOTE(review): due to &&-ordering, the DATA keyword is consumed
            // before the dialect capability check, so on unsupported dialects
            // the fallback error below points at the token after DATA —
            // confirm this is intended.
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected_ref(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token_ref(),
            )
        }
    }
19501
    /// Parses the remainder of an `OPTIMIZE [TABLE] <name> [ON CLUSTER ...]
    /// [PARTITION ...] [FINAL] [DEDUPLICATE [BY expr]] [WHERE expr]
    /// [ZORDER BY (...)]` statement.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // Optional `PARTITION ID <ident>` or `PARTITION <expr>`.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        // Bare `DEDUPLICATE` deduplicates on all columns; `DEDUPLICATE BY
        // <expr>` restricts the comparison.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        // Optional `WHERE <expr>` filter.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // Optional `ZORDER BY (col, ...)` clustering expressions.
        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
19570
19571 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
19577 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
19579 let name = self.parse_object_name(false)?;
19581 let mut data_type: Option<DataType> = None;
19583 if self.parse_keywords(&[Keyword::AS]) {
19584 data_type = Some(self.parse_data_type()?)
19585 }
19586 let sequence_options = self.parse_create_sequence_options()?;
19587 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
19589 if self.parse_keywords(&[Keyword::NONE]) {
19590 Some(ObjectName::from(vec![Ident::new("NONE")]))
19591 } else {
19592 Some(self.parse_object_name(false)?)
19593 }
19594 } else {
19595 None
19596 };
19597 Ok(Statement::CreateSequence {
19598 temporary,
19599 if_not_exists,
19600 name,
19601 data_type,
19602 sequence_options,
19603 owned_by,
19604 })
19605 }
19606
19607 fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
19608 let mut sequence_options = vec![];
19609 if self.parse_keywords(&[Keyword::INCREMENT]) {
19611 if self.parse_keywords(&[Keyword::BY]) {
19612 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
19613 } else {
19614 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
19615 }
19616 }
19617 if self.parse_keyword(Keyword::MINVALUE) {
19619 sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
19620 } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
19621 sequence_options.push(SequenceOptions::MinValue(None));
19622 }
19623 if self.parse_keywords(&[Keyword::MAXVALUE]) {
19625 sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
19626 } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
19627 sequence_options.push(SequenceOptions::MaxValue(None));
19628 }
19629
19630 if self.parse_keywords(&[Keyword::START]) {
19632 if self.parse_keywords(&[Keyword::WITH]) {
19633 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
19634 } else {
19635 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
19636 }
19637 }
19638 if self.parse_keywords(&[Keyword::CACHE]) {
19640 sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
19641 }
19642 if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
19644 sequence_options.push(SequenceOptions::Cycle(true));
19645 } else if self.parse_keywords(&[Keyword::CYCLE]) {
19646 sequence_options.push(SequenceOptions::Cycle(false));
19647 }
19648
19649 Ok(sequence_options)
19650 }
19651
19652 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
19656 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
19657 let name = self.parse_object_name(false)?;
19658
19659 let server_type = if self.parse_keyword(Keyword::TYPE) {
19660 Some(self.parse_identifier()?)
19661 } else {
19662 None
19663 };
19664
19665 let version = if self.parse_keyword(Keyword::VERSION) {
19666 Some(self.parse_identifier()?)
19667 } else {
19668 None
19669 };
19670
19671 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
19672 let foreign_data_wrapper = self.parse_object_name(false)?;
19673
19674 let mut options = None;
19675 if self.parse_keyword(Keyword::OPTIONS) {
19676 self.expect_token(&Token::LParen)?;
19677 options = Some(self.parse_comma_separated(|p| {
19678 let key = p.parse_identifier()?;
19679 let value = p.parse_identifier()?;
19680 Ok(CreateServerOption { key, value })
19681 })?);
19682 self.expect_token(&Token::RParen)?;
19683 }
19684
19685 Ok(Statement::CreateServer(CreateServerStatement {
19686 name,
19687 if_not_exists: ine,
19688 server_type,
19689 version,
19690 foreign_data_wrapper,
19691 options,
19692 }))
19693 }
19694
    /// Returns the parser's current index into the token stream.
    pub fn index(&self) -> usize {
        self.index
    }
19699
    /// Parses one entry of a `WINDOW` clause: `<name> AS (<window spec>)`,
    /// or `<name> AS <other window name>` for dialects that allow named-
    /// window references in the WINDOW clause.
    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
        let ident = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;

        let window_expr = if self.consume_token(&Token::LParen) {
            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
        } else if self.dialect.supports_window_clause_named_window_reference() {
            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
        } else {
            return self.expected_ref("(", self.peek_token_ref());
        };

        Ok(NamedWindowDefinition(ident, window_expr))
    }
19715
    /// Parses the remainder of `CREATE [OR ALTER] PROCEDURE <name>
    /// [(<params>)] [LANGUAGE <lang>] AS <body ... END>`.
    ///
    /// `or_alter` reports whether the caller saw `OR ALTER`.
    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        let params = self.parse_optional_procedure_parameters()?;

        // Optional `LANGUAGE <identifier>` clause.
        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;

        // The procedure body runs until the closing END keyword.
        let body = self.parse_conditional_statements(&[Keyword::END])?;

        Ok(Statement::CreateProcedure {
            name,
            or_alter,
            params,
            language,
            body,
        })
    }
19739
    /// Parses the interior of a window specification after the opening `(`:
    /// `[<base window name>] [PARTITION BY ...] [ORDER BY ...] [<frame>])`.
    /// The closing parenthesis is consumed here.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword word names an existing window to inherit from.
        let window_name = match &self.peek_token_ref().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything remaining before `)` must be a window frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
19774
    /// Parses the remainder of a `CREATE TYPE <name>` statement and
    /// dispatches on what follows: a bare shell type, a parenthesized SQL
    /// definition, or `AS ENUM | AS RANGE | AS (<composite>)`.
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_as = self.parse_keyword(Keyword::AS);

        if !has_as {
            // Without AS: either `CREATE TYPE name (options...)` (SQL
            // definition form) or a shell type with no representation.
            if self.consume_token(&Token::LParen) {
                let options = self.parse_create_type_sql_definition_options()?;
                self.expect_token(&Token::RParen)?;
                return Ok(Statement::CreateType {
                    name,
                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
                });
            }

            return Ok(Statement::CreateType {
                name,
                representation: None,
            });
        }

        // With AS: enum, range, or composite (attribute list) form.
        if self.parse_keyword(Keyword::ENUM) {
            self.parse_create_type_enum(name)
        } else if self.parse_keyword(Keyword::RANGE) {
            self.parse_create_type_range(name)
        } else if self.consume_token(&Token::LParen) {
            self.parse_create_type_composite(name)
        } else {
            self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
        }
    }
19815
    /// Parses the attribute list of `CREATE TYPE <name> AS ( ... )`; the
    /// opening parenthesis has already been consumed by the caller.
    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        // `AS ()` — an empty composite type.
        if self.consume_token(&Token::RParen) {
            return Ok(Statement::CreateType {
                name,
                representation: Some(UserDefinedTypeRepresentation::Composite {
                    attributes: vec![],
                }),
            });
        }

        // One or more `name data_type [COLLATE collation]` attributes,
        // separated by commas.
        let mut attributes = vec![];
        loop {
            let attr_name = self.parse_identifier()?;
            let attr_data_type = self.parse_data_type()?;
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });

            // No trailing comma means the list is finished.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
        })
    }
19856
19857 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
19861 self.expect_token(&Token::LParen)?;
19862 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
19863 self.expect_token(&Token::RParen)?;
19864
19865 Ok(Statement::CreateType {
19866 name,
19867 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
19868 })
19869 }
19870
19871 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
19875 self.expect_token(&Token::LParen)?;
19876 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
19877 self.expect_token(&Token::RParen)?;
19878
19879 Ok(Statement::CreateType {
19880 name,
19881 representation: Some(UserDefinedTypeRepresentation::Range { options }),
19882 })
19883 }
19884
    /// Parses one `key = value` option of `CREATE TYPE ... AS RANGE (...)`.
    /// SUBTYPE takes a data type; every other option takes an object name.
    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::SUBTYPE,
            Keyword::SUBTYPE_OPCLASS,
            Keyword::COLLATION,
            Keyword::CANONICAL,
            Keyword::SUBTYPE_DIFF,
            Keyword::MULTIRANGE_TYPE_NAME,
        ]);

        // The `=` is only expected after a recognized keyword, so each arm
        // consumes it individually; hoisting it would change error behavior.
        match keyword {
            Some(Keyword::SUBTYPE) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
            }
            Some(Keyword::SUBTYPE_OPCLASS) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
            }
            Some(Keyword::COLLATION) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::Collation(name))
            }
            Some(Keyword::CANONICAL) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::Canonical(name))
            }
            Some(Keyword::SUBTYPE_DIFF) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
            }
            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
            }
            // None: no option keyword matched at the current position.
            _ => self.expected_ref("range option keyword", self.peek_token_ref()),
        }
    }
19930
19931 fn parse_create_type_sql_definition_options(
19933 &mut self,
19934 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
19935 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
19936 }
19937
19938 fn parse_sql_definition_option(
19940 &mut self,
19941 ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
19942 let keyword = self.parse_one_of_keywords(&[
19943 Keyword::INPUT,
19944 Keyword::OUTPUT,
19945 Keyword::RECEIVE,
19946 Keyword::SEND,
19947 Keyword::TYPMOD_IN,
19948 Keyword::TYPMOD_OUT,
19949 Keyword::ANALYZE,
19950 Keyword::SUBSCRIPT,
19951 Keyword::INTERNALLENGTH,
19952 Keyword::PASSEDBYVALUE,
19953 Keyword::ALIGNMENT,
19954 Keyword::STORAGE,
19955 Keyword::LIKE,
19956 Keyword::CATEGORY,
19957 Keyword::PREFERRED,
19958 Keyword::DEFAULT,
19959 Keyword::ELEMENT,
19960 Keyword::DELIMITER,
19961 Keyword::COLLATABLE,
19962 ]);
19963
19964 match keyword {
19965 Some(Keyword::INPUT) => {
19966 self.expect_token(&Token::Eq)?;
19967 let name = self.parse_object_name(false)?;
19968 Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
19969 }
19970 Some(Keyword::OUTPUT) => {
19971 self.expect_token(&Token::Eq)?;
19972 let name = self.parse_object_name(false)?;
19973 Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
19974 }
19975 Some(Keyword::RECEIVE) => {
19976 self.expect_token(&Token::Eq)?;
19977 let name = self.parse_object_name(false)?;
19978 Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
19979 }
19980 Some(Keyword::SEND) => {
19981 self.expect_token(&Token::Eq)?;
19982 let name = self.parse_object_name(false)?;
19983 Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
19984 }
19985 Some(Keyword::TYPMOD_IN) => {
19986 self.expect_token(&Token::Eq)?;
19987 let name = self.parse_object_name(false)?;
19988 Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
19989 }
19990 Some(Keyword::TYPMOD_OUT) => {
19991 self.expect_token(&Token::Eq)?;
19992 let name = self.parse_object_name(false)?;
19993 Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
19994 }
19995 Some(Keyword::ANALYZE) => {
19996 self.expect_token(&Token::Eq)?;
19997 let name = self.parse_object_name(false)?;
19998 Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
19999 }
20000 Some(Keyword::SUBSCRIPT) => {
20001 self.expect_token(&Token::Eq)?;
20002 let name = self.parse_object_name(false)?;
20003 Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
20004 }
20005 Some(Keyword::INTERNALLENGTH) => {
20006 self.expect_token(&Token::Eq)?;
20007 if self.parse_keyword(Keyword::VARIABLE) {
20008 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
20009 UserDefinedTypeInternalLength::Variable,
20010 ))
20011 } else {
20012 let value = self.parse_literal_uint()?;
20013 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
20014 UserDefinedTypeInternalLength::Fixed(value),
20015 ))
20016 }
20017 }
20018 Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
20019 Some(Keyword::ALIGNMENT) => {
20020 self.expect_token(&Token::Eq)?;
20021 let align_keyword = self.parse_one_of_keywords(&[
20022 Keyword::CHAR,
20023 Keyword::INT2,
20024 Keyword::INT4,
20025 Keyword::DOUBLE,
20026 ]);
20027 match align_keyword {
20028 Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
20029 Alignment::Char,
20030 )),
20031 Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
20032 Alignment::Int2,
20033 )),
20034 Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
20035 Alignment::Int4,
20036 )),
20037 Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
20038 Alignment::Double,
20039 )),
20040 _ => self.expected_ref(
20041 "alignment value (char, int2, int4, or double)",
20042 self.peek_token_ref(),
20043 ),
20044 }
20045 }
20046 Some(Keyword::STORAGE) => {
20047 self.expect_token(&Token::Eq)?;
20048 let storage_keyword = self.parse_one_of_keywords(&[
20049 Keyword::PLAIN,
20050 Keyword::EXTERNAL,
20051 Keyword::EXTENDED,
20052 Keyword::MAIN,
20053 ]);
20054 match storage_keyword {
20055 Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
20056 UserDefinedTypeStorage::Plain,
20057 )),
20058 Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
20059 UserDefinedTypeStorage::External,
20060 )),
20061 Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
20062 UserDefinedTypeStorage::Extended,
20063 )),
20064 Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
20065 UserDefinedTypeStorage::Main,
20066 )),
20067 _ => self.expected_ref(
20068 "storage value (plain, external, extended, or main)",
20069 self.peek_token_ref(),
20070 ),
20071 }
20072 }
20073 Some(Keyword::LIKE) => {
20074 self.expect_token(&Token::Eq)?;
20075 let name = self.parse_object_name(false)?;
20076 Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
20077 }
20078 Some(Keyword::CATEGORY) => {
20079 self.expect_token(&Token::Eq)?;
20080 let category_str = self.parse_literal_string()?;
20081 let category_char = category_str.chars().next().ok_or_else(|| {
20082 ParserError::ParserError(
20083 "CATEGORY value must be a single character".to_string(),
20084 )
20085 })?;
20086 Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
20087 }
20088 Some(Keyword::PREFERRED) => {
20089 self.expect_token(&Token::Eq)?;
20090 let value =
20091 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
20092 Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
20093 }
20094 Some(Keyword::DEFAULT) => {
20095 self.expect_token(&Token::Eq)?;
20096 let expr = self.parse_expr()?;
20097 Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
20098 }
20099 Some(Keyword::ELEMENT) => {
20100 self.expect_token(&Token::Eq)?;
20101 let data_type = self.parse_data_type()?;
20102 Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
20103 }
20104 Some(Keyword::DELIMITER) => {
20105 self.expect_token(&Token::Eq)?;
20106 let delimiter = self.parse_literal_string()?;
20107 Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
20108 }
20109 Some(Keyword::COLLATABLE) => {
20110 self.expect_token(&Token::Eq)?;
20111 let value =
20112 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
20113 Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
20114 }
20115 _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
20116 }
20117 }
20118
20119 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
20120 self.expect_token(&Token::LParen)?;
20121 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
20122 self.expect_token(&Token::RParen)?;
20123 Ok(idents)
20124 }
20125
20126 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
20127 if dialect_of!(self is MySqlDialect | GenericDialect) {
20128 if self.parse_keyword(Keyword::FIRST) {
20129 Ok(Some(MySQLColumnPosition::First))
20130 } else if self.parse_keyword(Keyword::AFTER) {
20131 let ident = self.parse_identifier()?;
20132 Ok(Some(MySQLColumnPosition::After(ident)))
20133 } else {
20134 Ok(None)
20135 }
20136 } else {
20137 Ok(None)
20138 }
20139 }
20140
20141 fn parse_print(&mut self) -> Result<Statement, ParserError> {
20143 Ok(Statement::Print(PrintStatement {
20144 message: Box::new(self.parse_expr()?),
20145 }))
20146 }
20147
20148 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
20152 let wait_type = if self.parse_keyword(Keyword::DELAY) {
20153 WaitForType::Delay
20154 } else if self.parse_keyword(Keyword::TIME) {
20155 WaitForType::Time
20156 } else {
20157 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
20158 };
20159 let expr = self.parse_expr()?;
20160 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
20161 }
20162
20163 fn parse_return(&mut self) -> Result<Statement, ParserError> {
20165 match self.maybe_parse(|p| p.parse_expr())? {
20166 Some(expr) => Ok(Statement::Return(ReturnStatement {
20167 value: Some(ReturnStatementValue::Expr(expr)),
20168 })),
20169 None => Ok(Statement::Return(ReturnStatement { value: None })),
20170 }
20171 }
20172
20173 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
20177 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
20178
20179 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
20180 Some(self.parse_object_name(false)?)
20181 } else {
20182 None
20183 };
20184 self.expect_keyword(Keyword::OPTIONS)?;
20185 self.expect_token(&Token::LParen)?;
20186 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
20187 self.expect_token(&Token::RParen)?;
20188 self.expect_keyword(Keyword::AS)?;
20189 let query = self.parse_query()?;
20190 Ok(Statement::ExportData(ExportData {
20191 options,
20192 query,
20193 connection,
20194 }))
20195 }
20196
    /// Parses a `VACUUM` statement:
    /// `VACUUM [FULL] [SORT ONLY] [DELETE ONLY] [REINDEX] [RECLUSTER]
    ///  [<table> [TO <value> PERCENT] [BOOST]]`
    /// (Redshift-flavored option set — presumably; confirm against dialect docs.)
    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::VACUUM)?;
        // Each flag is optional and parsed in this fixed order.
        let full = self.parse_keyword(Keyword::FULL);
        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
        let reindex = self.parse_keyword(Keyword::REINDEX);
        let recluster = self.parse_keyword(Keyword::RECLUSTER);
        // The table name is optional; its TO..PERCENT / BOOST suffixes only
        // make sense when a table name is present.
        let (table_name, threshold, boost) =
            match self.maybe_parse(|p| p.parse_object_name(false))? {
                Some(table_name) => {
                    let threshold = if self.parse_keyword(Keyword::TO) {
                        let value = self.parse_value()?;
                        self.expect_keyword(Keyword::PERCENT)?;
                        Some(value)
                    } else {
                        None
                    };
                    let boost = self.parse_keyword(Keyword::BOOST);
                    (Some(table_name), threshold, boost)
                }
                _ => (None, None, false),
            };
        Ok(Statement::Vacuum(VacuumStatement {
            full,
            sort_only,
            delete_only,
            reindex,
            recluster,
            table_name,
            threshold,
            boost,
        }))
    }
20230
    /// Consumes the parser and returns its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
20235
20236 fn peek_sub_query(&mut self) -> bool {
20238 self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
20239 .is_some()
20240 }
20241
    /// Parses the trailing options of a `SHOW` statement: an optional filter
    /// (LIKE/WHERE), an optional `FROM`/`IN` scope, and the optional
    /// `STARTS WITH`, `LIMIT`, and `FROM` clauses, in that order.
    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
        let show_in;
        let mut filter_position = None;
        // Dialects disagree on whether the filter appears before (infix) or
        // after (suffix) the FROM/IN clause; record which position was used.
        if self.dialect.supports_show_like_before_in() {
            if let Some(filter) = self.parse_show_statement_filter()? {
                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
            }
            show_in = self.maybe_parse_show_stmt_in()?;
        } else {
            show_in = self.maybe_parse_show_stmt_in()?;
            if let Some(filter) = self.parse_show_statement_filter()? {
                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
            }
        }
        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
        let limit = self.maybe_parse_show_stmt_limit()?;
        let from = self.maybe_parse_show_stmt_from()?;
        Ok(ShowStatementOptions {
            filter_position,
            show_in,
            starts_with,
            limit,
            limit_from: from,
        })
    }
20267
    /// Parses the optional `FROM`/`IN` clause of a `SHOW` statement,
    /// returning `None` when neither keyword follows.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` / `IN SCHEMA` directly followed by `STARTS WITH`
            // or `LIMIT` carries no parent name.
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            Some(parent_kw) => {
                // The parent name after the type keyword is itself optional.
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected_ref(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token_ref(),
                        )
                    }
                }
            }
            None => {
                // No parent-type keyword: take a bare object name, which may be
                // further qualified by a trailing `FROM`/`IN <ident>` (the extra
                // identifier is prepended as the outermost name part).
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
20337
20338 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
20339 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
20340 Ok(Some(self.parse_value()?))
20341 } else {
20342 Ok(None)
20343 }
20344 }
20345
20346 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
20347 if self.parse_keyword(Keyword::LIMIT) {
20348 Ok(self.parse_limit()?)
20349 } else {
20350 Ok(None)
20351 }
20352 }
20353
20354 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
20355 if self.parse_keyword(Keyword::FROM) {
20356 Ok(Some(self.parse_value()?))
20357 } else {
20358 Ok(None)
20359 }
20360 }
20361
20362 pub(crate) fn in_column_definition_state(&self) -> bool {
20363 matches!(self.state, ColumnDefinition)
20364 }
20365
20366 pub(crate) fn parse_key_value_options(
20371 &mut self,
20372 parenthesized: bool,
20373 end_words: &[Keyword],
20374 ) -> Result<KeyValueOptions, ParserError> {
20375 let mut options: Vec<KeyValueOption> = Vec::new();
20376 let mut delimiter = KeyValueOptionsDelimiter::Space;
20377 if parenthesized {
20378 self.expect_token(&Token::LParen)?;
20379 }
20380 loop {
20381 match self.next_token().token {
20382 Token::RParen => {
20383 if parenthesized {
20384 break;
20385 } else {
20386 return self.expected_ref(" another option or EOF", self.peek_token_ref());
20387 }
20388 }
20389 Token::EOF | Token::SemiColon => break,
20390 Token::Comma => {
20391 delimiter = KeyValueOptionsDelimiter::Comma;
20392 continue;
20393 }
20394 Token::Word(w) if !end_words.contains(&w.keyword) => {
20395 options.push(self.parse_key_value_option(&w)?)
20396 }
20397 Token::Word(w) if end_words.contains(&w.keyword) => {
20398 self.prev_token();
20399 break;
20400 }
20401 _ => {
20402 return self.expected_ref(
20403 "another option, EOF, SemiColon, Comma or ')'",
20404 self.peek_token_ref(),
20405 )
20406 }
20407 };
20408 }
20409
20410 Ok(KeyValueOptions { delimiter, options })
20411 }
20412
    /// Parses the `= value` part of a single key-value option; `key` has
    /// already been consumed by the caller.
    ///
    /// The value may be a quoted string, `TRUE`/`FALSE`, a number, a bare
    /// word (stored verbatim as a placeholder), a parenthesized list of
    /// values, or a nested parenthesized key-value option list.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            // 'quoted string' value
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Boolean value
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            // Numeric value
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word is not a parseable literal: keep its text
            // verbatim as a placeholder value.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            Token::LParen => {
                // First try `(v1, v2, ...)` as a list of plain values; on
                // failure, backtrack and parse a nested key-value option list.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
20471
20472 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
20474 if self.parse_keyword(Keyword::ALL) {
20475 return Ok(ResetStatement { reset: Reset::ALL });
20476 }
20477
20478 let obj = self.parse_object_name(false)?;
20479 Ok(ResetStatement {
20480 reset: Reset::ConfigurationParameter(obj),
20481 })
20482 }
20483}
20484
20485fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
20486 if let Some(prefix) = prefix {
20487 Expr::Prefixed {
20488 prefix,
20489 value: Box::new(expr),
20490 }
20491 } else {
20492 expr
20493 }
20494}
20495
impl Word {
    /// Builds an [`Ident`] from this word with the given span, cloning the
    /// word's value.
    pub fn to_ident(&self, span: Span) -> Ident {
        Ident {
            value: self.value.clone(),
            quote_style: self.quote_style,
            span,
        }
    }

    /// Converts this word into an [`Ident`] with the given span, consuming it
    /// (no clone of the value).
    pub fn into_ident(self, span: Span) -> Ident {
        Ident {
            value: self.value,
            quote_style: self.quote_style,
            span,
        }
    }
}
20522
20523#[cfg(test)]
20524mod tests {
20525 use crate::test_utils::{all_dialects, TestedDialects};
20526
20527 use super::*;
20528
    /// Exercises `next_token`/`prev_token`/`peek_token` navigation, including
    /// stepping backwards across EOF (the stream yields EOF repeatedly).
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
20549
    /// Verifies `peek_tokens` for different lookahead widths, and that peeking
    /// past the end of input pads the array with EOF tokens.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Consume "SELECT foo AS bar", leaving "FROM baz".
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
20594
20595 #[cfg(test)]
20596 mod test_parse_data_type {
20597 use crate::ast::{
20598 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
20599 };
20600 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
20601 use crate::test_utils::TestedDialects;
20602
        /// Parses `$input` as a data type using `$dialect`, asserting both the
        /// resulting AST node and that it round-trips back to the input text.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
20612
        /// ANSI character string types (CHARACTER/CHAR/VARYING/VARCHAR) with
        /// optional length and CHARACTERS/OCTETS length units.
        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
20740
        /// ANSI character large object types (CHARACTER/CHAR LARGE OBJECT and
        /// CLOB), with and without a length.
        #[test]
        fn test_ansii_character_large_object_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }
20772
        /// Unknown type names fall back to `DataType::Custom`, preserving any
        /// parenthesized modifier list as raw strings.
        #[test]
        fn test_parse_custom_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }
20802
        /// Exact numeric types (NUMERIC/DECIMAL/DEC) with precision, scale,
        /// negative scale, and an explicit-plus scale (normalized on display).
        #[test]
        fn test_ansii_exact_numeric_types() {
            let dialect = TestedDialects::new(vec![
                Box::new(GenericDialect {}),
                Box::new(AnsiDialect {}),
                Box::new(PostgreSqlDialect {}),
            ]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            // Negative scales round-trip unchanged.
            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-2)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(1000,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-1000)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-5)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(20,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-2)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
            );

            // An explicit '+' on the scale parses but is dropped on display,
            // so this case cannot use the round-tripping macro.
            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
                let data_type = parser.parse_data_type().unwrap();
                assert_eq!(
                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
                    data_type
                );
                assert_eq!("NUMERIC(10,5)", data_type.to_string());
            });
        }
20901
        /// ANSI date/time types: DATE, TIME, and TIMESTAMP with optional
        /// precision and WITH/WITHOUT TIME ZONE qualifiers.
        #[test]
        fn test_ansii_date_type() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
20966 }
20967
    /// Schema names in the three ANSI forms: plain name, `AUTHORIZATION <id>`,
    /// and `<name> AUTHORIZATION <id>`; each must round-trip to its input.
    #[test]
    fn test_parse_schema_name() {
        // Parses `$input` as a schema name and checks both AST and display.
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }
21000
    /// MySQL `INDEX`/`KEY` table constraints: optional name, optional
    /// `USING BTREE|HASH`, and one or more indexed columns; each form must
    /// round-trip to its input text.
    #[test]
    fn mysql_parse_index_table_constraint() {
        // Parses `$input` as an optional table constraint and checks both the
        // AST and its display round-trip.
        macro_rules! test_parse_table_constraint {
            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                    assert_eq!(constraint, $expected);
                    assert_eq!(constraint.to_string(), $input.to_string());
                });
            }};
        }

        // Builds the expected index column entry for a plain column name
        // (no ordering options, no operator class).
        fn mk_expected_col(name: &str) -> IndexColumn {
            IndexColumn {
                column: OrderByExpr {
                    expr: Expr::Identifier(name.into()),
                    options: OrderByOptions {
                        asc: None,
                        nulls_first: None,
                    },
                    with_fill: None,
                },
                operator_class: None,
            }
        }

        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

        test_parse_table_constraint!(
            dialect,
            "INDEX (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "KEY (c1)",
            IndexConstraint {
                display_as_key: true,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX 'index' (c1, c2)",
            TableConstraint::Index(IndexConstraint {
                display_as_key: false,
                name: Some(Ident::with_quote('\'', "index")),
                index_type: None,
                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
                index_options: vec![],
            })
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING BTREE (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING HASH (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING BTREE (c1)",
            IndexConstraint {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING HASH (c1)",
            IndexConstraint {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );
    }
21122
    /// Tokenizer errors surface as `ParserError::TokenizerError` and carry the
    /// source line/column location.
    #[test]
    fn test_tokenizer_error_loc() {
        let sql = "foo '";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::TokenizerError(
                "Unterminated string literal at Line: 1, Column: 5".to_string()
            ))
        );
    }
21134
    /// Parser errors carry the source line/column location of the offending token.
    #[test]
    fn test_parser_error_loc() {
        let sql = "SELECT this is a syntax error";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
                    .to_string()
            ))
        );
    }
21147
    /// `EXPLAIN` cannot be nested; a nested `EXPLAIN` is rejected with a
    /// dedicated error message.
    #[test]
    fn test_nested_explain_error() {
        let sql = "EXPLAIN EXPLAIN SELECT 1";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Explain must be root of the plan".to_string()
            ))
        );
    }
21159
    /// `parse_multipart_identifier` accepts quoted parts (with embedded dots,
    /// parens, and doubled quotes) and tolerates whitespace around the dots.
    #[test]
    fn test_parse_multipart_identifier_positive() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        // Quoted middle part containing a dot, parens, and an escaped quote.
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "F(o)o. \"bar".to_string(),
                quote_style: Some('"'),
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });

        // Whitespace around the separating dot is allowed.
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method("CATALOG . table", |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });
    }
21205
21206 #[test]
21207 fn test_parse_multipart_identifier_negative() {
21208 macro_rules! test_parse_multipart_identifier_error {
21209 ($input:expr, $expected_err:expr $(,)?) => {{
21210 all_dialects().run_parser_method(&*$input, |parser| {
21211 let actual_err = parser.parse_multipart_identifier().unwrap_err();
21212 assert_eq!(actual_err.to_string(), $expected_err);
21213 });
21214 }};
21215 }
21216
21217 test_parse_multipart_identifier_error!(
21218 "",
21219 "sql parser error: Empty input when parsing identifier",
21220 );
21221
21222 test_parse_multipart_identifier_error!(
21223 "*schema.table",
21224 "sql parser error: Unexpected token in identifier: *",
21225 );
21226
21227 test_parse_multipart_identifier_error!(
21228 "schema.table*",
21229 "sql parser error: Unexpected token in identifier: *",
21230 );
21231
21232 test_parse_multipart_identifier_error!(
21233 "schema.table.",
21234 "sql parser error: Trailing period in identifier",
21235 );
21236
21237 test_parse_multipart_identifier_error!(
21238 "schema.*",
21239 "sql parser error: Unexpected token following period in identifier: *",
21240 );
21241 }
21242
21243 #[test]
21244 fn test_mysql_partition_selection() {
21245 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
21246 let expected = vec!["p0", "p2"];
21247
21248 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
21249 assert_eq!(ast.len(), 1);
21250 if let Statement::Query(v) = &ast[0] {
21251 if let SetExpr::Select(select) = &*v.body {
21252 assert_eq!(select.from.len(), 1);
21253 let from: &TableWithJoins = &select.from[0];
21254 let table_factor = &from.relation;
21255 if let TableFactor::Table { partitions, .. } = table_factor {
21256 let actual: Vec<&str> = partitions
21257 .iter()
21258 .map(|ident| ident.value.as_str())
21259 .collect();
21260 assert_eq!(expected, actual);
21261 }
21262 }
21263 } else {
21264 panic!("fail to parse mysql partition selection");
21265 }
21266 }
21267
21268 #[test]
21269 fn test_replace_into_placeholders() {
21270 let sql = "REPLACE INTO t (a) VALUES (&a)";
21271
21272 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
21273 }
21274
21275 #[test]
21276 fn test_replace_into_set_placeholder() {
21277 let sql = "REPLACE INTO t SET ?";
21278
21279 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
21280 }
21281
21282 #[test]
21283 fn test_replace_incomplete() {
21284 let sql = r#"REPLACE"#;
21285
21286 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
21287 }
21288
21289 #[test]
21290 fn test_placeholder_invalid_whitespace() {
21291 for w in [" ", "/*invalid*/"] {
21292 let sql = format!("\nSELECT\n :{w}fooBar");
21293 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
21294 }
21295 }
21296}