1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while parsing SQL text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could proceed.
    TokenizerError(String),
    /// The token stream did not match the expected grammar.
    ParserError(String),
    /// Nesting exceeded the parser's recursion limit.
    RecursionLimitExceeded,
}
60
// Builds an `Err(ParserError::ParserError(..))` whose message is the given
// text with the source location appended.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
// Statement-family submodules of the parser.
mod alter;
mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks the remaining recursion-depth budget to guard against stack
    /// overflow on deeply nested SQL. The budget is shared via
    /// `Rc<Cell<usize>>` so that each guard can restore its unit on drop.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter allowing `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(remaining_depth.into()),
            }
        }

        /// Consumes one unit of depth. Returns a [`DepthGuard`] that gives
        /// the unit back when dropped, or
        /// [`ParserError::RecursionLimitExceeded`] if the budget is spent.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let old_value = self.remaining_depth.get();
            if old_value == 0 {
                Err(ParserError::RecursionLimitExceeded)
            } else {
                self.remaining_depth.set(old_value - 1);
                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
            }
        }
    }

    /// RAII guard returned by [`RecursionCounter::try_decrease`]; restores
    /// one unit of depth on drop.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }
    impl Drop for DepthGuard {
        fn drop(&mut self) {
            // Give the consumed depth unit back to the shared counter.
            let old_value = self.remaining_depth.get();
            self.remaining_depth.set(old_value + 1);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op stand-in for the std recursion counter: without `std` (no
    /// `Rc`/`Cell` here) depth is not tracked and `try_decrease` always
    /// succeeds.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// No-op guard matching the std `DepthGuard` interface.
    pub struct DepthGuard {}
}
158
/// Whether a syntactic element is required or may be omitted.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
167
/// Whether a derived table is preceded by the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// Result of parsing an expression position that may hold a wildcard.
pub enum WildcardExpr {
    /// An ordinary expression.
    Expr(Expr),
    /// A qualified wildcard such as `alias.*` or `schema.table.*`.
    QualifiedWildcard(ObjectName),
    /// A bare `*`.
    Wildcard,
}
185
186impl From<TokenizerError> for ParserError {
187 fn from(e: TokenizerError) -> Self {
188 ParserError::TokenizerError(e.to_string())
189 }
190}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// Marker impl: all error detail is carried by the `Display` message.
impl core::error::Error for ParserError {}
207
/// Default maximum nesting depth before the parser returns
/// [`ParserError::RecursionLimitExceeded`].
const DEFAULT_REMAINING_DEPTH: usize = 50;
210
/// Sentinel returned when reading past the end of the token stream; carries
/// a dummy zero location.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
/// Newtype over `bool` recording whether a trailing closing bracket was
/// matched. NOTE(review): presumably used by nested-type parsing helpers
/// elsewhere in this file — usage is outside this view.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
239
/// Options that adjust parser behavior, applied via [`Parser::with_options`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept trailing commas in comma-separated lists.
    pub trailing_commas: bool,
    /// Process escape sequences in string literals during tokenization.
    pub unescape: bool,
    /// Require semicolons between consecutive statements.
    pub require_semicolon_stmt_delimiter: bool,
}
252
253impl Default for ParserOptions {
254 fn default() -> Self {
255 Self {
256 trailing_commas: false,
257 unescape: true,
258 require_semicolon_stmt_delimiter: true,
259 }
260 }
261}
262
263impl ParserOptions {
264 pub fn new() -> Self {
266 Default::default()
267 }
268
269 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
281 self.trailing_commas = trailing_commas;
282 self
283 }
284
285 pub fn with_unescape(mut self, unescape: bool) -> Self {
288 self.unescape = unescape;
289 self
290 }
291}
292
/// Parser modes that alter how certain constructs are interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Inside a `CONNECT BY` clause.
    ConnectBy,
    /// Inside a column definition; `COLLATE` after an expression is then
    /// left for the column-option parser instead of becoming
    /// `Expr::Collate` (see `parse_subexpr`).
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream and builds AST [`Statement`]s
/// according to the configured [`Dialect`].
pub struct Parser<'a> {
    /// The token stream being parsed.
    tokens: Vec<TokenWithSpan>,
    /// Index of the first unprocessed token in `tokens`.
    index: usize,
    /// Current parsing mode (see [`ParserState`]).
    state: ParserState,
    /// Dialect driving dialect-specific grammar decisions.
    dialect: &'a dyn Dialect,
    /// Behavior options (trailing commas, unescaping, delimiters).
    options: ParserOptions,
    /// Remaining recursion-depth budget shared with active guards.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
365 pub fn new(dialect: &'a dyn Dialect) -> Self {
381 Self {
382 tokens: vec![],
383 index: 0,
384 state: ParserState::Normal,
385 dialect,
386 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
387 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
388 }
389 }
390
391 pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
414 self.recursion_counter = RecursionCounter::new(recursion_limit);
415 self
416 }
417
418 pub fn with_options(mut self, options: ParserOptions) -> Self {
441 self.options = options;
442 self
443 }
444
445 pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
447 self.tokens = tokens;
448 self.index = 0;
449 self
450 }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
465 pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
472 debug!("Parsing sql '{sql}'...");
473 let tokens = Tokenizer::new(self.dialect, sql)
474 .with_unescape(self.options.unescape)
475 .tokenize_with_location()?;
476 Ok(self.with_tokens_with_locations(tokens))
477 }
478
    /// Parse semicolon-separated statements until EOF.
    ///
    /// Empty statements (consecutive semicolons) are skipped. When the
    /// options do not require semicolon delimiters, statements may follow
    /// each other directly. A trailing `END` keyword stops the loop without
    /// being consumed, so block parsers can handle it.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Consume any number of semicolons between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // End of statement list inside a `BEGIN ... END` block.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
530 pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
546 Parser::new(dialect).try_with_sql(sql)?.parse_statements()
547 }
548
549 pub fn parse_sql_with_comments(
554 dialect: &'a dyn Dialect,
555 sql: &str,
556 ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
557 let mut p = Parser::new(dialect).try_with_sql(sql)?;
558 p.parse_statements().map(|stmts| (stmts, p.into_comments()))
559 }
560
561 fn into_comments(self) -> comments::Comments {
563 let mut comments = comments::Comments::default();
564 for t in self.tokens.into_iter() {
565 match t.token {
566 Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
567 comments.offer(comments::CommentWithSpan {
568 comment: comments::Comment::SingleLine {
569 content: comment,
570 prefix,
571 },
572 span: t.span,
573 });
574 }
575 Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
576 comments.offer(comments::CommentWithSpan {
577 comment: comments::Comment::MultiLine(comment),
578 span: t.span,
579 });
580 }
581 _ => {}
582 }
583 }
584 comments
585 }
586
    /// Parse a single top-level statement (SELECT, INSERT, CREATE, ...),
    /// stopping before the statement separator, if any.
    ///
    /// The dialect gets first refusal via `Dialect::parse_statement`;
    /// otherwise dispatch is on the statement's leading keyword.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Guard against stack overflow on pathologically nested input.
        let _guard = self.recursion_counter.try_decrease()?;

        // Allow the dialect to fully override statement parsing.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                // Sub-parsers below that re-read their leading keyword get
                // the token pushed back first via `prev_token`.
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // LISTEN/UNLISTEN/NOTIFY only where the dialect supports them.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A parenthesized query, e.g. `(SELECT ...)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
731
    /// Parse a procedural `CASE ... WHEN ... END [CASE]` statement
    /// (distinct from the CASE *expression*).
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // A "searched" CASE has WHEN immediately; a "simple" CASE first has
        // an operand expression to match against.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; record the last token consumed
        // as the closing token.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
768
    /// Parse an `IF ... [ELSEIF ...]* [ELSE ...] END IF` statement.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF branches.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The closing `END IF`; the IF token is recorded as the end token.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
808
809 fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
813 self.expect_keyword_is(Keyword::WHILE)?;
814 let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
815
816 Ok(WhileStatement { while_block })
817 }
818
819 fn parse_conditional_statement_block(
827 &mut self,
828 terminal_keywords: &[Keyword],
829 ) -> Result<ConditionalStatementBlock, ParserError> {
830 let start_token = self.get_current_token().clone(); let mut then_token = None;
832
833 let condition = match &start_token.token {
834 Token::Word(w) if w.keyword == Keyword::ELSE => None,
835 Token::Word(w) if w.keyword == Keyword::WHILE => {
836 let expr = self.parse_expr()?;
837 Some(expr)
838 }
839 _ => {
840 let expr = self.parse_expr()?;
841 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
842 Some(expr)
843 }
844 };
845
846 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
847
848 Ok(ConditionalStatementBlock {
849 start_token: AttachedToken(start_token),
850 condition,
851 then_token,
852 conditional_statements,
853 })
854 }
855
856 pub(crate) fn parse_conditional_statements(
859 &mut self,
860 terminal_keywords: &[Keyword],
861 ) -> Result<ConditionalStatements, ParserError> {
862 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
863 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
864 let statements = self.parse_statement_list(terminal_keywords)?;
865 let end_token = self.expect_keyword(Keyword::END)?;
866
867 ConditionalStatements::BeginEnd(BeginEndStatements {
868 begin_token: AttachedToken(begin_token),
869 statements,
870 end_token: AttachedToken(end_token),
871 })
872 } else {
873 ConditionalStatements::Sequence {
874 statements: self.parse_statement_list(terminal_keywords)?,
875 }
876 };
877 Ok(conditional_statements)
878 }
879
880 pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
884 self.expect_keyword_is(Keyword::RAISE)?;
885
886 let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
887 self.expect_token(&Token::Eq)?;
888 Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
889 } else {
890 self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
891 };
892
893 Ok(RaiseStatement { value })
894 }
    /// Parse a `COMMENT [IF EXISTS] ON <object_type> <name> IS <text|NULL>`
    /// statement (for dialects that support `COMMENT ON`).
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        // Map the object-type keyword to its AST variant, then read the
        // commented object's name.
        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLLATION => {
                (CommentObject::Collation, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DOMAIN => {
                (CommentObject::Domain, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::FUNCTION => {
                (CommentObject::Function, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::INDEX => {
                (CommentObject::Index, self.parse_object_name(false)?)
            }
            // MATERIALIZED must be followed by VIEW.
            Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
                self.expect_keyword_is(Keyword::VIEW)?;
                (
                    CommentObject::MaterializedView,
                    self.parse_object_name(false)?,
                )
            }
            Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
                (CommentObject::Procedure, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
                (CommentObject::Sequence, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TYPE => {
                (CommentObject::Type, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::VIEW => {
                (CommentObject::View, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` clears the comment; otherwise a string literal sets it.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }
973
974 pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
976 let mut channel = None;
977 let mut tables: Vec<ObjectName> = vec![];
978 let mut read_lock = false;
979 let mut export = false;
980
981 if !dialect_of!(self is MySqlDialect | GenericDialect) {
982 return parser_err!(
983 "Unsupported statement FLUSH",
984 self.peek_token_ref().span.start
985 );
986 }
987
988 let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
989 Some(FlushLocation::NoWriteToBinlog)
990 } else if self.parse_keyword(Keyword::LOCAL) {
991 Some(FlushLocation::Local)
992 } else {
993 None
994 };
995
996 let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
997 FlushType::BinaryLogs
998 } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
999 FlushType::EngineLogs
1000 } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
1001 FlushType::ErrorLogs
1002 } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
1003 FlushType::GeneralLogs
1004 } else if self.parse_keywords(&[Keyword::HOSTS]) {
1005 FlushType::Hosts
1006 } else if self.parse_keyword(Keyword::PRIVILEGES) {
1007 FlushType::Privileges
1008 } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
1009 FlushType::OptimizerCosts
1010 } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
1011 if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
1012 channel = Some(self.parse_object_name(false).unwrap().to_string());
1013 }
1014 FlushType::RelayLogs
1015 } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
1016 FlushType::SlowLogs
1017 } else if self.parse_keyword(Keyword::STATUS) {
1018 FlushType::Status
1019 } else if self.parse_keyword(Keyword::USER_RESOURCES) {
1020 FlushType::UserResources
1021 } else if self.parse_keywords(&[Keyword::LOGS]) {
1022 FlushType::Logs
1023 } else if self.parse_keywords(&[Keyword::TABLES]) {
1024 loop {
1025 let next_token = self.next_token();
1026 match &next_token.token {
1027 Token::Word(w) => match w.keyword {
1028 Keyword::WITH => {
1029 read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
1030 }
1031 Keyword::FOR => {
1032 export = self.parse_keyword(Keyword::EXPORT);
1033 }
1034 Keyword::NoKeyword => {
1035 self.prev_token();
1036 tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
1037 }
1038 _ => {}
1039 },
1040 _ => {
1041 break;
1042 }
1043 }
1044 }
1045
1046 FlushType::Tables
1047 } else {
1048 return self.expected_ref(
1049 "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
1050 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
1051 self.peek_token_ref(),
1052 );
1053 };
1054
1055 Ok(Statement::Flush {
1056 object_type,
1057 location,
1058 channel,
1059 read_lock,
1060 export,
1061 tables,
1062 })
1063 }
1064
1065 pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
1067 let repair = self.parse_keyword(Keyword::REPAIR);
1068 self.expect_keyword_is(Keyword::TABLE)?;
1069 let table_name = self.parse_object_name(false)?;
1070 let partition_action = self
1071 .maybe_parse(|parser| {
1072 let pa = match parser.parse_one_of_keywords(&[
1073 Keyword::ADD,
1074 Keyword::DROP,
1075 Keyword::SYNC,
1076 ]) {
1077 Some(Keyword::ADD) => Some(AddDropSync::ADD),
1078 Some(Keyword::DROP) => Some(AddDropSync::DROP),
1079 Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
1080 _ => None,
1081 };
1082 parser.expect_keyword_is(Keyword::PARTITIONS)?;
1083 Ok(pa)
1084 })?
1085 .unwrap_or_default();
1086 Ok(Msck {
1087 repair,
1088 table_name,
1089 partition_action,
1090 })
1091 }
1092
    /// Parse `TRUNCATE [TABLE] [IF EXISTS] <name>[, ...]` with its optional
    /// extensions: `ONLY`, trailing `*`, `PARTITION (...)`,
    /// `RESTART/CONTINUE IDENTITY`, `CASCADE`/`RESTRICT`, and an optional
    /// `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            // A trailing `*` after the table name (PostgreSQL syntax).
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        // Optional `PARTITION (<exprs>)` clause.
        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are only parsed for Postgres-like
        // dialects.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1143
1144 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1145 if self.parse_keyword(Keyword::CASCADE) {
1146 Some(CascadeOption::Cascade)
1147 } else if self.parse_keyword(Keyword::RESTRICT) {
1148 Some(CascadeOption::Restrict)
1149 } else {
1150 None
1151 }
1152 }
1153
    /// Parse the parenthesized option list of a DuckDB `ATTACH` statement,
    /// e.g. `(READ_ONLY, TYPE sqlite)`. Returns an empty list when no
    /// opening parenthesis follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // READ_ONLY may optionally carry an explicit TRUE/FALSE.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // After each option: ')' ends the list, ',' continues it.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1190
1191 pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1193 let database = self.parse_keyword(Keyword::DATABASE);
1194 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1195 let database_path = self.parse_identifier()?;
1196 let database_alias = if self.parse_keyword(Keyword::AS) {
1197 Some(self.parse_identifier()?)
1198 } else {
1199 None
1200 };
1201
1202 let attach_options = self.parse_attach_duckdb_database_options()?;
1203 Ok(Statement::AttachDuckDBDatabase {
1204 if_not_exists,
1205 database,
1206 database_path,
1207 database_alias,
1208 attach_options,
1209 })
1210 }
1211
1212 pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1214 let database = self.parse_keyword(Keyword::DATABASE);
1215 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1216 let database_alias = self.parse_identifier()?;
1217 Ok(Statement::DetachDuckDBDatabase {
1218 if_exists,
1219 database,
1220 database_alias,
1221 })
1222 }
1223
1224 pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1226 let database = self.parse_keyword(Keyword::DATABASE);
1227 let database_file_name = self.parse_expr()?;
1228 self.expect_keyword_is(Keyword::AS)?;
1229 let schema_name = self.parse_identifier()?;
1230 Ok(Statement::AttachDatabase {
1231 database,
1232 schema_name,
1233 database_file_name,
1234 })
1235 }
1236
    /// Parse an `ANALYZE [TABLE] [<name>]` statement with its optional
    /// clauses (`PARTITION (...)`, `FOR COLUMNS [...]`, `CACHE METADATA`,
    /// `NOSCAN`, `COMPUTE STATISTICS`), accepted in any order.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        // The table name itself is optional.
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // Optional parenthesized column list directly after the table name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // Collect clauses until none of the clause keywords matches.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list after FOR COLUMNS is optional.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1301
    /// Parse an expression position that may hold a wildcard: `*`,
    /// a qualified `a.b.*`, or `(*)`. Anything else rewinds and is parsed
    /// as a regular expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the position so non-wildcard input can be re-parsed.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                // Only a dotted path can lead to a qualified wildcard.
                if self.peek_token_ref().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: the outer pattern restricts `t` to
                        // the two variants handled above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Placeholder(s) => {
                                id_parts.push(Ident::new(s))
                            }
                            Token::Mul => {
                                // `a.b.*` — a qualified wildcard.
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            Token::LParen => {
                // `(*)` is accepted as a bare wildcard.
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token(); // consume the ')'
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // Not a wildcard form: rewind and parse as a plain expression.
        self.index = index;
        self.parse_expr()
    }
1365
    /// Parse a new expression, starting at the dialect's lowest precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1370
    /// Parse an expression optionally followed by an alias and an
    /// `ASC`/`DESC` ordering marker.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // An implicit (non-AS) ASC/DESC/GROUP word after the expression is
        // ordering/grouping syntax, not an alias; explicit aliases are
        // always accepted.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1390
    /// Parse a subexpression by precedence climbing: consume infix
    /// operators only while their precedence is strictly greater than
    /// `precedence`.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Guard against stack overflow on deeply nested expressions.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        // COLLATE binds right after the expression — except inside a column
        // definition, where COLLATE is a column option instead.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // `.` access is handled by parse_compound_expr, not as infix.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1429
1430 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1432 let condition = self.parse_expr()?;
1433 let message = if self.parse_keyword(Keyword::AS) {
1434 Some(self.parse_expr()?)
1435 } else {
1436 None
1437 };
1438
1439 Ok(Statement::Assert { condition, message })
1440 }
1441
1442 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1444 let name = self.parse_identifier()?;
1445 Ok(Statement::Savepoint { name })
1446 }
1447
1448 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1450 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1451 let name = self.parse_identifier()?;
1452
1453 Ok(Statement::ReleaseSavepoint { name })
1454 }
1455
1456 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1458 let channel = self.parse_identifier()?;
1459 Ok(Statement::LISTEN { channel })
1460 }
1461
1462 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1464 let channel = if self.consume_token(&Token::Mul) {
1465 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1466 } else {
1467 match self.parse_identifier() {
1468 Ok(expr) => expr,
1469 _ => {
1470 self.prev_token();
1471 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1472 }
1473 }
1474 };
1475 Ok(Statement::UNLISTEN { channel })
1476 }
1477
1478 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1480 let channel = self.parse_identifier()?;
1481 let payload = if self.consume_token(&Token::Comma) {
1482 Some(self.parse_literal_string()?)
1483 } else {
1484 None
1485 };
1486 Ok(Statement::NOTIFY { channel, payload })
1487 }
1488
1489 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1491 if self.peek_keyword(Keyword::TABLE) {
1492 self.expect_keyword(Keyword::TABLE)?;
1493 let rename_tables = self.parse_comma_separated(|parser| {
1494 let old_name = parser.parse_object_name(false)?;
1495 parser.expect_keyword(Keyword::TO)?;
1496 let new_name = parser.parse_object_name(false)?;
1497
1498 Ok(RenameTable { old_name, new_name })
1499 })?;
1500 Ok(rename_tables.into())
1501 } else {
1502 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1503 }
1504 }
1505
    /// Attempt to parse an expression prefix introduced by the reserved
    /// keyword `w`. Returns `Ok(None)` when the keyword does not start a
    /// special expression form, so the caller can fall back to treating
    /// the word as a plain identifier.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean / NULL literals: rewind so parse_value re-reads the token.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Postgres-style niladic "functions" that take no parentheses.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Datetime functions that allow optional parentheses.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS(<subquery>): Databricks only treats it as the predicate
            // when a SELECT/WITH follows; otherwise it may be a function.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<subquery>)`; excluded where ARRAY( starts other forms.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // PRIOR is only special inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric typed strings, e.g. `POINT '(1,2)'` (Postgres-style).
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1632
1633 fn parse_expr_prefix_by_unreserved_word(
1635 &mut self,
1636 w: &Word,
1637 w_span: Span,
1638 ) -> Result<Expr, ParserError> {
1639 let is_outer_join = self.peek_outer_join_operator();
1640 match &self.peek_token_ref().token {
1641 Token::LParen if !is_outer_join => {
1642 let id_parts = vec![w.to_ident(w_span)];
1643 self.parse_function(ObjectName::from(id_parts))
1644 }
1645 Token::SingleQuotedString(_)
1647 | Token::DoubleQuotedString(_)
1648 | Token::HexStringLiteral(_)
1649 if w.value.starts_with('_') =>
1650 {
1651 Ok(Expr::Prefixed {
1652 prefix: w.to_ident(w_span),
1653 value: self.parse_introduced_string_expr()?.into(),
1654 })
1655 }
1656 Token::SingleQuotedString(_)
1658 | Token::DoubleQuotedString(_)
1659 | Token::HexStringLiteral(_)
1660 if w.value.starts_with('_') =>
1661 {
1662 Ok(Expr::Prefixed {
1663 prefix: w.to_ident(w_span),
1664 value: self.parse_introduced_string_expr()?.into(),
1665 })
1666 }
1667 Token::Arrow if self.dialect.supports_lambda_functions() => {
1671 self.expect_token(&Token::Arrow)?;
1672 Ok(Expr::Lambda(LambdaFunction {
1673 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1674 name: w.to_ident(w_span),
1675 data_type: None,
1676 }),
1677 body: Box::new(self.parse_expr()?),
1678 syntax: LambdaSyntax::Arrow,
1679 }))
1680 }
1681 Token::Word(_)
1685 if self.dialect.supports_lambda_functions()
1686 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1687 {
1688 let data_type = self.parse_data_type()?;
1689 self.expect_token(&Token::Arrow)?;
1690 Ok(Expr::Lambda(LambdaFunction {
1691 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1692 name: w.to_ident(w_span),
1693 data_type: Some(data_type),
1694 }),
1695 body: Box::new(self.parse_expr()?),
1696 syntax: LambdaSyntax::Arrow,
1697 }))
1698 }
1699 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1700 }
1701 }
1702
1703 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1706 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1707 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1708 } else {
1709 false
1710 }
1711 }
1712
1713 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1715 if let Some(prefix) = self.dialect.parse_prefix(self) {
1717 return prefix;
1718 }
1719
1720 let loc = self.peek_token_ref().span.start;
1737 let opt_expr = self.maybe_parse(|parser| {
1738 match parser.parse_data_type()? {
1739 DataType::Interval { .. } => parser.parse_interval(),
1740 DataType::Custom(ref name, ref modifiers)
1751 if modifiers.is_empty()
1752 && Self::is_simple_unquoted_object_name(name, "xml")
1753 && parser.dialect.supports_xml_expressions() =>
1754 {
1755 Ok(Expr::TypedString(TypedString {
1756 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1757 value: parser.parse_value()?,
1758 uses_odbc_syntax: false,
1759 }))
1760 }
1761 DataType::Custom(..) => parser_err!("dummy", loc),
1762 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1764 Ok(Expr::Cast {
1765 kind: CastKind::Cast,
1766 expr: Box::new(parser.parse_expr()?),
1767 data_type: DataType::Binary(None),
1768 array: false,
1769 format: None,
1770 })
1771 }
1772 data_type => Ok(Expr::TypedString(TypedString {
1773 data_type,
1774 value: parser.parse_value()?,
1775 uses_odbc_syntax: false,
1776 })),
1777 }
1778 })?;
1779
1780 if let Some(expr) = opt_expr {
1781 return Ok(expr);
1782 }
1783
1784 let dialect = self.dialect;
1788
1789 self.advance_token();
1790 let next_token_index = self.get_current_index();
1791 let next_token = self.get_current_token();
1792 let span = next_token.span;
1793 let expr = match &next_token.token {
1794 Token::Word(w) => {
1795 let w = w.clone();
1804 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1805 Ok(Some(expr)) => Ok(expr),
1807
1808 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1810
1811 Err(e) => {
1818 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1819 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1820 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1821 }) {
1822 return Ok(expr);
1823 }
1824 }
1825 return Err(e);
1826 }
1827 }
1828 } Token::LBracket => self.parse_array_expr(false),
1831 tok @ Token::Minus | tok @ Token::Plus => {
1832 let op = if *tok == Token::Plus {
1833 UnaryOperator::Plus
1834 } else {
1835 UnaryOperator::Minus
1836 };
1837 Ok(Expr::UnaryOp {
1838 op,
1839 expr: Box::new(
1840 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1841 ),
1842 })
1843 }
1844 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1845 op: UnaryOperator::BangNot,
1846 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1847 }),
1848 tok @ Token::DoubleExclamationMark
1849 | tok @ Token::PGSquareRoot
1850 | tok @ Token::PGCubeRoot
1851 | tok @ Token::AtSign
1852 if dialect_is!(dialect is PostgreSqlDialect) =>
1853 {
1854 let op = match tok {
1855 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1856 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1857 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1858 Token::AtSign => UnaryOperator::PGAbs,
1859 _ => {
1860 return Err(ParserError::ParserError(
1861 "Internal parser error: unexpected unary operator token".to_string(),
1862 ))
1863 }
1864 };
1865 Ok(Expr::UnaryOp {
1866 op,
1867 expr: Box::new(
1868 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1869 ),
1870 })
1871 }
1872 Token::Tilde => Ok(Expr::UnaryOp {
1873 op: UnaryOperator::BitwiseNot,
1874 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1875 }),
1876 tok @ Token::Sharp
1877 | tok @ Token::AtDashAt
1878 | tok @ Token::AtAt
1879 | tok @ Token::QuestionMarkDash
1880 | tok @ Token::QuestionPipe
1881 if self.dialect.supports_geometric_types() =>
1882 {
1883 let op = match tok {
1884 Token::Sharp => UnaryOperator::Hash,
1885 Token::AtDashAt => UnaryOperator::AtDashAt,
1886 Token::AtAt => UnaryOperator::DoubleAt,
1887 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1888 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1889 _ => {
1890 return Err(ParserError::ParserError(format!(
1891 "Unexpected token in unary operator parsing: {tok:?}"
1892 )))
1893 }
1894 };
1895 Ok(Expr::UnaryOp {
1896 op,
1897 expr: Box::new(
1898 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1899 ),
1900 })
1901 }
1902 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1903 {
1904 self.prev_token();
1905 Ok(Expr::Value(self.parse_value()?))
1906 }
1907 Token::UnicodeStringLiteral(_) => {
1908 self.prev_token();
1909 Ok(Expr::Value(self.parse_value()?))
1910 }
1911 Token::Number(_, _)
1912 | Token::SingleQuotedString(_)
1913 | Token::DoubleQuotedString(_)
1914 | Token::TripleSingleQuotedString(_)
1915 | Token::TripleDoubleQuotedString(_)
1916 | Token::DollarQuotedString(_)
1917 | Token::SingleQuotedByteStringLiteral(_)
1918 | Token::DoubleQuotedByteStringLiteral(_)
1919 | Token::TripleSingleQuotedByteStringLiteral(_)
1920 | Token::TripleDoubleQuotedByteStringLiteral(_)
1921 | Token::SingleQuotedRawStringLiteral(_)
1922 | Token::DoubleQuotedRawStringLiteral(_)
1923 | Token::TripleSingleQuotedRawStringLiteral(_)
1924 | Token::TripleDoubleQuotedRawStringLiteral(_)
1925 | Token::NationalStringLiteral(_)
1926 | Token::QuoteDelimitedStringLiteral(_)
1927 | Token::NationalQuoteDelimitedStringLiteral(_)
1928 | Token::HexStringLiteral(_) => {
1929 self.prev_token();
1930 Ok(Expr::Value(self.parse_value()?))
1931 }
1932 Token::LParen => {
1933 let expr =
1934 if let Some(expr) = self.try_parse_expr_sub_query()? {
1935 expr
1936 } else if let Some(lambda) = self.try_parse_lambda()? {
1937 return Ok(lambda);
1938 } else {
1939 let exprs = self.with_state(ParserState::Normal, |p| {
1950 p.parse_comma_separated(Parser::parse_expr)
1951 })?;
1952 match exprs.len() {
1953 0 => return Err(ParserError::ParserError(
1954 "Internal parser error: parse_comma_separated returned empty list"
1955 .to_string(),
1956 )),
1957 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1958 _ => Expr::Tuple(exprs),
1959 }
1960 };
1961 self.expect_token(&Token::RParen)?;
1962 Ok(expr)
1963 }
1964 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1965 self.prev_token();
1966 Ok(Expr::Value(self.parse_value()?))
1967 }
1968 Token::LBrace => {
1969 self.prev_token();
1970 self.parse_lbrace_expr()
1971 }
1972 _ => self.expected_at("an expression", next_token_index),
1973 }?;
1974
1975 Ok(expr)
1976 }
1977
1978 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1979 Ok(Expr::TypedString(TypedString {
1980 data_type: DataType::GeometricType(kind),
1981 value: self.parse_value()?,
1982 uses_odbc_syntax: false,
1983 }))
1984 }
1985
    /// Parse compound access following `root`: chained `.member` access,
    /// `[subscript]`s, a trailing qualified wildcard (`alias.*`), or the
    /// Oracle outer-join operator `(+)` — and combine the pieces into a
    /// single expression.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // Postgres allows `a.*` inside expressions; other
                        // dialects rewind before the `.` and stop here.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    // `a.'b'`: quoted member name becomes a quoted identifier.
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => {
                        // Parse the member at Period precedence and accept
                        // only identifier/value/function-like results.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested compound results into the chain.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Speculative parse failed: require a plain
                                // identifier after the dot.
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` requires identifiers all the way down.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `col(+)` — only plain column references may precede `(+)`.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2115
    /// Combine `root` and its access chain into the most specific AST node:
    /// a compound identifier, a function call with a qualified name, an
    /// outer-join column reference, or a generic `CompoundFieldAccess`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        // Pure identifier chain, e.g. `a.b.c`.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // Identifier chain ending in a call: fold the leading identifiers
        // into the function name, e.g. `schema.pkg.f(x)`.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // skip the trailing function, matched above
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root and intermediate identifiers to the call's name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // Single `(+)` element: rebuild as an outer-join column reference.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Fallback: generic field access.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2208
2209 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2210 match k {
2211 Keyword::LOCAL => Some(ContextModifier::Local),
2212 Keyword::GLOBAL => Some(ContextModifier::Global),
2213 Keyword::SESSION => Some(ContextModifier::Session),
2214 _ => None,
2215 }
2216 }
2217
2218 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2220 if !matches!(root, Expr::Identifier(_)) {
2221 return false;
2222 }
2223 fields
2224 .iter()
2225 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2226 }
2227
2228 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2230 let mut idents = vec![];
2231 if let Expr::Identifier(root) = root {
2232 idents.push(root);
2233 for x in fields {
2234 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2235 idents.push(ident);
2236 } else {
2237 return parser_err!(
2238 format!("Expected identifier, found: {}", x),
2239 x.span().start
2240 );
2241 }
2242 }
2243 Ok(idents)
2244 } else {
2245 parser_err!(
2246 format!("Expected identifier, found: {}", root),
2247 root.span().start
2248 )
2249 }
2250 }
2251
2252 fn peek_outer_join_operator(&mut self) -> bool {
2254 if !self.dialect.supports_outer_join_operator() {
2255 return false;
2256 }
2257
2258 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2259 Token::LParen == maybe_lparen.token
2260 && Token::Plus == maybe_plus.token
2261 && Token::RParen == maybe_rparen.token
2262 }
2263
2264 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2267 self.dialect.supports_outer_join_operator()
2268 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2269 }
2270
2271 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2273 self.expect_token(&Token::LParen)?;
2274 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2275 self.expect_token(&Token::RParen)?;
2276
2277 Ok(options)
2278 }
2279
2280 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2281 let name = self.parse_identifier()?;
2282
2283 let next_token = self.peek_token_ref();
2284 if next_token == &Token::Comma || next_token == &Token::RParen {
2285 return Ok(UtilityOption { name, arg: None });
2286 }
2287 let arg = self.parse_expr()?;
2288
2289 Ok(UtilityOption {
2290 name,
2291 arg: Some(arg),
2292 })
2293 }
2294
2295 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2296 if !self.peek_sub_query() {
2297 return Ok(None);
2298 }
2299
2300 Ok(Some(Expr::Subquery(self.parse_query()?)))
2301 }
2302
2303 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2304 if !self.dialect.supports_lambda_functions() {
2305 return Ok(None);
2306 }
2307 self.maybe_parse(|p| {
2308 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2309 p.expect_token(&Token::RParen)?;
2310 p.expect_token(&Token::Arrow)?;
2311 let expr = p.parse_expr()?;
2312 Ok(Expr::Lambda(LambdaFunction {
2313 params: OneOrManyWithParens::Many(params),
2314 body: Box::new(expr),
2315 syntax: LambdaSyntax::Arrow,
2316 }))
2317 })
2318 }
2319
2320 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2330 let params = self.parse_lambda_function_parameters()?;
2332 self.expect_token(&Token::Colon)?;
2334 let body = self.parse_expr()?;
2336 Ok(Expr::Lambda(LambdaFunction {
2337 params,
2338 body: Box::new(body),
2339 syntax: LambdaSyntax::LambdaKeyword,
2340 }))
2341 }
2342
2343 fn parse_lambda_function_parameters(
2345 &mut self,
2346 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2347 let params = if self.consume_token(&Token::LParen) {
2349 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2351 self.expect_token(&Token::RParen)?;
2352 OneOrManyWithParens::Many(params)
2353 } else {
2354 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2356 if params.len() == 1 {
2357 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2358 } else {
2359 OneOrManyWithParens::Many(params)
2360 }
2361 };
2362 Ok(params)
2363 }
2364
2365 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2367 let name = self.parse_identifier()?;
2368 let data_type = match &self.peek_token_ref().token {
2369 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2370 _ => None,
2371 };
2372 Ok(LambdaFunctionParameter { name, data_type })
2373 }
2374
2375 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2382 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2384 return Ok(Some(expr));
2385 }
2386 self.maybe_parse_odbc_body_datetime()
2388 }
2389
2390 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2401 self.maybe_parse(|p| {
2402 let token = p.next_token().clone();
2403 let word_string = token.token.to_string();
2404 let data_type = match word_string.as_str() {
2405 "t" => DataType::Time(None, TimezoneInfo::None),
2406 "d" => DataType::Date,
2407 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2408 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2409 };
2410 let value = p.parse_value()?;
2411 Ok(Expr::TypedString(TypedString {
2412 data_type,
2413 value,
2414 uses_odbc_syntax: true,
2415 }))
2416 })
2417 }
2418
2419 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2428 self.maybe_parse(|p| {
2429 p.expect_keyword(Keyword::FN)?;
2430 let fn_name = p.parse_object_name(false)?;
2431 let mut fn_call = p.parse_function_call(fn_name)?;
2432 fn_call.uses_odbc_syntax = true;
2433 Ok(Expr::Function(fn_call))
2434 })
2435 }
2436
2437 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2439 self.parse_function_call(name).map(Expr::Function)
2440 }
2441
    /// Parse the remainder of a function call for `name` (the `(` has not
    /// been consumed yet): argument list, optional ClickHouse-style second
    /// list, WITHIN GROUP, FILTER, null treatment, and OVER clause.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Some dialects allow a bare subquery as the sole argument.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse `f(params)(args)`: when a second list follows, the
        // first list was actually the parameter list.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // The `&&` chain short-circuits, so `FILTER (WHERE ...)` is only
        // consumed when every piece is present in order.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // IGNORE/RESPECT NULLS may already have been captured inside the
        // argument list; avoid parsing it a second time here.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // OVER takes either an inline window spec or a named window.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2529
2530 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2532 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2533 Some(keyword) => {
2534 self.expect_keyword_is(Keyword::NULLS)?;
2535
2536 Ok(match keyword {
2537 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2538 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2539 _ => None,
2540 })
2541 }
2542 None => Ok(None),
2543 }
2544 }
2545
2546 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2548 let args = if self.consume_token(&Token::LParen) {
2549 FunctionArguments::List(self.parse_function_argument_list()?)
2550 } else {
2551 FunctionArguments::None
2552 };
2553 Ok(Expr::Function(Function {
2554 name,
2555 uses_odbc_syntax: false,
2556 parameters: FunctionArguments::None,
2557 args,
2558 filter: None,
2559 over: None,
2560 null_treatment: None,
2561 within_group: vec![],
2562 }))
2563 }
2564
2565 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2567 let next_token = self.next_token();
2568 match &next_token.token {
2569 Token::Word(w) => match w.keyword {
2570 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2571 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2572 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2573 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2574 },
2575 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2576 }
2577 }
2578
2579 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2581 let units = self.parse_window_frame_units()?;
2582 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2583 let start_bound = self.parse_window_frame_bound()?;
2584 self.expect_keyword_is(Keyword::AND)?;
2585 let end_bound = Some(self.parse_window_frame_bound()?);
2586 (start_bound, end_bound)
2587 } else {
2588 (self.parse_window_frame_bound()?, None)
2589 };
2590 Ok(WindowFrame {
2591 units,
2592 start_bound,
2593 end_bound,
2594 })
2595 }
2596
    /// Parses a single window frame bound: `CURRENT ROW`,
    /// `UNBOUNDED PRECEDING`/`FOLLOWING`, or `<expr> PRECEDING`/`FOLLOWING`.
    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
            Ok(WindowFrameBound::CurrentRow)
        } else {
            // `None` encodes UNBOUNDED; `Some(expr)` is an explicit offset.
            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
                None
            } else {
                Some(Box::new(match &self.peek_token_ref().token {
                    // A single-quoted string here is parsed as an interval
                    // literal (e.g. `RANGE '1 day' PRECEDING`), not as a
                    // plain string expression.
                    Token::SingleQuotedString(_) => self.parse_interval()?,
                    _ => self.parse_expr()?,
                }))
            };
            if self.parse_keyword(Keyword::PRECEDING) {
                Ok(WindowFrameBound::Preceding(rows))
            } else if self.parse_keyword(Keyword::FOLLOWING) {
                Ok(WindowFrameBound::Following(rows))
            } else {
                self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
            }
        }
    }
2619
2620 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2622 if self.dialect.supports_group_by_expr() {
2623 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2624 self.expect_token(&Token::LParen)?;
2625 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2626 self.expect_token(&Token::RParen)?;
2627 Ok(Expr::GroupingSets(result))
2628 } else if self.parse_keyword(Keyword::CUBE) {
2629 self.expect_token(&Token::LParen)?;
2630 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2631 self.expect_token(&Token::RParen)?;
2632 Ok(Expr::Cube(result))
2633 } else if self.parse_keyword(Keyword::ROLLUP) {
2634 self.expect_token(&Token::LParen)?;
2635 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2636 self.expect_token(&Token::RParen)?;
2637 Ok(Expr::Rollup(result))
2638 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2639 Ok(Expr::Tuple(vec![]))
2643 } else {
2644 self.parse_expr()
2645 }
2646 } else {
2647 self.parse_expr()
2649 }
2650 }
2651
2652 fn parse_tuple(
2656 &mut self,
2657 lift_singleton: bool,
2658 allow_empty: bool,
2659 ) -> Result<Vec<Expr>, ParserError> {
2660 if lift_singleton {
2661 if self.consume_token(&Token::LParen) {
2662 let result = if allow_empty && self.consume_token(&Token::RParen) {
2663 vec![]
2664 } else {
2665 let result = self.parse_comma_separated(Parser::parse_expr)?;
2666 self.expect_token(&Token::RParen)?;
2667 result
2668 };
2669 Ok(result)
2670 } else {
2671 Ok(vec![self.parse_expr()?])
2672 }
2673 } else {
2674 self.expect_token(&Token::LParen)?;
2675 let result = if allow_empty && self.consume_token(&Token::RParen) {
2676 vec![]
2677 } else {
2678 let result = self.parse_comma_separated(Parser::parse_expr)?;
2679 self.expect_token(&Token::RParen)?;
2680 result
2681 };
2682 Ok(result)
2683 }
2684 }
2685
    /// Parses a `CASE ... END` expression in both its forms: the "simple"
    /// form (`CASE <operand> WHEN ...`) and the "searched" form
    /// (`CASE WHEN <condition> ...`), with an optional `ELSE` branch.
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        // Capture the current token for span tracking — presumably the CASE
        // keyword just consumed by the caller (TODO confirm against callers).
        let case_token = AttachedToken(self.get_current_token().clone());
        let mut operand = None;
        // No immediate WHEN means the simple form: parse the operand first.
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword_is(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        // One iteration per WHEN/THEN pair; the first WHEN is already consumed.
        loop {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::THEN)?;
            let result = self.parse_expr()?;
            conditions.push(CaseWhen { condition, result });
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        // END is mandatory; its token is attached for span tracking.
        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
        Ok(Expr::Case {
            case_token,
            end_token,
            operand,
            conditions,
            else_result,
        })
    }
2718
2719 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2721 if self.parse_keyword(Keyword::FORMAT) {
2722 let value = self.parse_value()?;
2723 match self.parse_optional_time_zone()? {
2724 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2725 None => Ok(Some(CastFormat::Value(value))),
2726 }
2727 } else {
2728 Ok(None)
2729 }
2730 }
2731
2732 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2734 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2735 self.parse_value().map(Some)
2736 } else {
2737 Ok(None)
2738 }
2739 }
2740
    /// Parses the MSSQL-style `CONVERT(<data type>, <expr> [, <style>...])`
    /// argument list (type first, then value), producing [`Expr::Convert`]
    /// with `target_before_value: true`.
    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let data_type = self.parse_data_type()?;
        self.expect_token(&Token::Comma)?;
        let expr = self.parse_expr()?;
        // Optional trailing style arguments, e.g. CONVERT(VARCHAR, x, 120).
        let styles = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            Default::default()
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset: None,
            target_before_value: true,
            styles,
        })
    }
2762
    /// Parses the argument list of `CONVERT`/`TRY_CONVERT`.
    ///
    /// Dispatches to [`Self::parse_mssql_convert`] for dialects where the
    /// target type precedes the value; otherwise handles both the
    /// `CONVERT(<expr> USING <charset>)` form and the
    /// `CONVERT(<expr>, <type> [CHARACTER SET <charset>])` form.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `CONVERT(<expr> USING <charset>)`: character-set conversion only,
        // no target data type.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        // `CONVERT(<expr>, <type>)`, optionally with `CHARACTER SET`.
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2802
2803 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2805 self.expect_token(&Token::LParen)?;
2806 let expr = self.parse_expr()?;
2807 self.expect_keyword_is(Keyword::AS)?;
2808 let data_type = self.parse_data_type()?;
2809 let array = self.parse_keyword(Keyword::ARRAY);
2810 let format = self.parse_optional_cast_format()?;
2811 self.expect_token(&Token::RParen)?;
2812 Ok(Expr::Cast {
2813 kind,
2814 expr: Box::new(expr),
2815 data_type,
2816 array,
2817 format,
2818 })
2819 }
2820
2821 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2823 self.expect_token(&Token::LParen)?;
2824 let exists_node = Expr::Exists {
2825 negated,
2826 subquery: self.parse_query()?,
2827 };
2828 self.expect_token(&Token::RParen)?;
2829 Ok(exists_node)
2830 }
2831
    /// Parses the body of `EXTRACT(<field> FROM <expr>)`, plus the
    /// comma-separated `EXTRACT(<field>, <expr>)` form for dialects that
    /// support it.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        // Record which separator was used so the AST round-trips verbatim.
        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
        {
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2856
2857 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2859 self.expect_token(&Token::LParen)?;
2860 let expr = self.parse_expr()?;
2861 let field = if self.parse_keyword(Keyword::TO) {
2863 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2865 } else if self.consume_token(&Token::Comma) {
2866 let v = self.parse_value()?;
2868 if matches!(v.value, Value::Number(_, _)) {
2869 CeilFloorKind::Scale(v)
2870 } else {
2871 return Err(ParserError::ParserError(
2872 "Scale field can only be of number type".to_string(),
2873 ));
2874 }
2875 } else {
2876 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2877 };
2878 self.expect_token(&Token::RParen)?;
2879 if is_ceil {
2880 Ok(Expr::Ceil {
2881 expr: Box::new(expr),
2882 field,
2883 })
2884 } else {
2885 Ok(Expr::Floor {
2886 expr: Box::new(expr),
2887 field,
2888 })
2889 }
2890 }
2891
    /// Parses `POSITION(<expr> IN <expr>)`; when that shape does not match,
    /// falls back to parsing `ident` as an ordinary function call.
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        let between_prec = self.dialect.prec_value(Precedence::Between);
        // Try the dedicated POSITION syntax first; maybe_parse yields None
        // when the closure fails, presumably restoring the token position so
        // the fallback below can re-parse — confirm against maybe_parse.
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            // Parse at BETWEEN precedence so the `IN` keyword terminates the
            // needle expression instead of starting an IN-list.
            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword_is(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            None => self.parse_function(ObjectName::from(vec![ident])),
        }
    }
2916
    /// Parses `SUBSTRING(<expr> [FROM <expr>] [FOR <expr>])` and the
    /// comma-separated variant `SUBSTR/SUBSTRING(<expr>, <from>, <for>)`.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        // `shorthand` records which spelling was used (SUBSTR vs SUBSTRING)
        // so the AST can be re-serialized faithfully.
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            _ => {
                // Defensive: expect_one_of_keywords should only return one of
                // the keywords listed above.
                self.prev_token();
                return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // `special` records the comma-separated argument form (as opposed to
        // the FROM/FOR keyword form).
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2949
2950 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2954 self.expect_token(&Token::LParen)?;
2956 let expr = self.parse_expr()?;
2957 self.expect_keyword_is(Keyword::PLACING)?;
2958 let what_expr = self.parse_expr()?;
2959 self.expect_keyword_is(Keyword::FROM)?;
2960 let from_expr = self.parse_expr()?;
2961 let mut for_expr = None;
2962 if self.parse_keyword(Keyword::FOR) {
2963 for_expr = Some(self.parse_expr()?);
2964 }
2965 self.expect_token(&Token::RParen)?;
2966
2967 Ok(Expr::Overlay {
2968 expr: Box::new(expr),
2969 overlay_what: Box::new(what_expr),
2970 overlay_from: Box::new(from_expr),
2971 overlay_for: for_expr.map(Box::new),
2972 })
2973 }
2974
    /// Parses the body of a `TRIM(...)` call:
    /// `TRIM([BOTH | LEADING | TRAILING] [<chars> FROM] <expr>)` or the
    /// dialect-specific comma-separated `TRIM(<expr>, <chars>...)` form.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional leading position keyword (BOTH/LEADING/TRAILING).
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // `TRIM(<chars> FROM <expr>)`: the first expression parsed was
            // actually the characters to trim, not the trim subject.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            // Comma-separated form: the remaining arguments are the
            // characters to trim.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            // Plain `TRIM(<expr>)`, possibly with a position keyword.
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3019
3020 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3024 let next_token = self.next_token();
3025 match &next_token.token {
3026 Token::Word(w) => match w.keyword {
3027 Keyword::BOTH => Ok(TrimWhereField::Both),
3028 Keyword::LEADING => Ok(TrimWhereField::Leading),
3029 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3030 _ => self.expected("trim_where field", next_token)?,
3031 },
3032 _ => self.expected("trim_where field", next_token),
3033 }
3034 }
3035
3036 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3039 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3040 self.expect_token(&Token::RBracket)?;
3041 Ok(Expr::Array(Array { elem: exprs, named }))
3042 }
3043
    /// Parses the optional `ON OVERFLOW ...` clause of `LISTAGG`:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler>] WITH | WITHOUT COUNT`.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler is optional: WITH/WITHOUT directly after
                // TRUNCATE means it was omitted; any string-literal token
                // starts the filler expression.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH/WITHOUT must follow, then COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3082
    /// Parses a date/time field name (YEAR, MONTH, ..., TIMEZONE_REGION) as
    /// used by EXTRACT, CEIL/FLOOR and INTERVAL qualifiers.
    ///
    /// Dialects may additionally allow arbitrary identifiers
    /// (`allow_extract_custom`) or single-quoted strings
    /// (`allow_extract_single_quotes`) as custom fields.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery-style `WEEK(<weekday>)` takes an optional
                    // parenthesized start-of-week identifier.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                _ if self.dialect.allow_extract_custom() => {
                    // Unknown word: rewind and re-read it as a custom field
                    // identifier.
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                // Quoted custom field: rewind and parse as an identifier.
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3161
3162 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3166 match &self.peek_token_ref().token {
3167 Token::Word(w) => match w.keyword {
3168 Keyword::EXISTS => {
3169 let negated = true;
3170 let _ = self.parse_keyword(Keyword::EXISTS);
3171 self.parse_exists_expr(negated)
3172 }
3173 _ => Ok(Expr::UnaryOp {
3174 op: UnaryOperator::Not,
3175 expr: Box::new(
3176 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3177 ),
3178 }),
3179 },
3180 _ => Ok(Expr::UnaryOp {
3181 op: UnaryOperator::Not,
3182 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3183 }),
3184 }
3185 }
3186
3187 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3197 let token = self.expect_token(&Token::LBrace)?;
3198
3199 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3200 self.expect_token(&Token::RBrace)?;
3201 return Ok(fn_expr);
3202 }
3203
3204 if self.dialect.supports_dictionary_syntax() {
3205 self.prev_token(); return self.parse_dictionary();
3207 }
3208
3209 self.expected("an expression", token)
3210 }
3211
    /// Parses a full-text search expression of the form
    /// `MATCH (<col>, ...) AGAINST (<value> [<search modifier>])`.
    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;

        self.expect_keyword_is(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        // The value to search for.
        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        // Optional search modifier; `IN NATURAL LANGUAGE MODE` may
        // additionally be followed by `WITH QUERY EXPANSION`.
        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }
3260
    /// Parses an INTERVAL expression (the INTERVAL keyword itself is consumed
    /// by the caller): a value, an optional leading unit (qualifier), and
    /// optional precision / `TO <unit>` range, e.g.
    /// `INTERVAL '1-2' YEAR TO MONTH` or `INTERVAL '10' HOUR (1)`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a qualifier can take a full value expression;
        // otherwise only a prefix expression is parsed so that a following
        // unit keyword is not swallowed as part of the value.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Optional leading unit, e.g. the HOUR in `INTERVAL '10' HOUR`.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        // A leading SECOND may carry a combined
        // `(leading precision, fractional seconds precision)` pair; any other
        // unit takes a single optional precision plus an optional
        // `TO <unit>` range.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    // Only `TO SECOND` admits a fractional-seconds precision.
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3343
3344 pub fn next_token_is_temporal_unit(&mut self) -> bool {
3347 if let Token::Word(word) = &self.peek_token_ref().token {
3348 matches!(
3349 word.keyword,
3350 Keyword::YEAR
3351 | Keyword::YEARS
3352 | Keyword::MONTH
3353 | Keyword::MONTHS
3354 | Keyword::WEEK
3355 | Keyword::WEEKS
3356 | Keyword::DAY
3357 | Keyword::DAYS
3358 | Keyword::HOUR
3359 | Keyword::HOURS
3360 | Keyword::MINUTE
3361 | Keyword::MINUTES
3362 | Keyword::SECOND
3363 | Keyword::SECONDS
3364 | Keyword::CENTURY
3365 | Keyword::DECADE
3366 | Keyword::DOW
3367 | Keyword::DOY
3368 | Keyword::EPOCH
3369 | Keyword::ISODOW
3370 | Keyword::ISOYEAR
3371 | Keyword::JULIAN
3372 | Keyword::MICROSECOND
3373 | Keyword::MICROSECONDS
3374 | Keyword::MILLENIUM
3375 | Keyword::MILLENNIUM
3376 | Keyword::MILLISECOND
3377 | Keyword::MILLISECONDS
3378 | Keyword::NANOSECOND
3379 | Keyword::NANOSECONDS
3380 | Keyword::QUARTER
3381 | Keyword::TIMEZONE
3382 | Keyword::TIMEZONE_HOUR
3383 | Keyword::TIMEZONE_MINUTE
3384 )
3385 } else {
3386 false
3387 }
3388 }
3389
    /// Parses a typed struct literal, `STRUCT<...>(<value>, ...)`, entered
    /// with the STRUCT keyword already consumed.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // Put the STRUCT keyword back so parse_struct_type_def can expect it.
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        // A `>>` token closed one level too many: the type list consumed a
        // bracket belonging to an enclosing type, which cannot happen in a
        // well-formed literal.
        if trailing_bracket.0 {
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token_ref().span.start
            );
        }

        self.expect_token(&Token::LParen)?;
        // Non-empty `fields` means typed syntax, which forbids per-value
        // `AS` aliases (see parse_struct_field_expr).
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3417
    /// Parses one value of a struct literal, optionally aliased with
    /// `AS <name>`. `typed_syntax` is true when the literal declared its
    /// field types up front (`STRUCT<...>(...)`), in which case `AS` is
    /// rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // Rewind so the reported error location points back at the
                // AS keyword just consumed.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token_ref().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3449
    /// Parses `STRUCT<field, ...>` type syntax, delegating each field to
    /// `elem_parser`. Returns the fields plus a flag reporting whether the
    /// list was closed by a `>>` token (i.e. this list also closed an outer
    /// angle bracket, as in nested `STRUCT<STRUCT<...>>`).
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // Bare `STRUCT` (no `<...>` type list) yields an empty field list.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // Stop when a field already consumed the closing bracket (via a
            // `>>` token) or when no comma follows.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3492
    /// Parses a DuckDB-style `STRUCT(<name> <type>, ...)` type definition.
    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
        self.expect_keyword_is(Keyword::STRUCT)?;
        self.expect_token(&Token::LParen)?;
        // NOTE(review): the Result from the field list is deliberately not
        // unwrapped until after the closing paren check, so a paren error can
        // surface first — confirm this ordering is intended before changing.
        let struct_body = self.parse_comma_separated(|parser| {
            let field_name = parser.parse_identifier()?;
            let field_type = parser.parse_data_type()?;

            Ok(StructField {
                field_name: Some(field_name),
                field_type,
                options: None,
            })
        });
        self.expect_token(&Token::RParen)?;
        struct_body
    }
3510
    /// Parses a single field of a `STRUCT<...>` type definition: an optional
    /// name (with optional `:` separator), a data type, and optional
    /// `OPTIONS(...)`. Also reports whether the field's type consumed the
    /// closing `>` via a `>>` token.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // A field is "named" when it starts with `<ident> <type>` or
        // `<ident>: <type>`; otherwise it is positional (type only).
        let is_named_field = matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
        );

        let field_name = if is_named_field {
            let name = self.parse_identifier()?;
            // The `:` between name and type is optional.
            let _ = self.consume_token(&Token::Colon);
            Some(name)
        } else {
            None
        };

        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3552
3553 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3563 self.expect_keyword_is(Keyword::UNION)?;
3564
3565 self.expect_token(&Token::LParen)?;
3566
3567 let fields = self.parse_comma_separated(|p| {
3568 Ok(UnionField {
3569 field_name: p.parse_identifier()?,
3570 field_type: p.parse_data_type()?,
3571 })
3572 })?;
3573
3574 self.expect_token(&Token::RParen)?;
3575
3576 Ok(fields)
3577 }
3578
3579 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3590 self.expect_token(&Token::LBrace)?;
3591
3592 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3593
3594 self.expect_token(&Token::RBrace)?;
3595
3596 Ok(Expr::Dictionary(fields))
3597 }
3598
3599 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3610 let key = self.parse_identifier()?;
3611
3612 self.expect_token(&Token::Colon)?;
3613
3614 let expr = self.parse_expr()?;
3615
3616 Ok(DictionaryField {
3617 key,
3618 value: Box::new(expr),
3619 })
3620 }
3621
3622 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3632 self.expect_token(&Token::LBrace)?;
3633 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3634 self.expect_token(&Token::RBrace)?;
3635 Ok(Expr::Map(Map { entries: fields }))
3636 }
3637
3638 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3648 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3650
3651 self.expect_token(&Token::Colon)?;
3652
3653 let value = self.parse_expr()?;
3654
3655 Ok(MapEntry {
3656 key: Box::new(key),
3657 value: Box::new(value),
3658 })
3659 }
3660
3661 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3671 self.expect_keyword_is(Keyword::MAP)?;
3672 self.expect_token(&Token::LParen)?;
3673 let key_data_type = self.parse_data_type()?;
3674 self.expect_token(&Token::Comma)?;
3675 let value_data_type = self.parse_data_type()?;
3676 self.expect_token(&Token::RParen)?;
3677
3678 Ok((key_data_type, value_data_type))
3679 }
3680
3681 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3691 self.expect_keyword_is(Keyword::TUPLE)?;
3692 self.expect_token(&Token::LParen)?;
3693 let mut field_defs = vec![];
3694 loop {
3695 let (def, _) = self.parse_struct_field_def()?;
3696 field_defs.push(def);
3697 if !self.consume_token(&Token::Comma) {
3698 break;
3699 }
3700 }
3701 self.expect_token(&Token::RParen)?;
3702
3703 Ok(field_defs)
3704 }
3705
3706 fn expect_closing_angle_bracket(
3711 &mut self,
3712 trailing_bracket: MatchedTrailingBracket,
3713 ) -> Result<MatchedTrailingBracket, ParserError> {
3714 let trailing_bracket = if !trailing_bracket.0 {
3715 match &self.peek_token_ref().token {
3716 Token::Gt => {
3717 self.next_token();
3718 false.into()
3719 }
3720 Token::ShiftRight => {
3721 self.next_token();
3722 true.into()
3723 }
3724 _ => return self.expected_ref(">", self.peek_token_ref()),
3725 }
3726 } else {
3727 false.into()
3728 };
3729
3730 Ok(trailing_bracket)
3731 }
3732
3733 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3735 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3737 return infix;
3738 }
3739
3740 let dialect = self.dialect;
3741
3742 self.advance_token();
3743 let tok = self.get_current_token();
3744 debug!("infix: {tok:?}");
3745 let tok_index = self.get_current_index();
3746 let span = tok.span;
3747 let regular_binary_operator = match &tok.token {
3748 Token::Spaceship => Some(BinaryOperator::Spaceship),
3749 Token::DoubleEq => Some(BinaryOperator::Eq),
3750 Token::Assignment => Some(BinaryOperator::Assignment),
3751 Token::Eq => Some(BinaryOperator::Eq),
3752 Token::Neq => Some(BinaryOperator::NotEq),
3753 Token::Gt => Some(BinaryOperator::Gt),
3754 Token::GtEq => Some(BinaryOperator::GtEq),
3755 Token::Lt => Some(BinaryOperator::Lt),
3756 Token::LtEq => Some(BinaryOperator::LtEq),
3757 Token::Plus => Some(BinaryOperator::Plus),
3758 Token::Minus => Some(BinaryOperator::Minus),
3759 Token::Mul => Some(BinaryOperator::Multiply),
3760 Token::Mod => Some(BinaryOperator::Modulo),
3761 Token::StringConcat => Some(BinaryOperator::StringConcat),
3762 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3763 Token::Caret => {
3764 if dialect_is!(dialect is PostgreSqlDialect) {
3767 Some(BinaryOperator::PGExp)
3768 } else {
3769 Some(BinaryOperator::BitwiseXor)
3770 }
3771 }
3772 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3773 Token::Div => Some(BinaryOperator::Divide),
3774 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3775 Some(BinaryOperator::DuckIntegerDivide)
3776 }
3777 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3778 Some(BinaryOperator::PGBitwiseShiftLeft)
3779 }
3780 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3781 Some(BinaryOperator::PGBitwiseShiftRight)
3782 }
3783 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3784 Some(BinaryOperator::PGBitwiseXor)
3785 }
3786 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3787 Some(BinaryOperator::PGOverlap)
3788 }
3789 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3790 Some(BinaryOperator::PGOverlap)
3791 }
3792 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3793 Some(BinaryOperator::And)
3794 }
3795 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3796 Some(BinaryOperator::PGStartsWith)
3797 }
3798 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3799 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3800 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3801 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3802 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3803 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3804 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3805 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3806 Token::Arrow => Some(BinaryOperator::Arrow),
3807 Token::LongArrow => Some(BinaryOperator::LongArrow),
3808 Token::HashArrow => Some(BinaryOperator::HashArrow),
3809 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3810 Token::AtArrow => Some(BinaryOperator::AtArrow),
3811 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3812 Token::HashMinus => Some(BinaryOperator::HashMinus),
3813 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3814 Token::AtAt => Some(BinaryOperator::AtAt),
3815 Token::Question => Some(BinaryOperator::Question),
3816 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3817 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3818 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3819 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3820 Some(BinaryOperator::DoubleHash)
3821 }
3822
3823 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3824 Some(BinaryOperator::AndLt)
3825 }
3826 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3827 Some(BinaryOperator::AndGt)
3828 }
3829 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3830 Some(BinaryOperator::QuestionDash)
3831 }
3832 Token::AmpersandLeftAngleBracketVerticalBar
3833 if self.dialect.supports_geometric_types() =>
3834 {
3835 Some(BinaryOperator::AndLtPipe)
3836 }
3837 Token::VerticalBarAmpersandRightAngleBracket
3838 if self.dialect.supports_geometric_types() =>
3839 {
3840 Some(BinaryOperator::PipeAndGt)
3841 }
3842 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3843 Some(BinaryOperator::LtDashGt)
3844 }
3845 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3846 Some(BinaryOperator::LtCaret)
3847 }
3848 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3849 Some(BinaryOperator::GtCaret)
3850 }
3851 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3852 Some(BinaryOperator::QuestionHash)
3853 }
3854 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3855 Some(BinaryOperator::QuestionDoublePipe)
3856 }
3857 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3858 Some(BinaryOperator::QuestionDashPipe)
3859 }
3860 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3861 Some(BinaryOperator::TildeEq)
3862 }
3863 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3864 Some(BinaryOperator::LtLtPipe)
3865 }
3866 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3867 Some(BinaryOperator::PipeGtGt)
3868 }
3869 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3870
3871 Token::Word(w) => match w.keyword {
3872 Keyword::AND => Some(BinaryOperator::And),
3873 Keyword::OR => Some(BinaryOperator::Or),
3874 Keyword::XOR => Some(BinaryOperator::Xor),
3875 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3876 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3877 self.expect_token(&Token::LParen)?;
3878 let mut idents = vec![];
3883 loop {
3884 self.advance_token();
3885 idents.push(self.get_current_token().to_string());
3886 if !self.consume_token(&Token::Period) {
3887 break;
3888 }
3889 }
3890 self.expect_token(&Token::RParen)?;
3891 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3892 }
3893 _ => None,
3894 },
3895 _ => None,
3896 };
3897
3898 let tok = self.token_at(tok_index);
3899 if let Some(op) = regular_binary_operator {
3900 if let Some(keyword) =
3901 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3902 {
3903 self.expect_token(&Token::LParen)?;
3904 let right = if self.peek_sub_query() {
3905 self.prev_token(); self.parse_subexpr(precedence)?
3909 } else {
3910 let right = self.parse_subexpr(precedence)?;
3912 self.expect_token(&Token::RParen)?;
3913 right
3914 };
3915
3916 if !matches!(
3917 op,
3918 BinaryOperator::Gt
3919 | BinaryOperator::Lt
3920 | BinaryOperator::GtEq
3921 | BinaryOperator::LtEq
3922 | BinaryOperator::Eq
3923 | BinaryOperator::NotEq
3924 | BinaryOperator::PGRegexMatch
3925 | BinaryOperator::PGRegexIMatch
3926 | BinaryOperator::PGRegexNotMatch
3927 | BinaryOperator::PGRegexNotIMatch
3928 | BinaryOperator::PGLikeMatch
3929 | BinaryOperator::PGILikeMatch
3930 | BinaryOperator::PGNotLikeMatch
3931 | BinaryOperator::PGNotILikeMatch
3932 ) {
3933 return parser_err!(
3934 format!(
3935 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3936 ),
3937 span.start
3938 );
3939 };
3940
3941 Ok(match keyword {
3942 Keyword::ALL => Expr::AllOp {
3943 left: Box::new(expr),
3944 compare_op: op,
3945 right: Box::new(right),
3946 },
3947 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3948 left: Box::new(expr),
3949 compare_op: op,
3950 right: Box::new(right),
3951 is_some: keyword == Keyword::SOME,
3952 },
3953 unexpected_keyword => return Err(ParserError::ParserError(
3954 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3955 )),
3956 })
3957 } else {
3958 Ok(Expr::BinaryOp {
3959 left: Box::new(expr),
3960 op,
3961 right: Box::new(self.parse_subexpr(precedence)?),
3962 })
3963 }
3964 } else if let Token::Word(w) = &tok.token {
3965 match w.keyword {
3966 Keyword::IS => {
3967 if self.parse_keyword(Keyword::NULL) {
3968 Ok(Expr::IsNull(Box::new(expr)))
3969 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3970 Ok(Expr::IsNotNull(Box::new(expr)))
3971 } else if self.parse_keywords(&[Keyword::TRUE]) {
3972 Ok(Expr::IsTrue(Box::new(expr)))
3973 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3974 Ok(Expr::IsNotTrue(Box::new(expr)))
3975 } else if self.parse_keywords(&[Keyword::FALSE]) {
3976 Ok(Expr::IsFalse(Box::new(expr)))
3977 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3978 Ok(Expr::IsNotFalse(Box::new(expr)))
3979 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3980 Ok(Expr::IsUnknown(Box::new(expr)))
3981 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3982 Ok(Expr::IsNotUnknown(Box::new(expr)))
3983 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3984 let expr2 = self.parse_expr()?;
3985 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3986 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3987 {
3988 let expr2 = self.parse_expr()?;
3989 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3990 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3991 Ok(is_normalized)
3992 } else {
3993 self.expected_ref(
3994 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3995 self.peek_token_ref(),
3996 )
3997 }
3998 }
3999 Keyword::AT => {
4000 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4001 Ok(Expr::AtTimeZone {
4002 timestamp: Box::new(expr),
4003 time_zone: Box::new(self.parse_subexpr(precedence)?),
4004 })
4005 }
4006 Keyword::NOT
4007 | Keyword::IN
4008 | Keyword::BETWEEN
4009 | Keyword::LIKE
4010 | Keyword::ILIKE
4011 | Keyword::SIMILAR
4012 | Keyword::REGEXP
4013 | Keyword::RLIKE => {
4014 self.prev_token();
4015 let negated = self.parse_keyword(Keyword::NOT);
4016 let regexp = self.parse_keyword(Keyword::REGEXP);
4017 let rlike = self.parse_keyword(Keyword::RLIKE);
4018 let null = if !self.in_column_definition_state() {
4019 self.parse_keyword(Keyword::NULL)
4020 } else {
4021 false
4022 };
4023 if regexp || rlike {
4024 Ok(Expr::RLike {
4025 negated,
4026 expr: Box::new(expr),
4027 pattern: Box::new(
4028 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4029 ),
4030 regexp,
4031 })
4032 } else if negated && null {
4033 Ok(Expr::IsNotNull(Box::new(expr)))
4034 } else if self.parse_keyword(Keyword::IN) {
4035 self.parse_in(expr, negated)
4036 } else if self.parse_keyword(Keyword::BETWEEN) {
4037 self.parse_between(expr, negated)
4038 } else if self.parse_keyword(Keyword::LIKE) {
4039 Ok(Expr::Like {
4040 negated,
4041 any: self.parse_keyword(Keyword::ANY),
4042 expr: Box::new(expr),
4043 pattern: Box::new(
4044 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4045 ),
4046 escape_char: self.parse_escape_char()?,
4047 })
4048 } else if self.parse_keyword(Keyword::ILIKE) {
4049 Ok(Expr::ILike {
4050 negated,
4051 any: self.parse_keyword(Keyword::ANY),
4052 expr: Box::new(expr),
4053 pattern: Box::new(
4054 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4055 ),
4056 escape_char: self.parse_escape_char()?,
4057 })
4058 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4059 Ok(Expr::SimilarTo {
4060 negated,
4061 expr: Box::new(expr),
4062 pattern: Box::new(
4063 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4064 ),
4065 escape_char: self.parse_escape_char()?,
4066 })
4067 } else {
4068 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4069 }
4070 }
4071 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4072 Ok(Expr::IsNotNull(Box::new(expr)))
4073 }
4074 Keyword::MEMBER => {
4075 if self.parse_keyword(Keyword::OF) {
4076 self.expect_token(&Token::LParen)?;
4077 let array = self.parse_expr()?;
4078 self.expect_token(&Token::RParen)?;
4079 Ok(Expr::MemberOf(MemberOf {
4080 value: Box::new(expr),
4081 array: Box::new(array),
4082 }))
4083 } else {
4084 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4085 }
4086 }
4087 _ => parser_err!(
4089 format!("No infix parser for token {:?}", tok.token),
4090 tok.span.start
4091 ),
4092 }
4093 } else if Token::DoubleColon == *tok {
4094 Ok(Expr::Cast {
4095 kind: CastKind::DoubleColon,
4096 expr: Box::new(expr),
4097 data_type: self.parse_data_type()?,
4098 array: false,
4099 format: None,
4100 })
4101 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4102 Ok(Expr::UnaryOp {
4103 op: UnaryOperator::PGPostfixFactorial,
4104 expr: Box::new(expr),
4105 })
4106 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4107 || (Token::Colon == *tok)
4108 {
4109 self.prev_token();
4110 self.parse_json_access(expr)
4111 } else {
4112 parser_err!(
4114 format!("No infix parser for token {:?}", tok.token),
4115 tok.span.start
4116 )
4117 }
4118 }
4119
4120 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4122 if self.parse_keyword(Keyword::ESCAPE) {
4123 Ok(Some(self.parse_value()?))
4124 } else {
4125 Ok(None)
4126 }
4127 }
4128
    /// Parses the interior of a subscript expression (the `[` has already been
    /// consumed): either a single index `[i]` or a slice `[lo:hi:stride]`, where
    /// every slice component is optional.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[expr]` is a plain index; `[:]`-style with an immediate `]` is a slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If we parsed a lower bound, the `:` separating it from the upper bound
        // has not been consumed yet (the leading-colon branch consumed it above).
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lo:]` — upper bound omitted.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[lo:hi]` — no stride component.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // `[lo:hi:]` or `[lo:hi:stride]`.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride expression was parsed, the closing `]` is still pending.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4203
4204 pub fn parse_multi_dim_subscript(
4206 &mut self,
4207 chain: &mut Vec<AccessExpr>,
4208 ) -> Result<(), ParserError> {
4209 while self.consume_token(&Token::LBracket) {
4210 self.parse_subscript(chain)?;
4211 }
4212 Ok(())
4213 }
4214
4215 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4219 let subscript = self.parse_subscript_inner()?;
4220 chain.push(AccessExpr::Subscript(subscript));
4221 Ok(())
4222 }
4223
4224 fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
4225 let token = self.next_token();
4226 match token.token {
4227 Token::Word(Word {
4228 value,
4229 quote_style: quote_style @ (Some('"') | Some('`') | None),
4232 keyword: _,
4235 }) => Ok(JsonPathElem::Dot {
4236 key: value,
4237 quoted: quote_style.is_some(),
4238 }),
4239
4240 Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
4244
4245 _ => self.expected("variant object key name", token),
4246 }
4247 }
4248
4249 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4250 let path = self.parse_json_path()?;
4251 Ok(Expr::JsonAccess {
4252 value: Box::new(expr),
4253 path,
4254 })
4255 }
4256
    /// Parses a JSON path: a leading `:key` or `:[expr]` element followed by any
    /// number of `.key` or `[expr]` elements. Stops (rewinding one token) at the
    /// first token that does not continue the path.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:[expr]` is only valid as the very first path element.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // `:key` starts the path; `.key` continues it.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `[expr]` may appear anywhere in the path.
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; put the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this after seeing a path-introducing token.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4289
    /// Parses the parenthesized operand of `[NOT] IN`: `IN UNNEST(expr)`,
    /// `IN (subquery)`, or `IN (expr, ...)`. `expr` is the left operand already
    /// parsed; `negated` records a preceding `NOT`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; maybe_parse rewinds the cursor on failure so we
        // can fall back to parsing a plain expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects permit `IN ()`.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4324
4325 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4327 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4330 self.expect_keyword_is(Keyword::AND)?;
4331 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4332 Ok(Expr::Between {
4333 expr: Box::new(expr),
4334 negated,
4335 low: Box::new(low),
4336 high: Box::new(high),
4337 })
4338 }
4339
4340 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4342 Ok(Expr::Cast {
4343 kind: CastKind::DoubleColon,
4344 expr: Box::new(expr),
4345 data_type: self.parse_data_type()?,
4346 array: false,
4347 format: None,
4348 })
4349 }
4350
    /// Returns the precedence of the next operator token, using the active
    /// dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4355
4356 pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4359 self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4360 }
4361
    /// Returns a clone of the next non-whitespace token without consuming it.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4369
    /// Returns a reference to the next non-whitespace token without consuming it.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4375
4376 pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4399 self.peek_tokens_with_location()
4400 .map(|with_loc| with_loc.token)
4401 }
4402
4403 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4408 let mut index = self.index;
4409 core::array::from_fn(|_| loop {
4410 let token = self.tokens.get(index);
4411 index += 1;
4412 if let Some(TokenWithSpan {
4413 token: Token::Whitespace(_),
4414 span: _,
4415 }) = token
4416 {
4417 continue;
4418 }
4419 break token.cloned().unwrap_or(TokenWithSpan {
4420 token: Token::EOF,
4421 span: Span::empty(),
4422 });
4423 })
4424 }
4425
4426 pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4431 let mut index = self.index;
4432 core::array::from_fn(|_| loop {
4433 let token = self.tokens.get(index);
4434 index += 1;
4435 if let Some(TokenWithSpan {
4436 token: Token::Whitespace(_),
4437 span: _,
4438 }) = token
4439 {
4440 continue;
4441 }
4442 break token.unwrap_or(&EOF_TOKEN);
4443 })
4444 }
4445
    /// Returns a clone of the `n`th upcoming non-whitespace token without
    /// consuming anything.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4450
4451 pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4453 let mut index = self.index;
4454 loop {
4455 index += 1;
4456 match self.tokens.get(index - 1) {
4457 Some(TokenWithSpan {
4458 token: Token::Whitespace(_),
4459 span: _,
4460 }) => continue,
4461 non_whitespace => {
4462 if n == 0 {
4463 return non_whitespace.unwrap_or(&EOF_TOKEN);
4464 }
4465 n -= 1;
4466 }
4467 }
4468 }
4469 }
4470
    /// Returns the next raw token (whitespace included) without advancing.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4476
4477 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4479 self.tokens
4480 .get(self.index + n)
4481 .cloned()
4482 .unwrap_or(TokenWithSpan {
4483 token: Token::EOF,
4484 span: Span::empty(),
4485 })
4486 }
4487
    /// Returns a reference to the raw token `n` positions ahead (whitespace
    /// included), or the shared EOF token when past the end.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4492
4493 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4497 let index = self.index;
4498 let matched = self.parse_keywords(expected);
4499 self.index = index;
4500 matched
4501 }
4502
4503 pub fn next_token(&mut self) -> TokenWithSpan {
4508 self.advance_token();
4509 self.get_current_token().clone()
4510 }
4511
    /// Returns the index of the current (most recently consumed) token.
    ///
    /// `saturating_sub` keeps this safe to call before any token is consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4519
4520 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4522 self.index += 1;
4523 self.tokens.get(self.index - 1)
4524 }
4525
4526 pub fn advance_token(&mut self) {
4530 loop {
4531 self.index += 1;
4532 match self.tokens.get(self.index - 1) {
4533 Some(TokenWithSpan {
4534 token: Token::Whitespace(_),
4535 span: _,
4536 }) => continue,
4537 _ => break,
4538 }
4539 }
4540 }
4541
    /// Returns the most recently consumed token (EOF if nothing was consumed yet).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4548
    /// Returns the raw token one position before the current one.
    ///
    /// NOTE(review): this indexes the raw stream, so the returned token may be
    /// whitespace — confirm callers expect that.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4555
    /// Returns the raw token at the cursor (the next token to be consumed),
    /// without skipping whitespace.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4562
4563 pub fn prev_token(&mut self) {
4570 loop {
4571 assert!(self.index > 0);
4572 self.index -= 1;
4573 if let Some(TokenWithSpan {
4574 token: Token::Whitespace(_),
4575 span: _,
4576 }) = self.tokens.get(self.index)
4577 {
4578 continue;
4579 }
4580 return;
4581 }
4582 }
4583
4584 pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4586 parser_err!(
4587 format!("Expected: {expected}, found: {found}"),
4588 found.span.start
4589 )
4590 }
4591
4592 pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4594 parser_err!(
4595 format!("Expected: {expected}, found: {found}"),
4596 found.span.start
4597 )
4598 }
4599
4600 pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4602 let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4603 parser_err!(
4604 format!("Expected: {expected}, found: {found}"),
4605 found.span.start
4606 )
4607 }
4608
4609 #[must_use]
4612 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4613 if self.peek_keyword(expected) {
4614 self.advance_token();
4615 true
4616 } else {
4617 false
4618 }
4619 }
4620
4621 #[must_use]
4622 pub fn peek_keyword(&self, expected: Keyword) -> bool {
4626 matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4627 }
4628
    /// Consumes `expected` followed by exactly the given `tokens` if — and only
    /// if — they all appear next, in order; returns whether they were consumed.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4639
    /// Like [`Self::parse_keyword_with_tokens`], but never consumes anything.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4647
4648 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4649 match &self.peek_token_ref().token {
4650 Token::Word(w) if expected == w.keyword => {
4651 for (idx, token) in tokens.iter().enumerate() {
4652 if self.peek_nth_token_ref(idx + 1).token != *token {
4653 return false;
4654 }
4655 }
4656
4657 if consume {
4658 for _ in 0..(tokens.len() + 1) {
4659 self.advance_token();
4660 }
4661 }
4662
4663 true
4664 }
4665 _ => false,
4666 }
4667 }
4668
    /// Consumes the full sequence of `keywords` if they all appear next, in
    /// order; on a partial match the cursor is left unchanged.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4676
    /// Like [`Self::parse_keywords`], but additionally returns the token index
    /// of the first matched keyword; `None` (with the cursor restored) when the
    /// full sequence does not match.
    #[must_use]
    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
        let start_index = self.index;
        let mut first_keyword_index = None;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // Roll back anything consumed by a partial match.
                self.index = start_index;
                return None;
            }
            if first_keyword_index.is_none() {
                // `index` now points one past the keyword just consumed.
                first_keyword_index = Some(self.index.saturating_sub(1));
            }
        }
        first_keyword_index
    }
4694
4695 #[must_use]
4698 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4699 for keyword in keywords {
4700 if self.peek_keyword(*keyword) {
4701 return Some(*keyword);
4702 }
4703 }
4704 None
4705 }
4706
4707 #[must_use]
4711 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4712 match &self.peek_token_ref().token {
4713 Token::Word(w) => {
4714 keywords
4715 .iter()
4716 .find(|keyword| **keyword == w.keyword)
4717 .map(|keyword| {
4718 self.advance_token();
4719 *keyword
4720 })
4721 }
4722 _ => None,
4723 }
4724 }
4725
4726 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4729 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4730 Ok(keyword)
4731 } else {
4732 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4733 self.expected_ref(
4734 &format!("one of {}", keywords.join(" or ")),
4735 self.peek_token_ref(),
4736 )
4737 }
4738 }
4739
4740 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4745 if self.parse_keyword(expected) {
4746 Ok(self.get_current_token().clone())
4747 } else {
4748 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4749 }
4750 }
4751
4752 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4758 if self.parse_keyword(expected) {
4759 Ok(())
4760 } else {
4761 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4762 }
4763 }
4764
4765 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4768 for &kw in expected {
4769 self.expect_keyword_is(kw)?;
4770 }
4771 Ok(())
4772 }
4773
4774 #[must_use]
4778 pub fn consume_token(&mut self, expected: &Token) -> bool {
4779 if self.peek_token_ref() == expected {
4780 self.advance_token();
4781 true
4782 } else {
4783 false
4784 }
4785 }
4786
4787 #[must_use]
4791 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4792 let index = self.index;
4793 for token in tokens {
4794 if !self.consume_token(token) {
4795 self.index = index;
4796 return false;
4797 }
4798 }
4799 true
4800 }
4801
4802 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4804 if self.peek_token_ref() == expected {
4805 Ok(self.next_token())
4806 } else {
4807 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4808 }
4809 }
4810
4811 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4812 where
4813 <T as FromStr>::Err: Display,
4814 {
4815 s.parse::<T>().map_err(|e| {
4816 ParserError::ParserError(format!(
4817 "Could not parse '{s}' as {}: {e}{loc}",
4818 core::any::type_name::<T>()
4819 ))
4820 })
4821 }
4822
4823 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4825 let trailing_commas =
4831 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4832
4833 self.parse_comma_separated_with_trailing_commas(
4834 |p| p.parse_select_item(),
4835 trailing_commas,
4836 Self::is_reserved_for_column_alias,
4837 )
4838 }
4839
    /// Parses the comma-separated list of privileges in a `GRANT`/`REVOKE`
    /// statement, tolerating a trailing comma when the option is enabled.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // After a comma, a terminator token means the comma was trailing
                // rather than separating another privilege.
                match &self.peek_token_ref().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4863
4864 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4866 let trailing_commas = self.dialect.supports_from_trailing_commas();
4867
4868 self.parse_comma_separated_with_trailing_commas(
4869 Parser::parse_table_and_joins,
4870 trailing_commas,
4871 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4872 )
4873 }
4874
4875 fn is_parse_comma_separated_end_with_trailing_commas<R>(
4882 &mut self,
4883 trailing_commas: bool,
4884 is_reserved_keyword: &R,
4885 ) -> bool
4886 where
4887 R: Fn(&Keyword, &mut Parser) -> bool,
4888 {
4889 if !self.consume_token(&Token::Comma) {
4890 true
4891 } else if trailing_commas {
4892 let token = self.next_token().token;
4893 let is_end = match token {
4894 Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4895 Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4896 true
4897 }
4898 _ => false,
4899 };
4900 self.prev_token();
4901
4902 is_end
4903 } else {
4904 false
4905 }
4906 }
4907
    /// Returns true when the comma-separated list being parsed has ended, using
    /// the parser's own trailing-comma option and the column-alias reserved-word
    /// rule as the terminator predicate.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4916
    /// Parses one or more items produced by `f`, separated by commas, honoring
    /// the parser's trailing-comma option.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4928
4929 fn parse_comma_separated_with_trailing_commas<T, F, R>(
4934 &mut self,
4935 mut f: F,
4936 trailing_commas: bool,
4937 is_reserved_keyword: R,
4938 ) -> Result<Vec<T>, ParserError>
4939 where
4940 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4941 R: Fn(&Keyword, &mut Parser) -> bool,
4942 {
4943 let mut values = vec![];
4944 loop {
4945 values.push(f(self)?);
4946 if self.is_parse_comma_separated_end_with_trailing_commas(
4947 trailing_commas,
4948 &is_reserved_keyword,
4949 ) {
4950 break;
4951 }
4952 }
4953 Ok(values)
4954 }
4955
4956 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4958 where
4959 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4960 {
4961 let mut values = vec![];
4962 loop {
4963 values.push(f(self)?);
4964 if !self.consume_token(&Token::Period) {
4965 break;
4966 }
4967 }
4968 Ok(values)
4969 }
4970
4971 pub fn parse_keyword_separated<T, F>(
4973 &mut self,
4974 keyword: Keyword,
4975 mut f: F,
4976 ) -> Result<Vec<T>, ParserError>
4977 where
4978 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4979 {
4980 let mut values = vec![];
4981 loop {
4982 values.push(f(self)?);
4983 if !self.parse_keyword(keyword) {
4984 break;
4985 }
4986 }
4987 Ok(values)
4988 }
4989
4990 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4992 where
4993 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4994 {
4995 self.expect_token(&Token::LParen)?;
4996 let res = f(self)?;
4997 self.expect_token(&Token::RParen)?;
4998 Ok(res)
4999 }
5000
5001 pub fn parse_comma_separated0<T, F>(
5004 &mut self,
5005 f: F,
5006 end_token: Token,
5007 ) -> Result<Vec<T>, ParserError>
5008 where
5009 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
5010 {
5011 if self.peek_token_ref().token == end_token {
5012 return Ok(vec![]);
5013 }
5014
5015 if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
5016 let _ = self.consume_token(&Token::Comma);
5017 return Ok(vec![]);
5018 }
5019
5020 self.parse_comma_separated(f)
5021 }
5022
5023 pub(crate) fn parse_statement_list(
5027 &mut self,
5028 terminal_keywords: &[Keyword],
5029 ) -> Result<Vec<Statement>, ParserError> {
5030 let mut values = vec![];
5031 loop {
5032 match &self.peek_nth_token_ref(0).token {
5033 Token::EOF => break,
5034 Token::Word(w) => {
5035 if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
5036 break;
5037 }
5038 }
5039 _ => {}
5040 }
5041
5042 values.push(self.parse_statement()?);
5043 self.expect_token(&Token::SemiColon)?;
5044 }
5045 Ok(values)
5046 }
5047
    /// Returns true when `kw` cannot serve as an implicit column alias in the
    /// active dialect (and should therefore terminate a comma-separated list).
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5054
5055 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
5059 where
5060 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5061 {
5062 match self.try_parse(f) {
5063 Ok(t) => Ok(Some(t)),
5064 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
5065 _ => Ok(None),
5066 }
5067 }
5068
5069 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
5071 where
5072 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5073 {
5074 let index = self.index;
5075 match f(self) {
5076 Ok(t) => Ok(t),
5077 Err(e) => {
5078 self.index = index;
5080 Err(e)
5081 }
5082 }
5083 }
5084
    /// Parses an optional `ALL`, `DISTINCT`, or `DISTINCT ON (<exprs>)` clause.
    ///
    /// Returns `Ok(None)` when neither keyword is present, and errors when both
    /// keywords are given (in either order).
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Capture the location before consuming, so errors point at the keyword.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            // parse_one_of_keywords only yields keywords from its argument list,
            // so this arm should be unreachable in practice.
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only a plain DISTINCT may be followed by an ON (...) clause.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            // Empty `DISTINCT ON ()`: rewind so the expect below re-consumes `)`.
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5123
    /// Parse a SQL `CREATE` statement (the `CREATE` keyword has already been
    /// consumed).
    ///
    /// First parses the common optional modifiers (`OR REPLACE`, `OR ALTER`,
    /// `LOCAL`/`GLOBAL`, `TRANSIENT`, `TEMP`/`TEMPORARY`, DuckDB `PERSISTENT`,
    /// and view parameters), then dispatches on the object-type keyword.
    /// The order of the arms matters: each `parse_keyword(s)` call consumes
    /// tokens on a match, so multi-word forms must be tried before or in a
    /// compatible order with their prefixes.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // GLOBAL => Some(true), LOCAL => Some(false), neither => None.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            self.parse_create_user(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::AGGREGATE) {
            self.parse_create_aggregate(or_replace).map(Into::into)
        } else if or_replace {
            // `OR REPLACE` is only valid for the object types tried above;
            // placing this guard here makes the remaining arms unreachable
            // when OR REPLACE was given.
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // CREATE OPERATOR [FAMILY | CLASS] ...
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else if self.parse_keyword(Keyword::FOREIGN) {
            // CREATE FOREIGN { DATA WRAPPER | TABLE } ...
            if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
                self.parse_create_foreign_data_wrapper().map(Into::into)
            } else if self.parse_keyword(Keyword::TABLE) {
                self.parse_create_foreign_table().map(Into::into)
            } else {
                self.expected_ref(
                    "DATA WRAPPER or TABLE after CREATE FOREIGN",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
            self.parse_create_text_search()
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5235
5236 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5237 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5238 let name = self.parse_identifier()?;
5239 let options = self
5240 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5241 .options;
5242 let with_tags = self.parse_keyword(Keyword::WITH);
5243 let tags = if self.parse_keyword(Keyword::TAG) {
5244 self.parse_key_value_options(true, &[])?.options
5245 } else {
5246 vec![]
5247 };
5248 Ok(CreateUser {
5249 or_replace,
5250 if_not_exists,
5251 name,
5252 options: KeyValueOptions {
5253 options,
5254 delimiter: KeyValueOptionsDelimiter::Space,
5255 },
5256 with_tags,
5257 tags: KeyValueOptions {
5258 options: tags,
5259 delimiter: KeyValueOptionsDelimiter::Comma,
5260 },
5261 })
5262 }
5263
5264 pub fn parse_create_secret(
5266 &mut self,
5267 or_replace: bool,
5268 temporary: bool,
5269 persistent: bool,
5270 ) -> Result<Statement, ParserError> {
5271 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5272
5273 let mut storage_specifier = None;
5274 let mut name = None;
5275 if self.peek_token_ref().token != Token::LParen {
5276 if self.parse_keyword(Keyword::IN) {
5277 storage_specifier = self.parse_identifier().ok()
5278 } else {
5279 name = self.parse_identifier().ok();
5280 }
5281
5282 if storage_specifier.is_none()
5284 && self.peek_token_ref().token != Token::LParen
5285 && self.parse_keyword(Keyword::IN)
5286 {
5287 storage_specifier = self.parse_identifier().ok();
5288 }
5289 }
5290
5291 self.expect_token(&Token::LParen)?;
5292 self.expect_keyword_is(Keyword::TYPE)?;
5293 let secret_type = self.parse_identifier()?;
5294
5295 let mut options = Vec::new();
5296 if self.consume_token(&Token::Comma) {
5297 options.append(&mut self.parse_comma_separated(|p| {
5298 let key = p.parse_identifier()?;
5299 let value = p.parse_identifier()?;
5300 Ok(SecretOption { key, value })
5301 })?);
5302 }
5303 self.expect_token(&Token::RParen)?;
5304
5305 let temp = match (temporary, persistent) {
5306 (true, false) => Some(true),
5307 (false, true) => Some(false),
5308 (false, false) => None,
5309 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
5310 };
5311
5312 Ok(Statement::CreateSecret {
5313 or_replace,
5314 temporary: temp,
5315 if_not_exists,
5316 name,
5317 storage_specifier,
5318 secret_type,
5319 options,
5320 })
5321 }
5322
5323 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5325 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5326 if self.parse_keyword(Keyword::TABLE) {
5327 let table_name = self.parse_object_name(false)?;
5328 if self.peek_token_ref().token != Token::EOF {
5329 if let Token::Word(word) = &self.peek_token_ref().token {
5330 if word.keyword == Keyword::OPTIONS {
5331 options = self.parse_options(Keyword::OPTIONS)?
5332 }
5333 };
5334
5335 if self.peek_token_ref().token != Token::EOF {
5336 let (a, q) = self.parse_as_query()?;
5337 has_as = a;
5338 query = Some(q);
5339 }
5340
5341 Ok(Statement::Cache {
5342 table_flag,
5343 table_name,
5344 has_as,
5345 options,
5346 query,
5347 })
5348 } else {
5349 Ok(Statement::Cache {
5350 table_flag,
5351 table_name,
5352 has_as,
5353 options,
5354 query,
5355 })
5356 }
5357 } else {
5358 table_flag = Some(self.parse_object_name(false)?);
5359 if self.parse_keyword(Keyword::TABLE) {
5360 let table_name = self.parse_object_name(false)?;
5361 if self.peek_token_ref().token != Token::EOF {
5362 if let Token::Word(word) = &self.peek_token_ref().token {
5363 if word.keyword == Keyword::OPTIONS {
5364 options = self.parse_options(Keyword::OPTIONS)?
5365 }
5366 };
5367
5368 if self.peek_token_ref().token != Token::EOF {
5369 let (a, q) = self.parse_as_query()?;
5370 has_as = a;
5371 query = Some(q);
5372 }
5373
5374 Ok(Statement::Cache {
5375 table_flag,
5376 table_name,
5377 has_as,
5378 options,
5379 query,
5380 })
5381 } else {
5382 Ok(Statement::Cache {
5383 table_flag,
5384 table_name,
5385 has_as,
5386 options,
5387 query,
5388 })
5389 }
5390 } else {
5391 if self.peek_token_ref().token == Token::EOF {
5392 self.prev_token();
5393 }
5394 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5395 }
5396 }
5397 }
5398
5399 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5401 match &self.peek_token_ref().token {
5402 Token::Word(word) => match word.keyword {
5403 Keyword::AS => {
5404 self.next_token();
5405 Ok((true, self.parse_query()?))
5406 }
5407 _ => Ok((false, self.parse_query()?)),
5408 },
5409 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5410 }
5411 }
5412
5413 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5415 self.expect_keyword_is(Keyword::TABLE)?;
5416 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5417 let table_name = self.parse_object_name(false)?;
5418 Ok(Statement::UNCache {
5419 table_name,
5420 if_exists,
5421 })
5422 }
5423
5424 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5426 self.expect_keyword_is(Keyword::TABLE)?;
5427 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5428 let table_name = self.parse_object_name(false)?;
5429 self.expect_keyword_is(Keyword::USING)?;
5430 let module_name = self.parse_identifier()?;
5431 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5436 Ok(Statement::CreateVirtualTable {
5437 name: table_name,
5438 if_not_exists,
5439 module_name,
5440 module_args,
5441 })
5442 }
5443
5444 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5446 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5447
5448 let schema_name = self.parse_schema_name()?;
5449
5450 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5451 Some(self.parse_expr()?)
5452 } else {
5453 None
5454 };
5455
5456 let with = if self.peek_keyword(Keyword::WITH) {
5457 Some(self.parse_options(Keyword::WITH)?)
5458 } else {
5459 None
5460 };
5461
5462 let options = if self.peek_keyword(Keyword::OPTIONS) {
5463 Some(self.parse_options(Keyword::OPTIONS)?)
5464 } else {
5465 None
5466 };
5467
5468 let clone = if self.parse_keyword(Keyword::CLONE) {
5469 Some(self.parse_object_name(false)?)
5470 } else {
5471 None
5472 };
5473
5474 Ok(Statement::CreateSchema {
5475 schema_name,
5476 if_not_exists,
5477 with,
5478 options,
5479 default_collate_spec,
5480 clone,
5481 })
5482 }
5483
5484 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5485 if self.parse_keyword(Keyword::AUTHORIZATION) {
5486 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5487 } else {
5488 let name = self.parse_object_name(false)?;
5489
5490 if self.parse_keyword(Keyword::AUTHORIZATION) {
5491 Ok(SchemaName::NamedAuthorization(
5492 name,
5493 self.parse_identifier()?,
5494 ))
5495 } else {
5496 Ok(SchemaName::Simple(name))
5497 }
5498 }
5499 }
5500
    /// Parse a `CREATE DATABASE` statement (the `DATABASE` keyword has been
    /// consumed), including Hive-style `LOCATION`/`MANAGEDLOCATION`, a
    /// Snowflake-style `CLONE`, and MySQL-style `[DEFAULT] CHARACTER SET /
    /// CHARSET / COLLATE` clauses.
    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let db_name = self.parse_object_name(false)?;
        let mut location = None;
        let mut managed_location = None;
        // LOCATION and MANAGEDLOCATION may appear in any order, each followed
        // by a string literal.
        loop {
            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
                Some(Keyword::MANAGEDLOCATION) => {
                    managed_location = Some(self.parse_literal_string()?)
                }
                _ => break,
            }
        }
        let clone = if self.parse_keyword(Keyword::CLONE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let mut default_charset = None;
        let mut default_collation = None;
        loop {
            // DEFAULT is an optional prefix to both the charset and collate
            // clauses.
            let has_default = self.parse_keyword(Keyword::DEFAULT);
            // NOTE(review): `&&` binds tighter than `||`, so this condition
            // parses as `(default_charset.is_none() && CHARACTER SET) ||
            // CHARSET` — a repeated CHARSET clause would be re-parsed and
            // overwrite the earlier value. Confirm whether the guard was
            // intended to cover both alternatives.
            if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
                || self.parse_keyword(Keyword::CHARSET)
            {
                // Optional `=` between the keyword and the value.
                let _ = self.consume_token(&Token::Eq);
                default_charset = Some(self.parse_identifier()?.value);
            } else if self.parse_keyword(Keyword::COLLATE) {
                let _ = self.consume_token(&Token::Eq);
                default_collation = Some(self.parse_identifier()?.value);
            } else if has_default {
                // DEFAULT was consumed but no clause followed: give the
                // keyword back before stopping.
                self.prev_token();
                break;
            } else {
                break;
            }
        }

        Ok(Statement::CreateDatabase {
            db_name,
            if_not_exists: ine,
            location,
            managed_location,
            or_replace: false,
            transient: false,
            clone,
            data_retention_time_in_days: None,
            max_data_extension_time_in_days: None,
            external_volume: None,
            catalog: None,
            replace_invalid_characters: None,
            default_ddl_collation: None,
            storage_serialization_policy: None,
            comment: None,
            default_charset,
            default_collation,
            catalog_sync: None,
            catalog_sync_namespace_mode: None,
            catalog_sync_namespace_flatten_delimiter: None,
            with_tags: None,
            with_contacts: None,
        })
    }
5575
5576 pub fn parse_optional_create_function_using(
5578 &mut self,
5579 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5580 if !self.parse_keyword(Keyword::USING) {
5581 return Ok(None);
5582 };
5583 let keyword =
5584 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5585
5586 let uri = self.parse_literal_string()?;
5587
5588 match keyword {
5589 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5590 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5591 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5592 _ => self.expected(
5593 "JAR, FILE or ARCHIVE, got {:?}",
5594 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5595 ),
5596 }
5597 }
5598
5599 pub fn parse_create_function(
5601 &mut self,
5602 or_alter: bool,
5603 or_replace: bool,
5604 temporary: bool,
5605 ) -> Result<Statement, ParserError> {
5606 if dialect_of!(self is HiveDialect) {
5607 self.parse_hive_create_function(or_replace, temporary)
5608 .map(Into::into)
5609 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5610 self.parse_postgres_create_function(or_replace, temporary)
5611 .map(Into::into)
5612 } else if dialect_of!(self is DuckDbDialect) {
5613 self.parse_create_macro(or_replace, temporary)
5614 } else if dialect_of!(self is BigQueryDialect) {
5615 self.parse_bigquery_create_function(or_replace, temporary)
5616 .map(Into::into)
5617 } else if dialect_of!(self is MsSqlDialect) {
5618 self.parse_mssql_create_function(or_alter, or_replace, temporary)
5619 .map(Into::into)
5620 } else {
5621 self.prev_token();
5622 self.expected_ref("an object type after CREATE", self.peek_token_ref())
5623 }
5624 }
5625
    /// Parse a PostgreSQL-style `CREATE FUNCTION` statement.
    ///
    /// The function name and parenthesized argument list come first; the
    /// remaining clauses (`RETURNS`, `AS`, `LANGUAGE`, volatility, null-call
    /// behavior, `PARALLEL`, `SECURITY`, `SET`, `RETURN`) may then appear in
    /// any order, each at most once (except `SET`, which may repeat).
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Accumulates the order-independent function attributes parsed below.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects duplicate occurrences of a single-use clause.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // `SET name FROM CURRENT` or `SET name { = | TO } { DEFAULT | values }`
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // RETURN <expr> is an alternative body form; it shares the
                // function_body slot with AS, so the two are mutually exclusive.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No more function attributes.
                break;
            }
        }

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5780
5781 fn parse_hive_create_function(
5785 &mut self,
5786 or_replace: bool,
5787 temporary: bool,
5788 ) -> Result<CreateFunction, ParserError> {
5789 let name = self.parse_object_name(false)?;
5790 self.expect_keyword_is(Keyword::AS)?;
5791
5792 let body = self.parse_create_function_body_string()?;
5793 let using = self.parse_optional_create_function_using()?;
5794
5795 Ok(CreateFunction {
5796 or_alter: false,
5797 or_replace,
5798 temporary,
5799 name,
5800 function_body: Some(body),
5801 using,
5802 if_not_exists: false,
5803 args: None,
5804 return_type: None,
5805 behavior: None,
5806 called_on_null: None,
5807 parallel: None,
5808 security: None,
5809 set_params: vec![],
5810 language: None,
5811 determinism_specifier: None,
5812 options: None,
5813 remote_connection: None,
5814 })
5815 }
5816
    /// Parse a BigQuery `CREATE FUNCTION` statement.
    ///
    /// Handles SQL and JavaScript functions as well as remote functions
    /// (`REMOTE WITH CONNECTION ...`, which carry no local body). The
    /// `OPTIONS(...)` clause may appear before or after the `AS` body; which
    /// position was used is preserved via the `CreateFunctionBody` variant.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS(...) appearing here precedes any AS body.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions have no local body; otherwise AS <expr> is required.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS seen yet: it may still follow the body.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5896
    /// Parse an MS SQL Server `CREATE [OR ALTER] FUNCTION` statement.
    ///
    /// Supports scalar functions, inline table-valued functions
    /// (`RETURN (<subquery>)` / `RETURN SELECT ...`), and multi-statement
    /// table-valued functions (`RETURNS @t TABLE (...) ... BEGIN ... END`).
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Try `RETURNS @name TABLE (...)`: parse the table variable name,
        // confirm TABLE follows, then step back one token so parse_data_type
        // sees the TABLE keyword and produces the column definitions. On any
        // failure maybe_parse backtracks and we fall through to a plain type.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            p.expect_keyword_is(Keyword::TABLE)?;
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // AS is optional before the function body.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            // Multi-statement body: BEGIN <statements> END, keeping the
            // delimiter tokens attached for round-tripping.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token_ref().token == Token::LParen {
                // RETURN (<subquery or expression>)
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                // RETURN SELECT ... (bare select, no parentheses)
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5987
5988 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
5989 if self.parse_keyword(Keyword::SETOF) {
5990 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
5991 } else {
5992 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
5993 }
5994 }
5995
5996 fn parse_create_function_name_and_params(
5997 &mut self,
5998 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5999 let name = self.parse_object_name(false)?;
6000 let parse_function_param =
6001 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6002 let name = parser.parse_identifier()?;
6003 let data_type = parser.parse_data_type()?;
6004 let default_expr = if parser.consume_token(&Token::Eq) {
6005 Some(parser.parse_expr()?)
6006 } else {
6007 None
6008 };
6009
6010 Ok(OperateFunctionArg {
6011 mode: None,
6012 name: Some(name),
6013 data_type,
6014 default_expr,
6015 })
6016 };
6017 self.expect_token(&Token::LParen)?;
6018 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6019 self.expect_token(&Token::RParen)?;
6020 Ok((name, args))
6021 }
6022
    /// Parse one argument in a `CREATE FUNCTION` parameter list:
    /// `[IN | OUT | INOUT | VARIADIC] [name] <type> [{DEFAULT | =} <expr>]`.
    ///
    /// Because the name is optional, the tokens are ambiguous: they are first
    /// parsed as a data type, and reinterpreted as `name type` if a second
    /// data type immediately follows.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        // Optional argument mode.
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Token index recorded after the first parse — presumably the last
        // token it consumed (the would-be name when it is a single word);
        // TODO confirm get_current_index semantics.
        let data_type_idx = self.get_current_index();

        // Rejects DEFAULT so that maybe_parse backtracks instead of treating
        // the default-value keyword as a type name.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // The reinterpretation is only valid when the "name" was a single
            // word token.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Optional default value, introduced by either DEFAULT or `=`.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6083
6084 fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
6085 let mode = if self.parse_keyword(Keyword::IN) {
6086 Some(ArgMode::In)
6087 } else {
6088 if self
6089 .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
6090 .is_some()
6091 {
6092 return self.expected_ref(
6093 "IN or argument type in aggregate signature",
6094 self.peek_token_ref(),
6095 );
6096 }
6097 None
6098 };
6099
6100 let mut name = None;
6103 let mut data_type = self.parse_data_type()?;
6104 let data_type_idx = self.get_current_index();
6105
6106 fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
6107 if parser.peek_keyword(Keyword::DEFAULT)
6108 || parser.peek_keyword(Keyword::ORDER)
6109 || parser.peek_token_ref().token == Token::Comma
6110 || parser.peek_token_ref().token == Token::RParen
6111 {
6112 parser_err!(
6114 "The current token cannot start an aggregate argument type",
6115 parser.peek_token_ref().span.start
6116 )
6117 } else {
6118 parser.parse_data_type()
6119 }
6120 }
6121
6122 if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
6123 let token = self.token_at(data_type_idx);
6124 if !matches!(token.token, Token::Word(_)) {
6125 return self.expected("a name or type", token.clone());
6126 }
6127
6128 name = Some(Ident::new(token.to_string()));
6129 data_type = next_data_type;
6130 }
6131
6132 if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
6133 return self.expected_ref(
6134 "',' or ')' or ORDER BY after aggregate argument type",
6135 self.peek_token_ref(),
6136 );
6137 }
6138
6139 Ok(OperateFunctionArg {
6140 mode,
6141 name,
6142 data_type,
6143 default_expr: None,
6144 })
6145 }
6146
6147 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6153 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6154 {
6155 self.prev_token();
6156 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6157 }
6158 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6159 let trigger_name = self.parse_object_name(false)?;
6160 let table_name = if self.parse_keyword(Keyword::ON) {
6161 Some(self.parse_object_name(false)?)
6162 } else {
6163 None
6164 };
6165 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6166 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6167 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6168 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6169 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6170 )),
6171 None => None,
6172 };
6173 Ok(DropTrigger {
6174 if_exists,
6175 trigger_name,
6176 table_name,
6177 option,
6178 })
6179 }
6180
6181 pub fn parse_create_trigger(
6183 &mut self,
6184 temporary: bool,
6185 or_alter: bool,
6186 or_replace: bool,
6187 is_constraint: bool,
6188 ) -> Result<CreateTrigger, ParserError> {
6189 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6190 {
6191 self.prev_token();
6192 return self.expected_ref("an object type after CREATE", self.peek_token_ref());
6193 }
6194
6195 let name = self.parse_object_name(false)?;
6196 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
6197
6198 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
6199 self.expect_keyword_is(Keyword::ON)?;
6200 let table_name = self.parse_object_name(false)?;
6201
6202 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
6203 self.parse_object_name(true).ok()
6204 } else {
6205 None
6206 };
6207
6208 let characteristics = self.parse_constraint_characteristics()?;
6209
6210 let mut referencing = vec![];
6211 if self.parse_keyword(Keyword::REFERENCING) {
6212 while let Some(refer) = self.parse_trigger_referencing()? {
6213 referencing.push(refer);
6214 }
6215 }
6216
6217 let trigger_object = if self.parse_keyword(Keyword::FOR) {
6218 let include_each = self.parse_keyword(Keyword::EACH);
6219 let trigger_object =
6220 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
6221 Keyword::ROW => TriggerObject::Row,
6222 Keyword::STATEMENT => TriggerObject::Statement,
6223 unexpected_keyword => return Err(ParserError::ParserError(
6224 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
6225 )),
6226 };
6227
6228 Some(if include_each {
6229 TriggerObjectKind::ForEach(trigger_object)
6230 } else {
6231 TriggerObjectKind::For(trigger_object)
6232 })
6233 } else {
6234 let _ = self.parse_keyword(Keyword::FOR);
6235
6236 None
6237 };
6238
6239 let condition = self
6240 .parse_keyword(Keyword::WHEN)
6241 .then(|| self.parse_expr())
6242 .transpose()?;
6243
6244 let mut exec_body = None;
6245 let mut statements = None;
6246 if self.parse_keyword(Keyword::EXECUTE) {
6247 exec_body = Some(self.parse_trigger_exec_body()?);
6248 } else {
6249 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6250 }
6251
6252 Ok(CreateTrigger {
6253 or_alter,
6254 temporary,
6255 or_replace,
6256 is_constraint,
6257 name,
6258 period,
6259 period_before_table: true,
6260 events,
6261 table_name,
6262 referenced_table_name,
6263 referencing,
6264 trigger_object,
6265 condition,
6266 exec_body,
6267 statements_as: false,
6268 statements,
6269 characteristics,
6270 })
6271 }
6272
6273 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6275 Ok(
6276 match self.expect_one_of_keywords(&[
6277 Keyword::FOR,
6278 Keyword::BEFORE,
6279 Keyword::AFTER,
6280 Keyword::INSTEAD,
6281 ])? {
6282 Keyword::FOR => TriggerPeriod::For,
6283 Keyword::BEFORE => TriggerPeriod::Before,
6284 Keyword::AFTER => TriggerPeriod::After,
6285 Keyword::INSTEAD => self
6286 .expect_keyword_is(Keyword::OF)
6287 .map(|_| TriggerPeriod::InsteadOf)?,
6288 unexpected_keyword => return Err(ParserError::ParserError(
6289 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6290 )),
6291 },
6292 )
6293 }
6294
6295 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6297 Ok(
6298 match self.expect_one_of_keywords(&[
6299 Keyword::INSERT,
6300 Keyword::UPDATE,
6301 Keyword::DELETE,
6302 Keyword::TRUNCATE,
6303 ])? {
6304 Keyword::INSERT => TriggerEvent::Insert,
6305 Keyword::UPDATE => {
6306 if self.parse_keyword(Keyword::OF) {
6307 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6308 TriggerEvent::Update(cols)
6309 } else {
6310 TriggerEvent::Update(vec![])
6311 }
6312 }
6313 Keyword::DELETE => TriggerEvent::Delete,
6314 Keyword::TRUNCATE => TriggerEvent::Truncate,
6315 unexpected_keyword => return Err(ParserError::ParserError(
6316 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6317 )),
6318 },
6319 )
6320 }
6321
6322 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6324 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6325 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6326 TriggerReferencingType::OldTable
6327 }
6328 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6329 TriggerReferencingType::NewTable
6330 }
6331 _ => {
6332 return Ok(None);
6333 }
6334 };
6335
6336 let is_as = self.parse_keyword(Keyword::AS);
6337 let transition_relation_name = self.parse_object_name(false)?;
6338 Ok(Some(TriggerReferencing {
6339 refer_type,
6340 is_as,
6341 transition_relation_name,
6342 }))
6343 }
6344
6345 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6347 Ok(TriggerExecBody {
6348 exec_type: match self
6349 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6350 {
6351 Keyword::FUNCTION => TriggerExecBodyType::Function,
6352 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6353 unexpected_keyword => return Err(ParserError::ParserError(
6354 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
6355 )),
6356 },
6357 func_desc: self.parse_function_desc()?,
6358 })
6359 }
6360
6361 pub fn parse_create_macro(
6363 &mut self,
6364 or_replace: bool,
6365 temporary: bool,
6366 ) -> Result<Statement, ParserError> {
6367 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6368 let name = self.parse_object_name(false)?;
6369 self.expect_token(&Token::LParen)?;
6370 let args = if self.consume_token(&Token::RParen) {
6371 self.prev_token();
6372 None
6373 } else {
6374 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6375 };
6376
6377 self.expect_token(&Token::RParen)?;
6378 self.expect_keyword_is(Keyword::AS)?;
6379
6380 Ok(Statement::CreateMacro {
6381 or_replace,
6382 temporary,
6383 name,
6384 args,
6385 definition: if self.parse_keyword(Keyword::TABLE) {
6386 MacroDefinition::Table(self.parse_query()?)
6387 } else {
6388 MacroDefinition::Expr(self.parse_expr()?)
6389 },
6390 })
6391 } else {
6392 self.prev_token();
6393 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6394 }
6395 }
6396
6397 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6398 let name = self.parse_identifier()?;
6399
6400 let default_expr =
6401 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6402 Some(self.parse_expr()?)
6403 } else {
6404 None
6405 };
6406 Ok(MacroArg { name, default_expr })
6407 }
6408
6409 pub fn parse_create_external_table(
6411 &mut self,
6412 or_replace: bool,
6413 ) -> Result<CreateTable, ParserError> {
6414 self.expect_keyword_is(Keyword::TABLE)?;
6415 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6416 let table_name = self.parse_object_name(false)?;
6417 let (columns, constraints) = self.parse_columns()?;
6418
6419 let hive_distribution = self.parse_hive_distribution()?;
6420 let hive_formats = self.parse_hive_formats()?;
6421
6422 let file_format = if let Some(ref hf) = hive_formats {
6423 if let Some(ref ff) = hf.storage {
6424 match ff {
6425 HiveIOFormat::FileFormat { format } => Some(*format),
6426 _ => None,
6427 }
6428 } else {
6429 None
6430 }
6431 } else {
6432 None
6433 };
6434 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6435 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6436 let table_options = if !table_properties.is_empty() {
6437 CreateTableOptions::TableProperties(table_properties)
6438 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6439 CreateTableOptions::Options(options)
6440 } else {
6441 CreateTableOptions::None
6442 };
6443 Ok(CreateTableBuilder::new(table_name)
6444 .columns(columns)
6445 .constraints(constraints)
6446 .hive_distribution(hive_distribution)
6447 .hive_formats(hive_formats)
6448 .table_options(table_options)
6449 .or_replace(or_replace)
6450 .if_not_exists(if_not_exists)
6451 .external(true)
6452 .file_format(file_format)
6453 .location(location)
6454 .build())
6455 }
6456
6457 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6461 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6462 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6463 let table_name = self.parse_object_name(true)?;
6464
6465 self.expect_keyword_is(Keyword::CLONE)?;
6466 let clone = Some(self.parse_object_name(true)?);
6467
6468 let version =
6469 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6470 {
6471 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6472 } else {
6473 None
6474 };
6475
6476 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6477 CreateTableOptions::Options(options)
6478 } else {
6479 CreateTableOptions::None
6480 };
6481
6482 Ok(CreateTableBuilder::new(table_name)
6483 .snapshot(true)
6484 .if_not_exists(if_not_exists)
6485 .clone_clause(clone)
6486 .version(version)
6487 .table_options(table_options)
6488 .build())
6489 }
6490
6491 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6493 let next_token = self.next_token();
6494 match &next_token.token {
6495 Token::Word(w) => match w.keyword {
6496 Keyword::AVRO => Ok(FileFormat::AVRO),
6497 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6498 Keyword::ORC => Ok(FileFormat::ORC),
6499 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6500 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6501 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6502 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6503 _ => self.expected("fileformat", next_token),
6504 },
6505 _ => self.expected("fileformat", next_token),
6506 }
6507 }
6508
6509 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6510 if self.consume_token(&Token::Eq) {
6511 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6512 } else {
6513 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6514 }
6515 }
6516
6517 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6519 let next_token = self.next_token();
6520 match &next_token.token {
6521 Token::Word(w) => match w.keyword {
6522 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6523 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6524 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6525 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6526 _ => self.expected("fileformat", next_token),
6527 },
6528 _ => self.expected("fileformat", next_token),
6529 }
6530 }
6531
    /// Parses a `CREATE [SECURE] [MATERIALIZED] VIEW` statement body. The
    /// caller has already consumed `CREATE` plus any `OR ALTER`/`OR REPLACE`/
    /// `TEMPORARY` modifiers and dialect-specific view params, which are
    /// passed in as arguments.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery names may contain unquoted hyphens.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // IF NOT EXISTS may appear either before or after the view name;
        // record which position was used so the AST round-trips faithfully.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        // `WITH (...)` options; may be replaced below by a non-empty
        // BigQuery/generic `OPTIONS(...)` list.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        // Optional `CLUSTER BY (col, ...)`.
        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // `TO <table>` target (ClickHouse/generic only).
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '<text>'`, only on dialects that support it here.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Trailing `WITH NO SCHEMA BINDING` (Redshift/generic only).
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
        })
    }
6626
    /// Parses MySQL-style `CREATE VIEW` modifiers that appear between
    /// `CREATE` and `VIEW`: `ALGORITHM = ...`, `DEFINER = ...` and
    /// `SQL SECURITY ...`, in that order. Returns `Ok(None)` when none of
    /// them are present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        // ALGORITHM = UNDEFINED | MERGE | TEMPTABLE
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    _ => {
                        // Defensive fallback: step back and re-read the
                        // offending token so the error points at it.
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        // DEFINER = <grantee>
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        // SQL SECURITY DEFINER | INVOKER
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    _ => {
                        // Same defensive fallback as the ALGORITHM arm above.
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only build a params struct if at least one modifier was present.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6684
    /// Parses the body of a `CREATE ROLE` statement: one or more role names
    /// followed by an optional `WITH` and a dialect-dependent set of role
    /// options. MSSQL recognizes only AUTHORIZATION; Postgres recognizes the
    /// full attribute list below; other dialects accept no options.
    ///
    /// Each option may be specified at most once; a duplicate produces a
    /// parser error located at the duplicated keyword.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // WITH is an optional noise word before the option list.
        let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Collected option values; `None` / empty vec means "not specified".
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the option keyword just consumed, for duplicate
            // errors; falls back to 0:0 if the index is somehow out of range.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            // Every arm evaluates to Result<(), ParserError>; `?` after the
            // match propagates duplicate-option errors.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // PASSWORD NULL is an explicit "no password" marker.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    // CONNECTION must be followed by LIMIT <number>.
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    // VALID must be followed by UNTIL <value>.
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // IN ROLE <list> or IN GROUP <list>.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
6909
6910 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6912 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6913 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6914 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6915 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6916 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6917 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6918 )),
6919 None => {
6920 match self.parse_identifier() {
6921 Ok(ident) => Owner::Ident(ident),
6922 Err(e) => {
6923 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6924 }
6925 }
6926 }
6927 };
6928 Ok(owner)
6929 }
6930
6931 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
6933 let name = self.parse_object_name(false)?;
6934 self.expect_keyword_is(Keyword::AS)?;
6935 let data_type = self.parse_data_type()?;
6936 let collation = if self.parse_keyword(Keyword::COLLATE) {
6937 Some(self.parse_identifier()?)
6938 } else {
6939 None
6940 };
6941 let default = if self.parse_keyword(Keyword::DEFAULT) {
6942 Some(self.parse_expr()?)
6943 } else {
6944 None
6945 };
6946 let mut constraints = Vec::new();
6947 while let Some(constraint) = self.parse_optional_table_constraint()? {
6948 constraints.push(constraint);
6949 }
6950
6951 Ok(CreateDomain {
6952 name,
6953 data_type,
6954 collation,
6955 default,
6956 constraints,
6957 })
6958 }
6959
    /// Parses the body of a `CREATE POLICY` statement: the policy name, the
    /// target table, then the optional `AS`, `FOR`, `TO`, `USING` and
    /// `WITH CHECK` clauses, in that fixed order.
    pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // AS PERMISSIVE | RESTRICTIVE
        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
                )),
            })
        } else {
            None
        };

        // FOR ALL | SELECT | INSERT | UPDATE | DELETE
        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
                )),
            })
        } else {
            None
        };

        // TO <role list> — each entry parsed as an owner (role or keyword).
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // USING ( <expr> )
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // WITH CHECK ( <expr> )
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
7044
7045 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7055 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7056 let name = self.parse_identifier()?;
7057
7058 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7059 Some(self.parse_literal_string()?)
7060 } else {
7061 None
7062 };
7063
7064 let url = if self.parse_keyword(Keyword::URL) {
7065 Some(self.parse_literal_string()?)
7066 } else {
7067 None
7068 };
7069
7070 let comment = self.parse_optional_inline_comment()?;
7071
7072 let with_dcproperties =
7073 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7074 properties if !properties.is_empty() => Some(properties),
7075 _ => None,
7076 };
7077
7078 Ok(CreateConnector {
7079 name,
7080 if_not_exists,
7081 connector_type,
7082 url,
7083 comment,
7084 with_dcproperties,
7085 })
7086 }
7087
7088 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7094 let mut parts = vec![];
7095 loop {
7096 parts.push(ObjectNamePart::Identifier(Ident::new(
7097 self.next_token().to_string(),
7098 )));
7099 if !self.consume_token(&Token::Period) {
7100 break;
7101 }
7102 }
7103 Ok(ObjectName(parts))
7104 }
7105
    /// Parses the body of a `CREATE OPERATOR` statement: an operator name
    /// followed by a parenthesized, comma-separated option list.
    ///
    /// Each option may appear at most once — every match arm below is
    /// guarded by a "not seen yet" check, and a duplicate (or any other
    /// unexpected keyword) falls through to the error arm. `FUNCTION` (or
    /// its synonym `PROCEDURE`) is mandatory and checked after the list.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Boolean flags: no `= value` part.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE share one slot; remember which
                // spelling was used.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or
                // the `OPERATOR(<name>)` wrapper form.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached on a duplicated option (guard failed above).
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION/PROCEDURE is the only mandatory option.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7227
7228 pub fn parse_create_aggregate(
7232 &mut self,
7233 or_replace: bool,
7234 ) -> Result<CreateAggregate, ParserError> {
7235 let name = self.parse_object_name(false)?;
7236
7237 self.expect_token(&Token::LParen)?;
7239 let args = if self.consume_token(&Token::Mul) {
7240 vec![]
7242 } else if self.consume_token(&Token::RParen) {
7243 self.prev_token();
7244 vec![]
7245 } else {
7246 let parsed = self.parse_comma_separated(|p| p.parse_data_type())?;
7247 parsed
7248 };
7249 self.expect_token(&Token::RParen)?;
7250
7251 self.expect_token(&Token::LParen)?;
7253 let mut options: Vec<CreateAggregateOption> = Vec::new();
7254 loop {
7255 let token = self.next_token();
7256 match &token.token {
7257 Token::RParen => break,
7258 Token::Comma => continue,
7259 Token::Word(word) => {
7260 let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
7261 options.push(option);
7262 }
7263 other => {
7264 return Err(ParserError::ParserError(format!(
7265 "Unexpected token in CREATE AGGREGATE options: {other:?}"
7266 )));
7267 }
7268 }
7269 }
7270
7271 Ok(CreateAggregate {
7272 or_replace,
7273 name,
7274 args,
7275 options,
7276 })
7277 }
7278
    /// Parses one `key = value` option from the parenthesized option list of
    /// `CREATE AGGREGATE` (PostgreSQL syntax). `key` is the option name,
    /// already upper-cased by the caller; an unknown key is a parse error.
    fn parse_create_aggregate_option(
        &mut self,
        key: &str,
    ) -> Result<CreateAggregateOption, ParserError> {
        match key {
            // State-transition function and state data type.
            "SFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "STYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
            }
            // Approximate average size (in bytes) of the state value.
            "SSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Sspace(size))
            }
            // Final-function family: FINALFUNC_EXTRA is a bare flag (no `=`).
            "FINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Finalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
            "FINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::FinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            // Partial-aggregation / parallel support functions.
            "COMBINEFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Combinefunc(
                    self.parse_object_name(false)?,
                ))
            }
            "SERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Serialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "DESERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Deserialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Initial state value (any literal value expression).
            "INITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
            }
            // Moving-aggregate (`M*`) variants of the options above.
            "MSFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Msfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MINVFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minvfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MSTYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
            }
            "MSSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Msspace(size))
            }
            "MFINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mfinalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
            "MFINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::MfinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "MINITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
            }
            // Sort operator for MIN/MAX-like aggregates.
            "SORTOP" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sortop(
                    self.parse_object_name(false)?,
                ))
            }
            "PARALLEL" => {
                self.expect_token(&Token::Eq)?;
                let parallel = match self.expect_one_of_keywords(&[
                    Keyword::SAFE,
                    Keyword::RESTRICTED,
                    Keyword::UNSAFE,
                ])? {
                    Keyword::SAFE => FunctionParallel::Safe,
                    Keyword::RESTRICTED => FunctionParallel::Restricted,
                    Keyword::UNSAFE => FunctionParallel::Unsafe,
                    // expect_one_of_keywords only returns a listed keyword.
                    _ => unreachable!(),
                };
                Ok(CreateAggregateOption::Parallel(parallel))
            }
            // Bare flag for ordered-set hypothetical aggregates.
            "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
            other => Err(ParserError::ParserError(format!(
                "Unknown CREATE AGGREGATE option: {other}"
            ))),
        }
    }
7398
7399 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7400 let token = self.next_token();
7401 match &token.token {
7402 Token::Word(word) => match word.value.to_uppercase().as_str() {
7403 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7404 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7405 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7406 other => Err(ParserError::ParserError(format!(
7407 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7408 ))),
7409 },
7410 other => Err(ParserError::ParserError(format!(
7411 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7412 ))),
7413 }
7414 }
7415
7416 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7420 let name = self.parse_object_name(false)?;
7421 self.expect_keyword(Keyword::USING)?;
7422 let using = self.parse_identifier()?;
7423
7424 Ok(CreateOperatorFamily { name, using })
7425 }
7426
7427 pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
7431 let name = self.parse_object_name(false)?;
7432 let default = self.parse_keyword(Keyword::DEFAULT);
7433 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
7434 let for_type = self.parse_data_type()?;
7435 self.expect_keyword(Keyword::USING)?;
7436 let using = self.parse_identifier()?;
7437
7438 let family = if self.parse_keyword(Keyword::FAMILY) {
7439 Some(self.parse_object_name(false)?)
7440 } else {
7441 None
7442 };
7443
7444 self.expect_keyword(Keyword::AS)?;
7445
7446 let mut items = vec![];
7447 loop {
7448 if self.parse_keyword(Keyword::OPERATOR) {
7449 let strategy_number = self.parse_literal_uint()?;
7450 let operator_name = self.parse_operator_name()?;
7451
7452 let op_types = if self.consume_token(&Token::LParen) {
7454 let left = self.parse_data_type()?;
7455 self.expect_token(&Token::Comma)?;
7456 let right = self.parse_data_type()?;
7457 self.expect_token(&Token::RParen)?;
7458 Some(OperatorArgTypes { left, right })
7459 } else {
7460 None
7461 };
7462
7463 let purpose = if self.parse_keyword(Keyword::FOR) {
7465 if self.parse_keyword(Keyword::SEARCH) {
7466 Some(OperatorPurpose::ForSearch)
7467 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7468 let sort_family = self.parse_object_name(false)?;
7469 Some(OperatorPurpose::ForOrderBy { sort_family })
7470 } else {
7471 return self
7472 .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
7473 }
7474 } else {
7475 None
7476 };
7477
7478 items.push(OperatorClassItem::Operator {
7479 strategy_number,
7480 operator_name,
7481 op_types,
7482 purpose,
7483 });
7484 } else if self.parse_keyword(Keyword::FUNCTION) {
7485 let support_number = self.parse_literal_uint()?;
7486
7487 let op_types = if self.consume_token(&Token::LParen)
7489 && self.peek_token_ref().token != Token::RParen
7490 {
7491 let mut types = vec![];
7492 loop {
7493 types.push(self.parse_data_type()?);
7494 if !self.consume_token(&Token::Comma) {
7495 break;
7496 }
7497 }
7498 self.expect_token(&Token::RParen)?;
7499 Some(types)
7500 } else if self.consume_token(&Token::LParen) {
7501 self.expect_token(&Token::RParen)?;
7502 Some(vec![])
7503 } else {
7504 None
7505 };
7506
7507 let function_name = self.parse_object_name(false)?;
7508
7509 let argument_types = if self.consume_token(&Token::LParen) {
7511 let mut types = vec![];
7512 loop {
7513 if self.peek_token_ref().token == Token::RParen {
7514 break;
7515 }
7516 types.push(self.parse_data_type()?);
7517 if !self.consume_token(&Token::Comma) {
7518 break;
7519 }
7520 }
7521 self.expect_token(&Token::RParen)?;
7522 types
7523 } else {
7524 vec![]
7525 };
7526
7527 items.push(OperatorClassItem::Function {
7528 support_number,
7529 op_types,
7530 function_name,
7531 argument_types,
7532 });
7533 } else if self.parse_keyword(Keyword::STORAGE) {
7534 let storage_type = self.parse_data_type()?;
7535 items.push(OperatorClassItem::Storage { storage_type });
7536 } else {
7537 break;
7538 }
7539
7540 if !self.consume_token(&Token::Comma) {
7542 break;
7543 }
7544 }
7545
7546 Ok(CreateOperatorClass {
7547 name,
7548 default,
7549 for_type,
7550 using,
7551 family,
7552 items,
7553 })
7554 }
7555
7556 pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
7558 let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
7560 && self.parse_keyword(Keyword::TEMPORARY);
7561 let persistent = dialect_of!(self is DuckDbDialect)
7562 && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
7563
7564 let object_type = if self.parse_keyword(Keyword::TABLE) {
7565 ObjectType::Table
7566 } else if self.parse_keyword(Keyword::COLLATION) {
7567 ObjectType::Collation
7568 } else if self.parse_keyword(Keyword::VIEW) {
7569 ObjectType::View
7570 } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
7571 ObjectType::MaterializedView
7572 } else if self.parse_keyword(Keyword::INDEX) {
7573 ObjectType::Index
7574 } else if self.parse_keyword(Keyword::ROLE) {
7575 ObjectType::Role
7576 } else if self.parse_keyword(Keyword::SCHEMA) {
7577 ObjectType::Schema
7578 } else if self.parse_keyword(Keyword::DATABASE) {
7579 ObjectType::Database
7580 } else if self.parse_keyword(Keyword::SEQUENCE) {
7581 ObjectType::Sequence
7582 } else if self.parse_keyword(Keyword::STAGE) {
7583 ObjectType::Stage
7584 } else if self.parse_keyword(Keyword::TYPE) {
7585 ObjectType::Type
7586 } else if self.parse_keyword(Keyword::USER) {
7587 ObjectType::User
7588 } else if self.parse_keyword(Keyword::STREAM) {
7589 ObjectType::Stream
7590 } else if self.parse_keyword(Keyword::FUNCTION) {
7591 return self.parse_drop_function().map(Into::into);
7592 } else if self.parse_keyword(Keyword::POLICY) {
7593 return self.parse_drop_policy().map(Into::into);
7594 } else if self.parse_keyword(Keyword::CONNECTOR) {
7595 return self.parse_drop_connector();
7596 } else if self.parse_keyword(Keyword::DOMAIN) {
7597 return self.parse_drop_domain().map(Into::into);
7598 } else if self.parse_keyword(Keyword::PROCEDURE) {
7599 return self.parse_drop_procedure();
7600 } else if self.parse_keyword(Keyword::SECRET) {
7601 return self.parse_drop_secret(temporary, persistent);
7602 } else if self.parse_keyword(Keyword::TRIGGER) {
7603 return self.parse_drop_trigger().map(Into::into);
7604 } else if self.parse_keyword(Keyword::EXTENSION) {
7605 return self.parse_drop_extension();
7606 } else if self.parse_keyword(Keyword::OPERATOR) {
7607 return if self.parse_keyword(Keyword::FAMILY) {
7609 self.parse_drop_operator_family()
7610 } else if self.parse_keyword(Keyword::CLASS) {
7611 self.parse_drop_operator_class()
7612 } else {
7613 self.parse_drop_operator()
7614 };
7615 } else {
7616 return self.expected_ref(
7617 "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
7618 self.peek_token_ref(),
7619 );
7620 };
7621 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7624 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7625
7626 let loc = self.peek_token_ref().span.start;
7627 let cascade = self.parse_keyword(Keyword::CASCADE);
7628 let restrict = self.parse_keyword(Keyword::RESTRICT);
7629 let purge = self.parse_keyword(Keyword::PURGE);
7630 if cascade && restrict {
7631 return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
7632 }
7633 if object_type == ObjectType::Role && (cascade || restrict || purge) {
7634 return parser_err!(
7635 "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
7636 loc
7637 );
7638 }
7639 let table = if self.parse_keyword(Keyword::ON) {
7640 Some(self.parse_object_name(false)?)
7641 } else {
7642 None
7643 };
7644 Ok(Statement::Drop {
7645 object_type,
7646 if_exists,
7647 names,
7648 cascade,
7649 restrict,
7650 purge,
7651 temporary,
7652 table,
7653 })
7654 }
7655
7656 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7657 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7658 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7659 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7660 _ => None,
7661 }
7662 }
7663
7664 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7669 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7670 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7671 let drop_behavior = self.parse_optional_drop_behavior();
7672 Ok(DropFunction {
7673 if_exists,
7674 func_desc,
7675 drop_behavior,
7676 })
7677 }
7678
7679 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7685 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7686 let name = self.parse_identifier()?;
7687 self.expect_keyword_is(Keyword::ON)?;
7688 let table_name = self.parse_object_name(false)?;
7689 let drop_behavior = self.parse_optional_drop_behavior();
7690 Ok(DropPolicy {
7691 if_exists,
7692 name,
7693 table_name,
7694 drop_behavior,
7695 })
7696 }
7697 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7703 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7704 let name = self.parse_identifier()?;
7705 Ok(Statement::DropConnector { if_exists, name })
7706 }
7707
7708 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7712 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7713 let name = self.parse_object_name(false)?;
7714 let drop_behavior = self.parse_optional_drop_behavior();
7715 Ok(DropDomain {
7716 if_exists,
7717 name,
7718 drop_behavior,
7719 })
7720 }
7721
7722 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7727 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7728 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7729 let drop_behavior = self.parse_optional_drop_behavior();
7730 Ok(Statement::DropProcedure {
7731 if_exists,
7732 proc_desc,
7733 drop_behavior,
7734 })
7735 }
7736
7737 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7738 let name = self.parse_object_name(false)?;
7739
7740 let args = if self.consume_token(&Token::LParen) {
7741 if self.consume_token(&Token::RParen) {
7742 Some(vec![])
7743 } else {
7744 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7745 self.expect_token(&Token::RParen)?;
7746 Some(args)
7747 }
7748 } else {
7749 None
7750 };
7751
7752 Ok(FunctionDesc { name, args })
7753 }
7754
    /// Parses the tail of `DROP [TEMPORARY | PERSISTENT] SECRET` (DuckDB).
    ///
    /// `temporary` / `persistent` were consumed by `parse_drop` before
    /// dispatching here and are folded into a single `Option<bool>` below.
    fn parse_drop_secret(
        &mut self,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        // Optional `FROM storage` clause.
        // NOTE(review): a failed identifier parse after FROM is silently
        // discarded by `.ok()`, leaving `storage_specifier` as None —
        // confirm this best-effort behavior is intended.
        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
            self.parse_identifier().ok()
        } else {
            None
        };
        // TEMPORARY => Some(true), PERSISTENT => Some(false), neither =>
        // None; specifying both is rejected.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::DropSecret {
            if_exists,
            temporary: temp,
            name,
            storage_specifier,
        })
    }
7782
    /// Parses a `DECLARE` statement.
    ///
    /// BigQuery, Snowflake and MsSql have their own variable-declaration
    /// grammars and are dispatched to dedicated sub-parsers; every other
    /// dialect gets the SQL-standard/PostgreSQL cursor form:
    /// `DECLARE name [BINARY] [ASENSITIVE | INSENSITIVE] [[NO] SCROLL]
    ///  CURSOR [WITH | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // BINARY is recorded even when absent (Some(false)) so the cursor
        // form can be distinguished from dialect forms that leave it None.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // parse_one_of_keywords only returns a listed keyword.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7857
7858 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7866 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7867
7868 let data_type = match &self.peek_token_ref().token {
7869 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7870 _ => Some(self.parse_data_type()?),
7871 };
7872
7873 let expr = if data_type.is_some() {
7874 if self.parse_keyword(Keyword::DEFAULT) {
7875 Some(self.parse_expr()?)
7876 } else {
7877 None
7878 }
7879 } else {
7880 self.expect_keyword_is(Keyword::DEFAULT)?;
7883 Some(self.parse_expr()?)
7884 };
7885
7886 Ok(Statement::Declare {
7887 stmts: vec![Declare {
7888 names,
7889 data_type,
7890 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7891 declare_type: None,
7892 binary: None,
7893 sensitive: None,
7894 scroll: None,
7895 hold: None,
7896 for_query: None,
7897 }],
7898 })
7899 }
7900
    /// Parses Snowflake's scripting `DECLARE` block, which can contain
    /// several declarations (variables, cursors, resultsets, exceptions)
    /// separated by semicolons.
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            // Each declaration is one of four shapes; the tuple collects the
            // fields that differ between them.
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` holds a query; `CURSOR FOR
                    // expr` (e.g. a variable holding a query) holds an
                    // expression instead.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // RESULTSET may optionally be initialized; a following
                    // semicolon means "no initializer".
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // EXCEPTION may carry a parenthesized (code, message).
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: `name [type] [DEFAULT | := expr]` —
                    // try the initializer first, then an optional data type
                    // followed by another initializer attempt.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            // After a semicolon, another declaration follows only if the
            // next word is a non-keyword identifier; otherwise rewind so the
            // semicolon ends the DECLARE block.
            if self.consume_token(&Token::SemiColon) {
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8016
8017 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8029 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8030
8031 Ok(Statement::Declare { stmts })
8032 }
8033
    /// Parses one MsSql declaration:
    /// `@name [AS] type [= expr]` or `name CURSOR [FOR query]`.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            // Variables must start with `@`; a bare identifier is only
            // valid when followed by CURSOR.
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): this reports a TokenizerError even though it
                // is a parse-level failure — callers matching on the variant
                // may rely on this, so it is left as-is.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // CURSOR has no data type; `AS` before the type is optional.
        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // `FOR query` (cursors) and `= expr` (variables) are mutually
        // exclusive tails.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
8097
8098 pub fn parse_snowflake_variable_declaration_expression(
8106 &mut self,
8107 ) -> Result<Option<DeclareAssignment>, ParserError> {
8108 Ok(match &self.peek_token_ref().token {
8109 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8110 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8112 }
8113 Token::Assignment => {
8114 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8116 self.parse_expr()?,
8117 )))
8118 }
8119 _ => None,
8120 })
8121 }
8122
8123 pub fn parse_mssql_variable_declaration_expression(
8130 &mut self,
8131 ) -> Result<Option<DeclareAssignment>, ParserError> {
8132 Ok(match &self.peek_token_ref().token {
8133 Token::Eq => {
8134 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8136 self.parse_expr()?,
8137 )))
8138 }
8139 _ => None,
8140 })
8141 }
8142
8143 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
8145 let direction = if self.parse_keyword(Keyword::NEXT) {
8146 FetchDirection::Next
8147 } else if self.parse_keyword(Keyword::PRIOR) {
8148 FetchDirection::Prior
8149 } else if self.parse_keyword(Keyword::FIRST) {
8150 FetchDirection::First
8151 } else if self.parse_keyword(Keyword::LAST) {
8152 FetchDirection::Last
8153 } else if self.parse_keyword(Keyword::ABSOLUTE) {
8154 FetchDirection::Absolute {
8155 limit: self.parse_number_value()?,
8156 }
8157 } else if self.parse_keyword(Keyword::RELATIVE) {
8158 FetchDirection::Relative {
8159 limit: self.parse_number_value()?,
8160 }
8161 } else if self.parse_keyword(Keyword::FORWARD) {
8162 if self.parse_keyword(Keyword::ALL) {
8163 FetchDirection::ForwardAll
8164 } else {
8165 FetchDirection::Forward {
8166 limit: Some(self.parse_number_value()?),
8168 }
8169 }
8170 } else if self.parse_keyword(Keyword::BACKWARD) {
8171 if self.parse_keyword(Keyword::ALL) {
8172 FetchDirection::BackwardAll
8173 } else {
8174 FetchDirection::Backward {
8175 limit: Some(self.parse_number_value()?),
8177 }
8178 }
8179 } else if self.parse_keyword(Keyword::ALL) {
8180 FetchDirection::All
8181 } else {
8182 FetchDirection::Count {
8183 limit: self.parse_number_value()?,
8184 }
8185 };
8186
8187 let position = if self.peek_keyword(Keyword::FROM) {
8188 self.expect_keyword(Keyword::FROM)?;
8189 FetchPosition::From
8190 } else if self.peek_keyword(Keyword::IN) {
8191 self.expect_keyword(Keyword::IN)?;
8192 FetchPosition::In
8193 } else {
8194 return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
8195 };
8196
8197 let name = self.parse_identifier()?;
8198
8199 let into = if self.parse_keyword(Keyword::INTO) {
8200 Some(self.parse_object_name(false)?)
8201 } else {
8202 None
8203 };
8204
8205 Ok(Statement::Fetch {
8206 name,
8207 direction,
8208 position,
8209 into,
8210 })
8211 }
8212
8213 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8215 let object_type = if self.parse_keyword(Keyword::ALL) {
8216 DiscardObject::ALL
8217 } else if self.parse_keyword(Keyword::PLANS) {
8218 DiscardObject::PLANS
8219 } else if self.parse_keyword(Keyword::SEQUENCES) {
8220 DiscardObject::SEQUENCES
8221 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8222 DiscardObject::TEMP
8223 } else {
8224 return self.expected_ref(
8225 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8226 self.peek_token_ref(),
8227 );
8228 };
8229 Ok(Statement::Discard { object_type })
8230 }
8231
    /// Parses the tail of `CREATE [UNIQUE] INDEX` across dialects
    /// (PostgreSQL, MySQL, ...). `unique` reflects a UNIQUE keyword already
    /// consumed by the caller.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional (`CREATE INDEX ON t(...)`), but with
        // IF NOT EXISTS a name must follow; USING may appear either before
        // or after ON — the pre-ON position is captured here.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // A USING after the table name wins; otherwise keep the earlier one.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // PostgreSQL covering indexes: INCLUDE (col, ...).
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // PostgreSQL `NULLS [NOT] DISTINCT`.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Storage parameters: WITH (param [= value], ...), dialect-gated.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Partial index predicate: WHERE expr.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // MySQL allows trailing ALGORITHM/LOCK clauses, parsed via the
        // ALTER TABLE operation parser.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8323
8324 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8326 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8327 let name = self.parse_identifier()?;
8328
8329 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8330 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8331 Some(self.parse_identifier()?)
8332 } else {
8333 None
8334 };
8335
8336 let version = if self.parse_keyword(Keyword::VERSION) {
8337 Some(self.parse_identifier()?)
8338 } else {
8339 None
8340 };
8341
8342 let cascade = self.parse_keyword(Keyword::CASCADE);
8343
8344 (schema, version, cascade)
8345 } else {
8346 (None, None, false)
8347 };
8348
8349 Ok(CreateExtension {
8350 name,
8351 if_not_exists,
8352 schema,
8353 version,
8354 cascade,
8355 })
8356 }
8357
8358 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8360 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8361 let name = self.parse_object_name(false)?;
8362
8363 let definition = if self.parse_keyword(Keyword::FROM) {
8364 CreateCollationDefinition::From(self.parse_object_name(false)?)
8365 } else if self.consume_token(&Token::LParen) {
8366 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8367 self.expect_token(&Token::RParen)?;
8368 CreateCollationDefinition::Options(options)
8369 } else {
8370 return self.expected_ref(
8371 "FROM or parenthesized option list after CREATE COLLATION name",
8372 self.peek_token_ref(),
8373 );
8374 };
8375
8376 Ok(CreateCollation {
8377 if_not_exists,
8378 name,
8379 definition,
8380 })
8381 }
8382
8383 pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
8385 if self.parse_keyword(Keyword::CONFIGURATION) {
8386 let name = self.parse_object_name(false)?;
8387 self.expect_token(&Token::LParen)?;
8388 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8389 self.expect_token(&Token::RParen)?;
8390 Ok(Statement::CreateTextSearchConfiguration(
8391 CreateTextSearchConfiguration { name, options },
8392 ))
8393 } else if self.parse_keyword(Keyword::DICTIONARY) {
8394 let name = self.parse_object_name(false)?;
8395 self.expect_token(&Token::LParen)?;
8396 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8397 self.expect_token(&Token::RParen)?;
8398 Ok(Statement::CreateTextSearchDictionary(
8399 CreateTextSearchDictionary { name, options },
8400 ))
8401 } else if self.parse_keyword(Keyword::PARSER) {
8402 let name = self.parse_object_name(false)?;
8403 self.expect_token(&Token::LParen)?;
8404 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8405 self.expect_token(&Token::RParen)?;
8406 Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
8407 name,
8408 options,
8409 }))
8410 } else if self.parse_keyword(Keyword::TEMPLATE) {
8411 let name = self.parse_object_name(false)?;
8412 self.expect_token(&Token::LParen)?;
8413 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8414 self.expect_token(&Token::RParen)?;
8415 Ok(Statement::CreateTextSearchTemplate(
8416 CreateTextSearchTemplate { name, options },
8417 ))
8418 } else {
8419 self.expected_ref(
8420 "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
8421 self.peek_token_ref(),
8422 )
8423 }
8424 }
8425
8426 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8428 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8429 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8430 let cascade_or_restrict =
8431 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8432 Ok(Statement::DropExtension(DropExtension {
8433 names,
8434 if_exists,
8435 cascade_or_restrict: cascade_or_restrict
8436 .map(|k| match k {
8437 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8438 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8439 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8440 })
8441 .transpose()?,
8442 }))
8443 }
8444
8445 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8448 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8449 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8450 let drop_behavior = self.parse_optional_drop_behavior();
8451 Ok(Statement::DropOperator(DropOperator {
8452 if_exists,
8453 operators,
8454 drop_behavior,
8455 }))
8456 }
8457
8458 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8461 let name = self.parse_operator_name()?;
8462 self.expect_token(&Token::LParen)?;
8463
8464 let left_type = if self.parse_keyword(Keyword::NONE) {
8466 None
8467 } else {
8468 Some(self.parse_data_type()?)
8469 };
8470
8471 self.expect_token(&Token::Comma)?;
8472
8473 let right_type = self.parse_data_type()?;
8475
8476 self.expect_token(&Token::RParen)?;
8477
8478 Ok(DropOperatorSignature {
8479 name,
8480 left_type,
8481 right_type,
8482 })
8483 }
8484
8485 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8489 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8490 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8491 self.expect_keyword(Keyword::USING)?;
8492 let using = self.parse_identifier()?;
8493 let drop_behavior = self.parse_optional_drop_behavior();
8494 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8495 if_exists,
8496 names,
8497 using,
8498 drop_behavior,
8499 }))
8500 }
8501
8502 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8506 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8507 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8508 self.expect_keyword(Keyword::USING)?;
8509 let using = self.parse_identifier()?;
8510 let drop_behavior = self.parse_optional_drop_behavior();
8511 Ok(Statement::DropOperatorClass(DropOperatorClass {
8512 if_exists,
8513 names,
8514 using,
8515 drop_behavior,
8516 }))
8517 }
8518
8519 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8523 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8524 self.expect_token(&Token::LParen)?;
8525 let columns =
8526 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8527 self.expect_token(&Token::RParen)?;
8528 Ok(HiveDistributionStyle::PARTITIONED { columns })
8529 } else {
8530 Ok(HiveDistributionStyle::NONE)
8531 }
8532 }
8533
8534 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8538 let token = self.next_token();
8539 match &token.token {
8540 Token::Word(w) => match w.keyword {
8541 Keyword::AUTO => Ok(DistStyle::Auto),
8542 Keyword::EVEN => Ok(DistStyle::Even),
8543 Keyword::KEY => Ok(DistStyle::Key),
8544 Keyword::ALL => Ok(DistStyle::All),
8545 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8546 },
8547 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8548 }
8549 }
8550
    /// Parses Hive table-format clauses that may follow a `CREATE TABLE`
    /// column list: `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'`, and
    /// `WITH SERDEPROPERTIES (...)`. The clauses may appear in any order;
    /// returns `None` when none are present.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    // ROW FORMAT SERDE '...' | ROW FORMAT DELIMITED ...
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // STORED AS INPUTFORMAT '...' OUTPUTFORMAT '...'
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // STORED AS <file format>
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Push WITH back so the options parser can match the full
                    // `WITH SERDEPROPERTIES (...)` keyword sequence itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // WITH was not followed by SERDEPROPERTIES: it belongs
                        // to a later clause, so stop (WITH is left unconsumed).
                        break;
                    }
                }
                None => break,
                // parse_one_of_keywords only yields the keywords listed above;
                // this arm exists for match exhaustiveness.
                _ => break,
            }
        }

        Ok(hive_format)
    }
8606
    /// Parses a Hive `ROW FORMAT` clause (the `ROW` keyword has already been
    /// consumed): either `FORMAT SERDE '<class>'` or `FORMAT [DELIMITED]`
    /// followed by any number of delimiter specifications.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED: collect delimiter specifications until a keyword
                // sequence fails to match.
                // NOTE(review): when a leading keyword (e.g. FIELDS) matches
                // but its required continuation (e.g. TERMINATED BY) does not,
                // the loop breaks with that keyword already consumed — confirm
                // this backtracking-free behavior is intended.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            // FIELDS TERMINATED BY 'c' [ESCAPED BY 'c']
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            // COLLECTION ITEMS TERMINATED BY 'c'
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            // MAP KEYS TERMINATED BY 'c'
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            // LINES TERMINATED BY 'c'
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            // NULL DEFINED AS 'c'
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8704 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8705 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8706 Ok(Some(self.parse_identifier()?))
8707 } else {
8708 Ok(None)
8709 }
8710 }
8711
    /// Parses the body of a `CREATE TABLE` statement after the caller has
    /// consumed `CREATE [OR REPLACE] [GLOBAL|LOCAL] [TEMPORARY] [TRANSIENT]
    /// TABLE`; the flags carry those already-parsed modifiers.
    ///
    /// Accepts clauses from multiple dialects (BigQuery names, Hive
    /// formats, ClickHouse PRIMARY KEY/ORDER BY, PostgreSQL `PARTITION OF`,
    /// Redshift-style DISTSTYLE/DISTKEY/SORTKEY/BACKUP, SQLite WITHOUT
    /// ROWID/STRICT, `CLONE`) in the fixed order checked below.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<CreateTable, ParserError> {
        // BigQuery allows unquoted hyphens in (project-qualified) names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // PostgreSQL: CREATE TABLE <name> PARTITION OF <parent>.
        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // CLONE <source>; the source name is parsed best-effort (`.ok()`),
        // so a failed name parse yields `Some`-less clone rather than an error.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Parenthesized column/constraint list (may be absent or empty).
        let (columns, constraints) = self.parse_columns()?;
        // Hive: COMMENT '<text>' directly after the column list, without `=`.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // PARTITION OF requires a bound clause: FOR VALUES ... or DEFAULT.
        let for_values = if partition_of.is_some() {
            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
                Some(self.parse_partition_for_values()?)
            } else {
                return self.expected_ref(
                    "FOR VALUES or DEFAULT after PARTITION OF",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        // SQLite: WITHOUT ROWID.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive clauses: PARTITIONED BY, CLUSTERED BY, ROW FORMAT/STORED AS/...
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        // INHERITS / WITH / TBLPROPERTIES / PARTITION BY / CLUSTER BY /
        // OPTIONS / plain key-value options.
        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse: table-level PRIMARY KEY <expr>.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // ORDER BY <expr> or ORDER BY (<expr>, ...); empty parens allowed.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token_ref().token != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite: STRICT tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // Redshift-style BACKUP { YES | NO }.
        let backup = if self.parse_keyword(Keyword::BACKUP) {
            let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
            Some(keyword == Keyword::YES)
        } else {
            None
        };

        // Redshift-style distribution style and keys.
        let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
            Some(self.parse_dist_style()?)
        } else {
            None
        };
        let distkey = if self.parse_keyword(Keyword::DISTKEY) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };
        let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        // CTAS: `AS <query>`, or a bare SELECT for dialects that allow
        // omitting AS (the SELECT token is pushed back for parse_query).
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .backup(backup)
            .diststyle(diststyle)
            .distkey(distkey)
            .sortkey(sortkey)
            .build())
    }
8896
    /// Parses an optional `LIKE` clause of `CREATE TABLE`.
    ///
    /// Two surface forms are supported:
    /// * parenthesized `(LIKE <table> [INCLUDING | EXCLUDING DEFAULTS])`
    ///   for dialects where `supports_create_table_like_parenthesized()`;
    /// * plain `LIKE <table>` or `ILIKE <table>`.
    ///
    /// Returns `None` (consuming nothing) when no LIKE clause is present.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The `(` starts a column list, not `(LIKE ...)`: push it back.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8934
8935 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8936 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8937 Ok(OnCommit::DeleteRows)
8938 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8939 Ok(OnCommit::PreserveRows)
8940 } else if self.parse_keywords(&[Keyword::DROP]) {
8941 Ok(OnCommit::Drop)
8942 } else {
8943 parser_err!(
8944 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8945 self.peek_token_ref()
8946 )
8947 }
8948 }
8949
    /// Parses the partition bound of `CREATE TABLE ... PARTITION OF ...`
    /// (PostgreSQL syntax):
    /// * `DEFAULT`
    /// * `FOR VALUES IN (<expr>, ...)`
    /// * `FOR VALUES FROM (<bound>, ...) TO (<bound>, ...)`
    /// * `FOR VALUES WITH (MODULUS <n>, REMAINDER <n>)`
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            self.expect_token(&Token::LParen)?;
            // Empty value lists are rejected explicitly for a clearer error.
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            // Range bounds may be expressions or MINVALUE/MAXVALUE.
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            // Hash partitioning: WITH (MODULUS n, REMAINDER m).
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
        }
    }
9001
9002 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9004 if self.parse_keyword(Keyword::MINVALUE) {
9005 Ok(PartitionBoundValue::MinValue)
9006 } else if self.parse_keyword(Keyword::MAXVALUE) {
9007 Ok(PartitionBoundValue::MaxValue)
9008 } else {
9009 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9010 }
9011 }
9012
    /// Parses trailing `CREATE TABLE` configuration clauses: `INHERITS (...)`
    /// (Postgres), `WITH (...)` / `TBLPROPERTIES (...)`, `PARTITION BY`,
    /// `CLUSTER BY` / `OPTIONS (...)` (BigQuery), and otherwise plain
    /// key-value options. A later option clause replaces `table_options`
    /// set by an earlier one.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // Postgres: INHERITS (parent_table [, ...]).
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // WITH (...) options; overridden by TBLPROPERTIES below if both occur.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        // PARTITION BY <expr> (BigQuery / Postgres / generic).
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // BigQuery: CLUSTER BY <exprs> and OPTIONS (...).
        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain `key [=] value` options when no other option
        // syntax matched; Hive is excluded from this fallback.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
9077
    /// Parses one plain (mostly MySQL-style) table option such as
    /// `ENGINE [=] InnoDB`, `COMMENT [=] '...'`,
    /// `TABLESPACE [=] <name> [STORAGE ...]`, `UNION = (t1, t2)`, or one of
    /// the many `<key> [=] <value>` options. Returns `Ok(None)` when the
    /// upcoming tokens do not start any known option.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // `START TRANSACTION` is recorded as a bare identifier option.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // COMMENT [=] '<text>' — whether `=` was present is preserved.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // ENGINE [=] <name> [(<params>)]
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // TABLESPACE [=] <name> [STORAGE [=] {DISK | MEMORY}]
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                // The tablespace name may be a bare word or quoted string.
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // UNION = (tbl [, ...])
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Remaining options are simple `<key> [=] <value>` pairs; map the
        // (possibly multi-word) keyword sequence to its canonical key name.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Nothing matched: not an option, and nothing was consumed.
            return Ok(None);
        };

        // The `=` between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // The value may be a literal, or a bare identifier (e.g.
        // ROW_FORMAT=DYNAMIC).
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9270
9271 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9273 let mut options = Vec::new();
9274
9275 while let Some(option) = self.parse_plain_option()? {
9276 options.push(option);
9277 let _ = self.consume_token(&Token::Comma);
9280 }
9281
9282 Ok(options)
9283 }
9284
9285 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9287 let comment = if self.parse_keyword(Keyword::COMMENT) {
9288 let has_eq = self.consume_token(&Token::Eq);
9289 let comment = self.parse_comment_value()?;
9290 Some(if has_eq {
9291 CommentDef::WithEq(comment)
9292 } else {
9293 CommentDef::WithoutEq(comment)
9294 })
9295 } else {
9296 None
9297 };
9298 Ok(comment)
9299 }
9300
9301 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9303 let next_token = self.next_token();
9304 let value = match next_token.token {
9305 Token::SingleQuotedString(str) => str,
9306 Token::DollarQuotedString(str) => str.value,
9307 _ => self.expected("string literal", next_token)?,
9308 };
9309 Ok(value)
9310 }
9311
    /// Parses an optional parenthesized procedure parameter list.
    /// A missing list or an empty `()` both yield `Some(vec![])`.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // Only a word can start a parameter; otherwise fall through to
            // the separator checks below.
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // `)` ends the list — also directly after a trailing comma.
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9337
    /// Parses a parenthesized list of column definitions and table
    /// constraints. Returns empty vectors when there is no `(` or the list
    /// is `()`. A trailing comma before `)` is accepted only when the
    /// dialect or the parser options allow trailing commas.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // A table constraint is tried first; otherwise a word must start
            // a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // `)` closes the list; `,)` only if trailing commas are allowed.
            // When `,)` is seen but not allowed, the loop continues and the
            // next iteration reports the error at the `)`.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9378
9379 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9381 let mode = if self.parse_keyword(Keyword::IN) {
9382 Some(ArgMode::In)
9383 } else if self.parse_keyword(Keyword::OUT) {
9384 Some(ArgMode::Out)
9385 } else if self.parse_keyword(Keyword::INOUT) {
9386 Some(ArgMode::InOut)
9387 } else {
9388 None
9389 };
9390 let name = self.parse_identifier()?;
9391 let data_type = self.parse_data_type()?;
9392 let default = if self.consume_token(&Token::Eq) {
9393 Some(self.parse_expr()?)
9394 } else {
9395 None
9396 };
9397
9398 Ok(ProcedureParam {
9399 name,
9400 data_type,
9401 mode,
9402 default,
9403 })
9404 }
9405
    /// Parses a single column definition (`<name> <type> [options...]`);
    /// the data type is mandatory except for SQLite's untyped columns.
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9410
    /// Parses one column definition: `<name> [<data_type>] [options...]`.
    ///
    /// The data type is treated as `Unspecified` for SQLite when the next
    /// token starts a column option (see `is_column_type_sqlite_unspecified`),
    /// or parsed best-effort when `optional_data_type` is set (used e.g. for
    /// Hive `PARTITIONED BY` columns). Each option may be prefixed with
    /// `CONSTRAINT <name>`.
    fn parse_column_def_inner(
        &mut self,
        optional_data_type: bool,
    ) -> Result<ColumnDef, ParserError> {
        let col_name = self.parse_identifier()?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else if optional_data_type {
            self.maybe_parse(|parser| parser.parse_data_type())?
                .unwrap_or(DataType::Unspecified)
        } else {
            self.parse_data_type()?
        };
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // CONSTRAINT <name> must be followed by an actual option.
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected_ref(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token_ref(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                break;
            };
        }
        Ok(ColumnDef {
            name: col_name,
            data_type,
            options,
        })
    }
9448
9449 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9450 if dialect_of!(self is SQLiteDialect) {
9451 match &self.peek_token_ref().token {
9452 Token::Word(word) => matches!(
9453 word.keyword,
9454 Keyword::CONSTRAINT
9455 | Keyword::PRIMARY
9456 | Keyword::NOT
9457 | Keyword::UNIQUE
9458 | Keyword::CHECK
9459 | Keyword::DEFAULT
9460 | Keyword::COLLATE
9461 | Keyword::REFERENCES
9462 | Keyword::GENERATED
9463 | Keyword::AS
9464 ),
9465 _ => true, }
9467 } else {
9468 false
9469 }
9470 }
9471
9472 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9474 if let Some(option) = self.dialect.parse_column_option(self)? {
9475 return option;
9476 }
9477
9478 self.with_state(
9479 ColumnDefinition,
9480 |parser| -> Result<Option<ColumnOption>, ParserError> {
9481 parser.parse_optional_column_option_inner()
9482 },
9483 )
9484 }
9485
9486 fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9487 if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
9488 Ok(Some(ColumnOption::CharacterSet(
9489 self.parse_object_name(false)?,
9490 )))
9491 } else if self.parse_keywords(&[Keyword::COLLATE]) {
9492 Ok(Some(ColumnOption::Collation(
9493 self.parse_object_name(false)?,
9494 )))
9495 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
9496 Ok(Some(ColumnOption::NotNull))
9497 } else if self.parse_keywords(&[Keyword::COMMENT]) {
9498 Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
9499 } else if self.parse_keyword(Keyword::NULL) {
9500 Ok(Some(ColumnOption::Null))
9501 } else if self.parse_keyword(Keyword::DEFAULT) {
9502 Ok(Some(ColumnOption::Default(self.parse_expr()?)))
9503 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9504 && self.parse_keyword(Keyword::MATERIALIZED)
9505 {
9506 Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
9507 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9508 && self.parse_keyword(Keyword::ALIAS)
9509 {
9510 Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
9511 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9512 && self.parse_keyword(Keyword::EPHEMERAL)
9513 {
9514 if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
9517 Ok(Some(ColumnOption::Ephemeral(None)))
9518 } else {
9519 Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
9520 }
9521 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9522 let characteristics = self.parse_constraint_characteristics()?;
9523 Ok(Some(
9524 PrimaryKeyConstraint {
9525 name: None,
9526 index_name: None,
9527 index_type: None,
9528 columns: vec![],
9529 index_options: vec![],
9530 characteristics,
9531 }
9532 .into(),
9533 ))
9534 } else if self.parse_keyword(Keyword::UNIQUE) {
9535 let index_type_display =
9536 if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
9537 KeyOrIndexDisplay::Key
9538 } else {
9539 KeyOrIndexDisplay::None
9540 };
9541 let characteristics = self.parse_constraint_characteristics()?;
9542 Ok(Some(
9543 UniqueConstraint {
9544 name: None,
9545 index_name: None,
9546 index_type_display,
9547 index_type: None,
9548 columns: vec![],
9549 index_options: vec![],
9550 characteristics,
9551 nulls_distinct: NullsDistinctOption::None,
9552 }
9553 .into(),
9554 ))
9555 } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
9556 let characteristics = self.parse_constraint_characteristics()?;
9559 Ok(Some(
9560 PrimaryKeyConstraint {
9561 name: None,
9562 index_name: None,
9563 index_type: None,
9564 columns: vec![],
9565 index_options: vec![],
9566 characteristics,
9567 }
9568 .into(),
9569 ))
9570 } else if self.parse_keyword(Keyword::REFERENCES) {
9571 let foreign_table = self.parse_object_name(false)?;
9572 let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9575 let mut match_kind = None;
9576 let mut on_delete = None;
9577 let mut on_update = None;
9578 loop {
9579 if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9580 match_kind = Some(self.parse_match_kind()?);
9581 } else if on_delete.is_none()
9582 && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9583 {
9584 on_delete = Some(self.parse_referential_action()?);
9585 } else if on_update.is_none()
9586 && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9587 {
9588 on_update = Some(self.parse_referential_action()?);
9589 } else {
9590 break;
9591 }
9592 }
9593 let characteristics = self.parse_constraint_characteristics()?;
9594
9595 Ok(Some(
9596 ForeignKeyConstraint {
9597 name: None, index_name: None, columns: vec![], foreign_table,
9601 referred_columns,
9602 on_delete,
9603 on_update,
9604 match_kind,
9605 characteristics,
9606 }
9607 .into(),
9608 ))
9609 } else if self.parse_keyword(Keyword::CHECK) {
9610 self.expect_token(&Token::LParen)?;
9611 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9613 self.expect_token(&Token::RParen)?;
9614
9615 let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9616 Some(true)
9617 } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9618 Some(false)
9619 } else {
9620 None
9621 };
9622
9623 Ok(Some(
9624 CheckConstraint {
9625 name: None, expr: Box::new(expr),
9627 enforced,
9628 }
9629 .into(),
9630 ))
9631 } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
9632 && dialect_of!(self is MySqlDialect | GenericDialect)
9633 {
9634 Ok(Some(ColumnOption::DialectSpecific(vec![
9636 Token::make_keyword("AUTO_INCREMENT"),
9637 ])))
9638 } else if self.parse_keyword(Keyword::AUTOINCREMENT)
9639 && dialect_of!(self is SQLiteDialect | GenericDialect)
9640 {
9641 Ok(Some(ColumnOption::DialectSpecific(vec![
9643 Token::make_keyword("AUTOINCREMENT"),
9644 ])))
9645 } else if self.parse_keyword(Keyword::ASC)
9646 && self.dialect.supports_asc_desc_in_column_definition()
9647 {
9648 Ok(Some(ColumnOption::DialectSpecific(vec![
9650 Token::make_keyword("ASC"),
9651 ])))
9652 } else if self.parse_keyword(Keyword::DESC)
9653 && self.dialect.supports_asc_desc_in_column_definition()
9654 {
9655 Ok(Some(ColumnOption::DialectSpecific(vec![
9657 Token::make_keyword("DESC"),
9658 ])))
9659 } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9660 && dialect_of!(self is MySqlDialect | GenericDialect)
9661 {
9662 let expr = self.parse_expr()?;
9663 Ok(Some(ColumnOption::OnUpdate(expr)))
9664 } else if self.parse_keyword(Keyword::GENERATED) {
9665 self.parse_optional_column_option_generated()
9666 } else if dialect_of!(self is BigQueryDialect | GenericDialect)
9667 && self.parse_keyword(Keyword::OPTIONS)
9668 {
9669 self.prev_token();
9670 Ok(Some(ColumnOption::Options(
9671 self.parse_options(Keyword::OPTIONS)?,
9672 )))
9673 } else if self.parse_keyword(Keyword::AS)
9674 && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
9675 {
9676 self.parse_optional_column_option_as()
9677 } else if self.parse_keyword(Keyword::SRID)
9678 && dialect_of!(self is MySqlDialect | GenericDialect)
9679 {
9680 Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
9681 } else if self.parse_keyword(Keyword::IDENTITY)
9682 && dialect_of!(self is MsSqlDialect | GenericDialect)
9683 {
9684 let parameters = if self.consume_token(&Token::LParen) {
9685 let seed = self.parse_number()?;
9686 self.expect_token(&Token::Comma)?;
9687 let increment = self.parse_number()?;
9688 self.expect_token(&Token::RParen)?;
9689
9690 Some(IdentityPropertyFormatKind::FunctionCall(
9691 IdentityParameters { seed, increment },
9692 ))
9693 } else {
9694 None
9695 };
9696 Ok(Some(ColumnOption::Identity(
9697 IdentityPropertyKind::Identity(IdentityProperty {
9698 parameters,
9699 order: None,
9700 }),
9701 )))
9702 } else if dialect_of!(self is SQLiteDialect | GenericDialect)
9703 && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
9704 {
9705 Ok(Some(ColumnOption::OnConflict(
9707 self.expect_one_of_keywords(&[
9708 Keyword::ROLLBACK,
9709 Keyword::ABORT,
9710 Keyword::FAIL,
9711 Keyword::IGNORE,
9712 Keyword::REPLACE,
9713 ])?,
9714 )))
9715 } else if self.parse_keyword(Keyword::INVISIBLE) {
9716 Ok(Some(ColumnOption::Invisible))
9717 } else {
9718 Ok(None)
9719 }
9720 }
9721
9722 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9723 let name = self.parse_object_name(false)?;
9724 self.expect_token(&Token::Eq)?;
9725 let value = self.parse_literal_string()?;
9726
9727 Ok(Tag::new(name, value))
9728 }
9729
9730 fn parse_optional_column_option_generated(
9731 &mut self,
9732 ) -> Result<Option<ColumnOption>, ParserError> {
9733 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
9734 let mut sequence_options = vec![];
9735 if self.expect_token(&Token::LParen).is_ok() {
9736 sequence_options = self.parse_create_sequence_options()?;
9737 self.expect_token(&Token::RParen)?;
9738 }
9739 Ok(Some(ColumnOption::Generated {
9740 generated_as: GeneratedAs::Always,
9741 sequence_options: Some(sequence_options),
9742 generation_expr: None,
9743 generation_expr_mode: None,
9744 generated_keyword: true,
9745 }))
9746 } else if self.parse_keywords(&[
9747 Keyword::BY,
9748 Keyword::DEFAULT,
9749 Keyword::AS,
9750 Keyword::IDENTITY,
9751 ]) {
9752 let mut sequence_options = vec![];
9753 if self.expect_token(&Token::LParen).is_ok() {
9754 sequence_options = self.parse_create_sequence_options()?;
9755 self.expect_token(&Token::RParen)?;
9756 }
9757 Ok(Some(ColumnOption::Generated {
9758 generated_as: GeneratedAs::ByDefault,
9759 sequence_options: Some(sequence_options),
9760 generation_expr: None,
9761 generation_expr_mode: None,
9762 generated_keyword: true,
9763 }))
9764 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
9765 if self.expect_token(&Token::LParen).is_ok() {
9766 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9767 self.expect_token(&Token::RParen)?;
9768 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9769 Ok((
9770 GeneratedAs::ExpStored,
9771 Some(GeneratedExpressionMode::Stored),
9772 ))
9773 } else if dialect_of!(self is PostgreSqlDialect) {
9774 self.expected_ref("STORED", self.peek_token_ref())
9776 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9777 Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
9778 } else {
9779 Ok((GeneratedAs::Always, None))
9780 }?;
9781
9782 Ok(Some(ColumnOption::Generated {
9783 generated_as: gen_as,
9784 sequence_options: None,
9785 generation_expr: Some(expr),
9786 generation_expr_mode: expr_mode,
9787 generated_keyword: true,
9788 }))
9789 } else {
9790 Ok(None)
9791 }
9792 } else {
9793 Ok(None)
9794 }
9795 }
9796
9797 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9798 self.expect_token(&Token::LParen)?;
9800 let expr = self.parse_expr()?;
9801 self.expect_token(&Token::RParen)?;
9802
9803 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9804 (
9805 GeneratedAs::ExpStored,
9806 Some(GeneratedExpressionMode::Stored),
9807 )
9808 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9809 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9810 } else {
9811 (GeneratedAs::Always, None)
9812 };
9813
9814 Ok(Some(ColumnOption::Generated {
9815 generated_as: gen_as,
9816 sequence_options: None,
9817 generation_expr: Some(expr),
9818 generation_expr_mode: expr_mode,
9819 generated_keyword: false,
9820 }))
9821 }
9822
9823 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9825 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9826 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9827 {
9828 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9829
9830 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9831 self.expect_token(&Token::LParen)?;
9832 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9833 self.expect_token(&Token::RParen)?;
9834 Some(sorted_by_columns)
9835 } else {
9836 None
9837 };
9838
9839 self.expect_keyword_is(Keyword::INTO)?;
9840 let num_buckets = self.parse_number_value()?.value;
9841 self.expect_keyword_is(Keyword::BUCKETS)?;
9842 Some(ClusteredBy {
9843 columns,
9844 sorted_by,
9845 num_buckets,
9846 })
9847 } else {
9848 None
9849 };
9850 Ok(clustered_by)
9851 }
9852
9853 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9857 if self.parse_keyword(Keyword::RESTRICT) {
9858 Ok(ReferentialAction::Restrict)
9859 } else if self.parse_keyword(Keyword::CASCADE) {
9860 Ok(ReferentialAction::Cascade)
9861 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9862 Ok(ReferentialAction::SetNull)
9863 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9864 Ok(ReferentialAction::NoAction)
9865 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9866 Ok(ReferentialAction::SetDefault)
9867 } else {
9868 self.expected_ref(
9869 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9870 self.peek_token_ref(),
9871 )
9872 }
9873 }
9874
9875 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9877 if self.parse_keyword(Keyword::FULL) {
9878 Ok(ConstraintReferenceMatchKind::Full)
9879 } else if self.parse_keyword(Keyword::PARTIAL) {
9880 Ok(ConstraintReferenceMatchKind::Partial)
9881 } else if self.parse_keyword(Keyword::SIMPLE) {
9882 Ok(ConstraintReferenceMatchKind::Simple)
9883 } else {
9884 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9885 }
9886 }
9887
9888 fn parse_constraint_using_index(
9891 &mut self,
9892 name: Option<Ident>,
9893 ) -> Result<ConstraintUsingIndex, ParserError> {
9894 let index_name = self.parse_identifier()?;
9895 let characteristics = self.parse_constraint_characteristics()?;
9896 Ok(ConstraintUsingIndex {
9897 name,
9898 index_name,
9899 characteristics,
9900 })
9901 }
9902
9903 pub fn parse_constraint_characteristics(
9905 &mut self,
9906 ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9907 let mut cc = ConstraintCharacteristics::default();
9908
9909 loop {
9910 if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9911 {
9912 cc.deferrable = Some(false);
9913 } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9914 cc.deferrable = Some(true);
9915 } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9916 if self.parse_keyword(Keyword::DEFERRED) {
9917 cc.initially = Some(DeferrableInitial::Deferred);
9918 } else if self.parse_keyword(Keyword::IMMEDIATE) {
9919 cc.initially = Some(DeferrableInitial::Immediate);
9920 } else {
9921 self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
9922 }
9923 } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
9924 cc.enforced = Some(true);
9925 } else if cc.enforced.is_none()
9926 && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
9927 {
9928 cc.enforced = Some(false);
9929 } else {
9930 break;
9931 }
9932 }
9933
9934 if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
9935 Ok(Some(cc))
9936 } else {
9937 Ok(None)
9938 }
9939 }
9940
    /// Parses an optional table-level constraint:
    /// `[CONSTRAINT <name>] {UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK |
    /// INDEX | KEY | FULLTEXT | SPATIAL | EXCLUDE} ...`.
    ///
    /// Returns `Ok(None)` without consuming anything when the next tokens do
    /// not start a constraint; errors if `CONSTRAINT <name>` was parsed but
    /// no constraint body follows.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            // Some dialects accept a bare `CONSTRAINT` keyword (no name)
            // directly followed by the constraint body.
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX <name>` references an existing index
                // instead of declaring a column list.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // Optional `KEY`/`INDEX` display keyword; only some dialects
                // allow it after `UNIQUE`.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                // Optional `NULLS [NOT] DISTINCT` modifier.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                // Optional index name and `USING <index type>`.
                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX <name>` references an existing
                // index instead of declaring a column list.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // Optional index name and `USING <index type>`.
                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // `MATCH`, `ON DELETE` and `ON UPDATE` may appear in any
                // order, each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // Optional `[NOT] ENFORCED` flag; `None` when absent.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                // Plain `INDEX`/`KEY` definition (no `CONSTRAINT` name
                // allowed). `display_as_key` records which keyword was used.
                let display_as_key = w.keyword == Keyword::KEY;

                // A following `USING` is the index type, not the index name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // `FULLTEXT`/`SPATIAL` definitions cannot carry a
                // `CONSTRAINT <name>` prefix; report the name as unexpected.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                // Optional `KEY`/`INDEX` display keyword.
                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // `EXCLUDE [USING <method>] (<expr> WITH <op> [, ...])
                //  [INCLUDE (...)] [WHERE (...)]`.
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements =
                    self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                // A consumed `CONSTRAINT <name>` with no recognized body is
                // an error; otherwise put the token back and report "no
                // constraint here".
                if name.is_some() {
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10217
10218 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10219 let expr = self.parse_expr()?;
10220 self.expect_keyword_is(Keyword::WITH)?;
10221 let operator_token = self.next_token();
10222 let operator = operator_token.token.to_string();
10223 Ok(ExclusionElement { expr, operator })
10224 }
10225
10226 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10227 Ok(if self.parse_keyword(Keyword::NULLS) {
10228 let not = self.parse_keyword(Keyword::NOT);
10229 self.expect_keyword_is(Keyword::DISTINCT)?;
10230 if not {
10231 NullsDistinctOption::NotDistinct
10232 } else {
10233 NullsDistinctOption::Distinct
10234 }
10235 } else {
10236 NullsDistinctOption::None
10237 })
10238 }
10239
10240 pub fn maybe_parse_options(
10242 &mut self,
10243 keyword: Keyword,
10244 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10245 if let Token::Word(word) = &self.peek_token_ref().token {
10246 if word.keyword == keyword {
10247 return Ok(Some(self.parse_options(keyword)?));
10248 }
10249 };
10250 Ok(None)
10251 }
10252
10253 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10255 if self.parse_keyword(keyword) {
10256 self.expect_token(&Token::LParen)?;
10257 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10258 self.expect_token(&Token::RParen)?;
10259 Ok(options)
10260 } else {
10261 Ok(vec![])
10262 }
10263 }
10264
10265 pub fn parse_options_with_keywords(
10267 &mut self,
10268 keywords: &[Keyword],
10269 ) -> Result<Vec<SqlOption>, ParserError> {
10270 if self.parse_keywords(keywords) {
10271 self.expect_token(&Token::LParen)?;
10272 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10273 self.expect_token(&Token::RParen)?;
10274 Ok(options)
10275 } else {
10276 Ok(vec![])
10277 }
10278 }
10279
10280 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10282 Ok(if self.parse_keyword(Keyword::BTREE) {
10283 IndexType::BTree
10284 } else if self.parse_keyword(Keyword::HASH) {
10285 IndexType::Hash
10286 } else if self.parse_keyword(Keyword::GIN) {
10287 IndexType::GIN
10288 } else if self.parse_keyword(Keyword::GIST) {
10289 IndexType::GiST
10290 } else if self.parse_keyword(Keyword::SPGIST) {
10291 IndexType::SPGiST
10292 } else if self.parse_keyword(Keyword::BRIN) {
10293 IndexType::BRIN
10294 } else if self.parse_keyword(Keyword::BLOOM) {
10295 IndexType::Bloom
10296 } else {
10297 IndexType::Custom(self.parse_identifier()?)
10298 })
10299 }
10300
10301 pub fn parse_optional_using_then_index_type(
10308 &mut self,
10309 ) -> Result<Option<IndexType>, ParserError> {
10310 if self.parse_keyword(Keyword::USING) {
10311 Ok(Some(self.parse_index_type()?))
10312 } else {
10313 Ok(None)
10314 }
10315 }
10316
10317 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10321 self.maybe_parse(|parser| parser.parse_identifier())
10322 }
10323
10324 #[must_use]
10325 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10327 if self.parse_keyword(Keyword::KEY) {
10328 KeyOrIndexDisplay::Key
10329 } else if self.parse_keyword(Keyword::INDEX) {
10330 KeyOrIndexDisplay::Index
10331 } else {
10332 KeyOrIndexDisplay::None
10333 }
10334 }
10335
10336 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10338 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10339 Ok(Some(IndexOption::Using(index_type)))
10340 } else if self.parse_keyword(Keyword::COMMENT) {
10341 let s = self.parse_literal_string()?;
10342 Ok(Some(IndexOption::Comment(s)))
10343 } else {
10344 Ok(None)
10345 }
10346 }
10347
10348 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10350 let mut options = Vec::new();
10351
10352 loop {
10353 match self.parse_optional_index_option()? {
10354 Some(index_option) => options.push(index_option),
10355 None => return Ok(options),
10356 }
10357 }
10358 }
10359
10360 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10362 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10363
10364 match &self.peek_token_ref().token {
10365 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10366 Ok(SqlOption::Ident(self.parse_identifier()?))
10367 }
10368 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10369 self.parse_option_partition()
10370 }
10371 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10372 self.parse_option_clustered()
10373 }
10374 _ => {
10375 let name = self.parse_identifier()?;
10376 self.expect_token(&Token::Eq)?;
10377 let value = self.parse_expr()?;
10378
10379 Ok(SqlOption::KeyValue { key: name, value })
10380 }
10381 }
10382 }
10383
10384 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
10386 if self.parse_keywords(&[
10387 Keyword::CLUSTERED,
10388 Keyword::COLUMNSTORE,
10389 Keyword::INDEX,
10390 Keyword::ORDER,
10391 ]) {
10392 Ok(SqlOption::Clustered(
10393 TableOptionsClustered::ColumnstoreIndexOrder(
10394 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10395 ),
10396 ))
10397 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
10398 Ok(SqlOption::Clustered(
10399 TableOptionsClustered::ColumnstoreIndex,
10400 ))
10401 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
10402 self.expect_token(&Token::LParen)?;
10403
10404 let columns = self.parse_comma_separated(|p| {
10405 let name = p.parse_identifier()?;
10406 let asc = p.parse_asc_desc();
10407
10408 Ok(ClusteredIndex { name, asc })
10409 })?;
10410
10411 self.expect_token(&Token::RParen)?;
10412
10413 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
10414 } else {
10415 Err(ParserError::ParserError(
10416 "invalid CLUSTERED sequence".to_string(),
10417 ))
10418 }
10419 }
10420
    /// Parses a `PARTITION (<column> RANGE [LEFT | RIGHT] FOR VALUES (<expr>, ...))`
    /// table option.
    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
        self.expect_keyword_is(Keyword::PARTITION)?;
        self.expect_token(&Token::LParen)?;
        let column_name = self.parse_identifier()?;

        self.expect_keyword_is(Keyword::RANGE)?;
        // The range direction is optional; absent means neither LEFT nor RIGHT.
        let range_direction = if self.parse_keyword(Keyword::LEFT) {
            Some(PartitionRangeDirection::Left)
        } else if self.parse_keyword(Keyword::RIGHT) {
            Some(PartitionRangeDirection::Right)
        } else {
            None
        };

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
        self.expect_token(&Token::LParen)?;

        let for_values = self.parse_comma_separated(Parser::parse_expr)?;

        self.expect_token(&Token::RParen)?;
        // Second RParen closes the paren opened right after `PARTITION`.
        self.expect_token(&Token::RParen)?;

        Ok(SqlOption::Partition {
            column_name,
            range_direction,
            for_values,
        })
    }
10450
10451 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10453 self.expect_token(&Token::LParen)?;
10454 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10455 self.expect_token(&Token::RParen)?;
10456 Ok(Partition::Partitions(partitions))
10457 }
10458
10459 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10461 self.expect_token(&Token::LParen)?;
10462 self.expect_keyword_is(Keyword::SELECT)?;
10463 let projection = self.parse_projection()?;
10464 let group_by = self.parse_optional_group_by()?;
10465 let order_by = self.parse_optional_order_by()?;
10466 self.expect_token(&Token::RParen)?;
10467 Ok(ProjectionSelect {
10468 projection,
10469 group_by,
10470 order_by,
10471 })
10472 }
10473 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10475 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10476 let name = self.parse_identifier()?;
10477 let query = self.parse_projection_select()?;
10478 Ok(AlterTableOperation::AddProjection {
10479 if_not_exists,
10480 name,
10481 select: query,
10482 })
10483 }
10484
10485 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10489 self.expect_keyword_is(Keyword::ALTER)?;
10490 self.expect_keyword_is(Keyword::SORTKEY)?;
10491 self.expect_token(&Token::LParen)?;
10492 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10493 self.expect_token(&Token::RParen)?;
10494 Ok(AlterTableOperation::AlterSortKey { columns })
10495 }
10496
10497 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10499 let operation = if self.parse_keyword(Keyword::ADD) {
10500 if let Some(constraint) = self.parse_optional_table_constraint()? {
10501 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10502 AlterTableOperation::AddConstraint {
10503 constraint,
10504 not_valid,
10505 }
10506 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10507 && self.parse_keyword(Keyword::PROJECTION)
10508 {
10509 return self.parse_alter_table_add_projection();
10510 } else {
10511 let if_not_exists =
10512 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10513 let mut new_partitions = vec![];
10514 loop {
10515 if self.parse_keyword(Keyword::PARTITION) {
10516 new_partitions.push(self.parse_partition()?);
10517 } else {
10518 break;
10519 }
10520 }
10521 if !new_partitions.is_empty() {
10522 AlterTableOperation::AddPartitions {
10523 if_not_exists,
10524 new_partitions,
10525 }
10526 } else {
10527 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10528
10529 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10530 {
10531 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10532 || if_not_exists
10533 } else {
10534 false
10535 };
10536
10537 let column_def = self.parse_column_def()?;
10538
10539 let column_position = self.parse_column_position()?;
10540
10541 AlterTableOperation::AddColumn {
10542 column_keyword,
10543 if_not_exists,
10544 column_def,
10545 column_position,
10546 }
10547 }
10548 }
10549 } else if self.parse_keyword(Keyword::RENAME) {
10550 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10551 let old_name = self.parse_identifier()?;
10552 self.expect_keyword_is(Keyword::TO)?;
10553 let new_name = self.parse_identifier()?;
10554 AlterTableOperation::RenameConstraint { old_name, new_name }
10555 } else if self.parse_keyword(Keyword::TO) {
10556 let table_name = self.parse_object_name(false)?;
10557 AlterTableOperation::RenameTable {
10558 table_name: RenameTableNameKind::To(table_name),
10559 }
10560 } else if self.parse_keyword(Keyword::AS) {
10561 let table_name = self.parse_object_name(false)?;
10562 AlterTableOperation::RenameTable {
10563 table_name: RenameTableNameKind::As(table_name),
10564 }
10565 } else {
10566 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10568 self.expect_keyword_is(Keyword::TO)?;
10569 let new_column_name = self.parse_identifier()?;
10570 AlterTableOperation::RenameColumn {
10571 old_column_name,
10572 new_column_name,
10573 }
10574 }
10575 } else if self.parse_keyword(Keyword::DISABLE) {
10576 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10577 AlterTableOperation::DisableRowLevelSecurity {}
10578 } else if self.parse_keyword(Keyword::RULE) {
10579 let name = self.parse_identifier()?;
10580 AlterTableOperation::DisableRule { name }
10581 } else if self.parse_keyword(Keyword::TRIGGER) {
10582 let name = self.parse_identifier()?;
10583 AlterTableOperation::DisableTrigger { name }
10584 } else {
10585 return self.expected_ref(
10586 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10587 self.peek_token_ref(),
10588 );
10589 }
10590 } else if self.parse_keyword(Keyword::ENABLE) {
10591 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10592 let name = self.parse_identifier()?;
10593 AlterTableOperation::EnableAlwaysRule { name }
10594 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10595 let name = self.parse_identifier()?;
10596 AlterTableOperation::EnableAlwaysTrigger { name }
10597 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10598 AlterTableOperation::EnableRowLevelSecurity {}
10599 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10600 let name = self.parse_identifier()?;
10601 AlterTableOperation::EnableReplicaRule { name }
10602 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10603 let name = self.parse_identifier()?;
10604 AlterTableOperation::EnableReplicaTrigger { name }
10605 } else if self.parse_keyword(Keyword::RULE) {
10606 let name = self.parse_identifier()?;
10607 AlterTableOperation::EnableRule { name }
10608 } else if self.parse_keyword(Keyword::TRIGGER) {
10609 let name = self.parse_identifier()?;
10610 AlterTableOperation::EnableTrigger { name }
10611 } else {
10612 return self.expected_ref(
10613 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10614 self.peek_token_ref(),
10615 );
10616 }
10617 } else if self.parse_keywords(&[
10618 Keyword::FORCE,
10619 Keyword::ROW,
10620 Keyword::LEVEL,
10621 Keyword::SECURITY,
10622 ]) {
10623 AlterTableOperation::ForceRowLevelSecurity
10624 } else if self.parse_keywords(&[
10625 Keyword::NO,
10626 Keyword::FORCE,
10627 Keyword::ROW,
10628 Keyword::LEVEL,
10629 Keyword::SECURITY,
10630 ]) {
10631 AlterTableOperation::NoForceRowLevelSecurity
10632 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10633 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10634 {
10635 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10636 let name = self.parse_identifier()?;
10637 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10638 Some(self.parse_identifier()?)
10639 } else {
10640 None
10641 };
10642 AlterTableOperation::ClearProjection {
10643 if_exists,
10644 name,
10645 partition,
10646 }
10647 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10648 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10649 {
10650 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10651 let name = self.parse_identifier()?;
10652 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10653 Some(self.parse_identifier()?)
10654 } else {
10655 None
10656 };
10657 AlterTableOperation::MaterializeProjection {
10658 if_exists,
10659 name,
10660 partition,
10661 }
10662 } else if self.parse_keyword(Keyword::DROP) {
10663 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10664 self.expect_token(&Token::LParen)?;
10665 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10666 self.expect_token(&Token::RParen)?;
10667 AlterTableOperation::DropPartitions {
10668 partitions,
10669 if_exists: true,
10670 }
10671 } else if self.parse_keyword(Keyword::PARTITION) {
10672 self.expect_token(&Token::LParen)?;
10673 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10674 self.expect_token(&Token::RParen)?;
10675 AlterTableOperation::DropPartitions {
10676 partitions,
10677 if_exists: false,
10678 }
10679 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10680 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10681 let name = self.parse_identifier()?;
10682 let drop_behavior = self.parse_optional_drop_behavior();
10683 AlterTableOperation::DropConstraint {
10684 if_exists,
10685 name,
10686 drop_behavior,
10687 }
10688 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10689 let drop_behavior = self.parse_optional_drop_behavior();
10690 AlterTableOperation::DropPrimaryKey { drop_behavior }
10691 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10692 let name = self.parse_identifier()?;
10693 let drop_behavior = self.parse_optional_drop_behavior();
10694 AlterTableOperation::DropForeignKey {
10695 name,
10696 drop_behavior,
10697 }
10698 } else if self.parse_keyword(Keyword::INDEX) {
10699 let name = self.parse_identifier()?;
10700 AlterTableOperation::DropIndex { name }
10701 } else if self.parse_keyword(Keyword::PROJECTION)
10702 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10703 {
10704 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10705 let name = self.parse_identifier()?;
10706 AlterTableOperation::DropProjection { if_exists, name }
10707 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10708 AlterTableOperation::DropClusteringKey
10709 } else {
10710 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10712 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10713 self.parse_comma_separated(Parser::parse_identifier)?
10714 } else {
10715 vec![self.parse_identifier()?]
10716 };
10717 let drop_behavior = self.parse_optional_drop_behavior();
10718 AlterTableOperation::DropColumn {
10719 has_column_keyword,
10720 column_names,
10721 if_exists,
10722 drop_behavior,
10723 }
10724 }
10725 } else if self.parse_keyword(Keyword::PARTITION) {
10726 self.expect_token(&Token::LParen)?;
10727 let before = self.parse_comma_separated(Parser::parse_expr)?;
10728 self.expect_token(&Token::RParen)?;
10729 self.expect_keyword_is(Keyword::RENAME)?;
10730 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10731 self.expect_token(&Token::LParen)?;
10732 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10733 self.expect_token(&Token::RParen)?;
10734 AlterTableOperation::RenamePartitions {
10735 old_partitions: before,
10736 new_partitions: renames,
10737 }
10738 } else if self.parse_keyword(Keyword::CHANGE) {
10739 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10741 let new_name = self.parse_identifier()?;
10742 let data_type = self.parse_data_type()?;
10743 let mut options = vec![];
10744 while let Some(option) = self.parse_optional_column_option()? {
10745 options.push(option);
10746 }
10747
10748 let column_position = self.parse_column_position()?;
10749
10750 AlterTableOperation::ChangeColumn {
10751 old_name,
10752 new_name,
10753 data_type,
10754 options,
10755 column_position,
10756 }
10757 } else if self.parse_keyword(Keyword::MODIFY) {
10758 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10760 let data_type = self.parse_data_type()?;
10761 let mut options = vec![];
10762 while let Some(option) = self.parse_optional_column_option()? {
10763 options.push(option);
10764 }
10765
10766 let column_position = self.parse_column_position()?;
10767
10768 AlterTableOperation::ModifyColumn {
10769 col_name,
10770 data_type,
10771 options,
10772 column_position,
10773 }
10774 } else if self.parse_keyword(Keyword::ALTER) {
10775 if self.peek_keyword(Keyword::SORTKEY) {
10776 self.prev_token();
10777 return self.parse_alter_sort_key();
10778 }
10779
10780 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10782 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10783
10784 let op: AlterColumnOperation = if self.parse_keywords(&[
10785 Keyword::SET,
10786 Keyword::NOT,
10787 Keyword::NULL,
10788 ]) {
10789 AlterColumnOperation::SetNotNull {}
10790 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10791 AlterColumnOperation::DropNotNull {}
10792 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10793 AlterColumnOperation::SetDefault {
10794 value: self.parse_expr()?,
10795 }
10796 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10797 AlterColumnOperation::DropDefault {}
10798 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10799 self.parse_set_data_type(true)?
10800 } else if self.parse_keyword(Keyword::TYPE) {
10801 self.parse_set_data_type(false)?
10802 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10803 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10804 Some(GeneratedAs::Always)
10805 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10806 Some(GeneratedAs::ByDefault)
10807 } else {
10808 None
10809 };
10810
10811 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10812
10813 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10814
10815 if self.peek_token_ref().token == Token::LParen {
10816 self.expect_token(&Token::LParen)?;
10817 sequence_options = Some(self.parse_create_sequence_options()?);
10818 self.expect_token(&Token::RParen)?;
10819 }
10820
10821 AlterColumnOperation::AddGenerated {
10822 generated_as,
10823 sequence_options,
10824 }
10825 } else {
10826 let message = if is_postgresql {
10827 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10828 } else {
10829 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10830 };
10831
10832 return self.expected_ref(message, self.peek_token_ref());
10833 };
10834 AlterTableOperation::AlterColumn { column_name, op }
10835 } else if self.parse_keyword(Keyword::SWAP) {
10836 self.expect_keyword_is(Keyword::WITH)?;
10837 let table_name = self.parse_object_name(false)?;
10838 AlterTableOperation::SwapWith { table_name }
10839 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10840 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10841 {
10842 let new_owner = self.parse_owner()?;
10843 AlterTableOperation::OwnerTo { new_owner }
10844 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10845 && self.parse_keyword(Keyword::ATTACH)
10846 {
10847 AlterTableOperation::AttachPartition {
10848 partition: self.parse_part_or_partition()?,
10849 }
10850 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10851 && self.parse_keyword(Keyword::DETACH)
10852 {
10853 AlterTableOperation::DetachPartition {
10854 partition: self.parse_part_or_partition()?,
10855 }
10856 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10857 && self.parse_keyword(Keyword::FREEZE)
10858 {
10859 let partition = self.parse_part_or_partition()?;
10860 let with_name = if self.parse_keyword(Keyword::WITH) {
10861 self.expect_keyword_is(Keyword::NAME)?;
10862 Some(self.parse_identifier()?)
10863 } else {
10864 None
10865 };
10866 AlterTableOperation::FreezePartition {
10867 partition,
10868 with_name,
10869 }
10870 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10871 && self.parse_keyword(Keyword::UNFREEZE)
10872 {
10873 let partition = self.parse_part_or_partition()?;
10874 let with_name = if self.parse_keyword(Keyword::WITH) {
10875 self.expect_keyword_is(Keyword::NAME)?;
10876 Some(self.parse_identifier()?)
10877 } else {
10878 None
10879 };
10880 AlterTableOperation::UnfreezePartition {
10881 partition,
10882 with_name,
10883 }
10884 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10885 self.expect_token(&Token::LParen)?;
10886 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
10887 self.expect_token(&Token::RParen)?;
10888 AlterTableOperation::ClusterBy { exprs }
10889 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
10890 AlterTableOperation::SuspendRecluster
10891 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
10892 AlterTableOperation::ResumeRecluster
10893 } else if self.parse_keyword(Keyword::LOCK) {
10894 let equals = self.consume_token(&Token::Eq);
10895 let lock = match self.parse_one_of_keywords(&[
10896 Keyword::DEFAULT,
10897 Keyword::EXCLUSIVE,
10898 Keyword::NONE,
10899 Keyword::SHARED,
10900 ]) {
10901 Some(Keyword::DEFAULT) => AlterTableLock::Default,
10902 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
10903 Some(Keyword::NONE) => AlterTableLock::None,
10904 Some(Keyword::SHARED) => AlterTableLock::Shared,
10905 _ => self.expected_ref(
10906 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
10907 self.peek_token_ref(),
10908 )?,
10909 };
10910 AlterTableOperation::Lock { equals, lock }
10911 } else if self.parse_keyword(Keyword::ALGORITHM) {
10912 let equals = self.consume_token(&Token::Eq);
10913 let algorithm = match self.parse_one_of_keywords(&[
10914 Keyword::DEFAULT,
10915 Keyword::INSTANT,
10916 Keyword::INPLACE,
10917 Keyword::COPY,
10918 ]) {
10919 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
10920 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
10921 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
10922 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
10923 _ => self.expected_ref(
10924 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
10925 self.peek_token_ref(),
10926 )?,
10927 };
10928 AlterTableOperation::Algorithm { equals, algorithm }
10929 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
10930 let equals = self.consume_token(&Token::Eq);
10931 let value = self.parse_number_value()?;
10932 AlterTableOperation::AutoIncrement { equals, value }
10933 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
10934 let identity = if self.parse_keyword(Keyword::NOTHING) {
10935 ReplicaIdentity::Nothing
10936 } else if self.parse_keyword(Keyword::FULL) {
10937 ReplicaIdentity::Full
10938 } else if self.parse_keyword(Keyword::DEFAULT) {
10939 ReplicaIdentity::Default
10940 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
10941 ReplicaIdentity::Index(self.parse_identifier()?)
10942 } else {
10943 return self.expected_ref(
10944 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
10945 self.peek_token_ref(),
10946 );
10947 };
10948
10949 AlterTableOperation::ReplicaIdentity { identity }
10950 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
10951 let name = self.parse_identifier()?;
10952 AlterTableOperation::ValidateConstraint { name }
10953 } else {
10954 let mut options =
10955 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
10956 if !options.is_empty() {
10957 AlterTableOperation::SetTblProperties {
10958 table_properties: options,
10959 }
10960 } else {
10961 options = self.parse_options(Keyword::SET)?;
10962 if !options.is_empty() {
10963 AlterTableOperation::SetOptionsParens { options }
10964 } else {
10965 return self.expected_ref(
10966 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
10967 self.peek_token_ref(),
10968 );
10969 }
10970 }
10971 };
10972 Ok(operation)
10973 }
10974
10975 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
10976 let data_type = self.parse_data_type()?;
10977 let using = if self.dialect.supports_alter_column_type_using()
10978 && self.parse_keyword(Keyword::USING)
10979 {
10980 Some(self.parse_expr()?)
10981 } else {
10982 None
10983 };
10984 Ok(AlterColumnOperation::SetDataType {
10985 data_type,
10986 using,
10987 had_set,
10988 })
10989 }
10990
10991 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
10992 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
10993 match keyword {
10994 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
10995 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
10996 unexpected_keyword => Err(ParserError::ParserError(
10998 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
10999 )),
11000 }
11001 }
11002
11003 pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
11005 let object_type = self.expect_one_of_keywords(&[
11006 Keyword::VIEW,
11007 Keyword::TYPE,
11008 Keyword::COLLATION,
11009 Keyword::TABLE,
11010 Keyword::INDEX,
11011 Keyword::FUNCTION,
11012 Keyword::AGGREGATE,
11013 Keyword::ROLE,
11014 Keyword::POLICY,
11015 Keyword::CONNECTOR,
11016 Keyword::ICEBERG,
11017 Keyword::SCHEMA,
11018 Keyword::USER,
11019 Keyword::OPERATOR,
11020 ])?;
11021 match object_type {
11022 Keyword::SCHEMA => {
11023 self.prev_token();
11024 self.prev_token();
11025 self.parse_alter_schema()
11026 }
11027 Keyword::VIEW => self.parse_alter_view(),
11028 Keyword::TYPE => self.parse_alter_type(),
11029 Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
11030 Keyword::TABLE => self.parse_alter_table(false),
11031 Keyword::ICEBERG => {
11032 self.expect_keyword(Keyword::TABLE)?;
11033 self.parse_alter_table(true)
11034 }
11035 Keyword::INDEX => {
11036 let index_name = self.parse_object_name(false)?;
11037 let operation = if self.parse_keyword(Keyword::RENAME) {
11038 if self.parse_keyword(Keyword::TO) {
11039 let index_name = self.parse_object_name(false)?;
11040 AlterIndexOperation::RenameIndex { index_name }
11041 } else {
11042 return self.expected_ref("TO after RENAME", self.peek_token_ref());
11043 }
11044 } else {
11045 return self.expected_ref("RENAME after ALTER INDEX", self.peek_token_ref());
11046 };
11047
11048 Ok(Statement::AlterIndex {
11049 name: index_name,
11050 operation,
11051 })
11052 }
11053 Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
11054 Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
11055 Keyword::OPERATOR => {
11056 if self.parse_keyword(Keyword::FAMILY) {
11057 self.parse_alter_operator_family().map(Into::into)
11058 } else if self.parse_keyword(Keyword::CLASS) {
11059 self.parse_alter_operator_class().map(Into::into)
11060 } else {
11061 self.parse_alter_operator().map(Into::into)
11062 }
11063 }
11064 Keyword::ROLE => self.parse_alter_role(),
11065 Keyword::POLICY => self.parse_alter_policy().map(Into::into),
11066 Keyword::CONNECTOR => self.parse_alter_connector(),
11067 Keyword::USER => self.parse_alter_user().map(Into::into),
11068 unexpected_keyword => Err(ParserError::ParserError(
11070 format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
11071 )),
11072 }
11073 }
11074
11075 fn parse_alter_aggregate_signature(
11076 &mut self,
11077 ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
11078 let name = self.parse_object_name(false)?;
11079 self.expect_token(&Token::LParen)?;
11080
11081 if self.consume_token(&Token::Mul) {
11082 self.expect_token(&Token::RParen)?;
11083 return Ok((
11084 FunctionDesc {
11085 name,
11086 args: Some(vec![]),
11087 },
11088 true,
11089 None,
11090 ));
11091 }
11092
11093 let args =
11094 if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
11095 vec![]
11096 } else {
11097 self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
11098 };
11099
11100 let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11101 Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
11102 } else {
11103 None
11104 };
11105
11106 self.expect_token(&Token::RParen)?;
11107 Ok((
11108 FunctionDesc {
11109 name,
11110 args: Some(args),
11111 },
11112 false,
11113 aggregate_order_by,
11114 ))
11115 }
11116
    /// Attempts to parse a single `ALTER FUNCTION` action (volatility,
    /// security, parallel safety, COST/ROWS, SET/RESET, ...).
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start any known
    /// action, leaving the token stream untouched so callers can loop until
    /// the action list is exhausted. The branch order below matters: each
    /// `parse_keyword(s)` call consumes tokens on success, and longer
    /// keyword sequences must be tried before their prefixes.
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // The only NOT-prefixed action here is NOT LEAKPROOF.
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // EXTERNAL SECURITY { DEFINER | INVOKER }
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            // SECURITY { DEFINER | INVOKER } without the EXTERNAL prefix.
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            // PARALLEL { UNSAFE | RESTRICTED | SAFE }
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // SET name { FROM CURRENT | { = | TO } { DEFAULT | value [, ...] } }
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // Either `=` or `TO` must separate the name from its value.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // RESET { ALL | configuration_parameter }
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // No recognized action keyword: report "no action" to the caller.
            None
        };

        Ok(action)
    }
11226
11227 fn parse_alter_function_actions(
11228 &mut self,
11229 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11230 let mut actions = vec![];
11231 while let Some(action) = self.parse_alter_function_action()? {
11232 actions.push(action);
11233 }
11234 if actions.is_empty() {
11235 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11236 }
11237 let restrict = self.parse_keyword(Keyword::RESTRICT);
11238 Ok((actions, restrict))
11239 }
11240
11241 pub fn parse_alter_function(
11243 &mut self,
11244 kind: AlterFunctionKind,
11245 ) -> Result<Statement, ParserError> {
11246 let (function, aggregate_star, aggregate_order_by) = match kind {
11247 AlterFunctionKind::Function => (self.parse_function_desc()?, false, None),
11248 AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
11249 };
11250
11251 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11252 let new_name = self.parse_identifier()?;
11253 AlterFunctionOperation::RenameTo { new_name }
11254 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11255 AlterFunctionOperation::OwnerTo(self.parse_owner()?)
11256 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11257 AlterFunctionOperation::SetSchema {
11258 schema_name: self.parse_object_name(false)?,
11259 }
11260 } else if matches!(kind, AlterFunctionKind::Function) && self.parse_keyword(Keyword::NO) {
11261 if !self.parse_keyword(Keyword::DEPENDS) {
11262 return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
11263 }
11264 self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
11265 AlterFunctionOperation::DependsOnExtension {
11266 no: true,
11267 extension_name: self.parse_object_name(false)?,
11268 }
11269 } else if matches!(kind, AlterFunctionKind::Function)
11270 && self.parse_keyword(Keyword::DEPENDS)
11271 {
11272 self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
11273 AlterFunctionOperation::DependsOnExtension {
11274 no: false,
11275 extension_name: self.parse_object_name(false)?,
11276 }
11277 } else if matches!(kind, AlterFunctionKind::Function) {
11278 let (actions, restrict) = self.parse_alter_function_actions()?;
11279 AlterFunctionOperation::Actions { actions, restrict }
11280 } else {
11281 return self.expected_ref(
11282 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
11283 self.peek_token_ref(),
11284 );
11285 };
11286
11287 Ok(Statement::AlterFunction(AlterFunction {
11288 kind,
11289 function,
11290 aggregate_order_by,
11291 aggregate_star,
11292 operation,
11293 }))
11294 }
11295
11296 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11298 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11299 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11301 let on_cluster = self.parse_optional_on_cluster()?;
11302 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11303
11304 let mut location = None;
11305 if self.parse_keyword(Keyword::LOCATION) {
11306 location = Some(HiveSetLocation {
11307 has_set: false,
11308 location: self.parse_identifier()?,
11309 });
11310 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11311 location = Some(HiveSetLocation {
11312 has_set: true,
11313 location: self.parse_identifier()?,
11314 });
11315 }
11316
11317 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11318 self.peek_token_ref().clone()
11319 } else {
11320 self.get_current_token().clone()
11321 };
11322
11323 Ok(AlterTable {
11324 name: table_name,
11325 if_exists,
11326 only,
11327 operations,
11328 location,
11329 on_cluster,
11330 table_type: if iceberg {
11331 Some(AlterTableType::Iceberg)
11332 } else {
11333 None
11334 },
11335 end_token: AttachedToken(end_token),
11336 }
11337 .into())
11338 }
11339
11340 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11342 let name = self.parse_object_name(false)?;
11343 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11344
11345 let with_options = self.parse_options(Keyword::WITH)?;
11346
11347 self.expect_keyword_is(Keyword::AS)?;
11348 let query = self.parse_query()?;
11349
11350 Ok(Statement::AlterView {
11351 name,
11352 columns,
11353 query,
11354 with_options,
11355 })
11356 }
11357
11358 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
11360 let name = self.parse_object_name(false)?;
11361
11362 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11363 let new_name = self.parse_identifier()?;
11364 Ok(Statement::AlterType(AlterType {
11365 name,
11366 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
11367 }))
11368 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
11369 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
11370 let new_enum_value = self.parse_identifier()?;
11371 let position = if self.parse_keyword(Keyword::BEFORE) {
11372 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
11373 } else if self.parse_keyword(Keyword::AFTER) {
11374 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
11375 } else {
11376 None
11377 };
11378
11379 Ok(Statement::AlterType(AlterType {
11380 name,
11381 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
11382 if_not_exists,
11383 value: new_enum_value,
11384 position,
11385 }),
11386 }))
11387 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
11388 let existing_enum_value = self.parse_identifier()?;
11389 self.expect_keyword(Keyword::TO)?;
11390 let new_enum_value = self.parse_identifier()?;
11391
11392 Ok(Statement::AlterType(AlterType {
11393 name,
11394 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
11395 from: existing_enum_value,
11396 to: new_enum_value,
11397 }),
11398 }))
11399 } else {
11400 self.expected_ref(
11401 "{RENAME TO | { RENAME | ADD } VALUE}",
11402 self.peek_token_ref(),
11403 )
11404 }
11405 }
11406
11407 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11411 let name = self.parse_object_name(false)?;
11412 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11413 AlterCollationOperation::RenameTo {
11414 new_name: self.parse_identifier()?,
11415 }
11416 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11417 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11418 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11419 AlterCollationOperation::SetSchema {
11420 schema_name: self.parse_object_name(false)?,
11421 }
11422 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11423 AlterCollationOperation::RefreshVersion
11424 } else {
11425 return self.expected_ref(
11426 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11427 self.peek_token_ref(),
11428 );
11429 };
11430
11431 Ok(AlterCollation { name, operation })
11432 }
11433
    /// Parses the tail of `ALTER OPERATOR name ( left_type , right_type ) ...`.
    ///
    /// Supports `OWNER TO`, `SET SCHEMA`, and `SET ( option [, ...] )`.
    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
        let name = self.parse_operator_name()?;

        // Operand signature: `( NONE | left_type , right_type )`.
        self.expect_token(&Token::LParen)?;

        // A NONE keyword in place of the left type yields `None`.
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // Branch order matters: `SET SCHEMA` must be tried before bare `SET`.
        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // OWNER TO { CURRENT_ROLE | CURRENT_USER | SESSION_USER | ident }
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            self.expect_token(&Token::LParen)?;

            // Comma-separated list of operator options inside the parens.
            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    Keyword::RESTRICT => {
                        // RESTRICT = { proc_name | NONE }
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        // JOIN = { proc_name | NONE }
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    Keyword::COMMUTATOR => {
                        // COMMUTATOR = operator_name
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        // NEGATOR = operator_name
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    Keyword::HASHES => {
                        // HASHES and MERGES are bare flags with no value.
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // Unreachable: expect_one_of_keywords only returns listed keywords.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        })
    }
11543
11544 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11546 let strategy_number = self.parse_literal_uint()?;
11547 let operator_name = self.parse_operator_name()?;
11548
11549 self.expect_token(&Token::LParen)?;
11551 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11552 self.expect_token(&Token::RParen)?;
11553
11554 let purpose = if self.parse_keyword(Keyword::FOR) {
11556 if self.parse_keyword(Keyword::SEARCH) {
11557 Some(OperatorPurpose::ForSearch)
11558 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11559 let sort_family = self.parse_object_name(false)?;
11560 Some(OperatorPurpose::ForOrderBy { sort_family })
11561 } else {
11562 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11563 }
11564 } else {
11565 None
11566 };
11567
11568 Ok(OperatorFamilyItem::Operator {
11569 strategy_number,
11570 operator_name,
11571 op_types,
11572 purpose,
11573 })
11574 }
11575
11576 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11578 let support_number = self.parse_literal_uint()?;
11579
11580 let op_types =
11582 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
11583 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11584 self.expect_token(&Token::RParen)?;
11585 Some(types)
11586 } else if self.consume_token(&Token::LParen) {
11587 self.expect_token(&Token::RParen)?;
11588 Some(vec![])
11589 } else {
11590 None
11591 };
11592
11593 let function_name = self.parse_object_name(false)?;
11594
11595 let argument_types = if self.consume_token(&Token::LParen) {
11597 if self.peek_token_ref().token == Token::RParen {
11598 self.expect_token(&Token::RParen)?;
11599 vec![]
11600 } else {
11601 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11602 self.expect_token(&Token::RParen)?;
11603 types
11604 }
11605 } else {
11606 vec![]
11607 };
11608
11609 Ok(OperatorFamilyItem::Function {
11610 support_number,
11611 op_types,
11612 function_name,
11613 argument_types,
11614 })
11615 }
11616
11617 fn parse_operator_family_drop_operator(
11619 &mut self,
11620 ) -> Result<OperatorFamilyDropItem, ParserError> {
11621 let strategy_number = self.parse_literal_uint()?;
11622
11623 self.expect_token(&Token::LParen)?;
11625 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11626 self.expect_token(&Token::RParen)?;
11627
11628 Ok(OperatorFamilyDropItem::Operator {
11629 strategy_number,
11630 op_types,
11631 })
11632 }
11633
11634 fn parse_operator_family_drop_function(
11636 &mut self,
11637 ) -> Result<OperatorFamilyDropItem, ParserError> {
11638 let support_number = self.parse_literal_uint()?;
11639
11640 self.expect_token(&Token::LParen)?;
11642 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11643 self.expect_token(&Token::RParen)?;
11644
11645 Ok(OperatorFamilyDropItem::Function {
11646 support_number,
11647 op_types,
11648 })
11649 }
11650
11651 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11653 if self.parse_keyword(Keyword::OPERATOR) {
11654 self.parse_operator_family_add_operator()
11655 } else if self.parse_keyword(Keyword::FUNCTION) {
11656 self.parse_operator_family_add_function()
11657 } else {
11658 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11659 }
11660 }
11661
11662 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
11664 if self.parse_keyword(Keyword::OPERATOR) {
11665 self.parse_operator_family_drop_operator()
11666 } else if self.parse_keyword(Keyword::FUNCTION) {
11667 self.parse_operator_family_drop_function()
11668 } else {
11669 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11670 }
11671 }
11672
11673 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
11676 let name = self.parse_object_name(false)?;
11677 self.expect_keyword(Keyword::USING)?;
11678 let using = self.parse_identifier()?;
11679
11680 let operation = if self.parse_keyword(Keyword::ADD) {
11681 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
11682 AlterOperatorFamilyOperation::Add { items }
11683 } else if self.parse_keyword(Keyword::DROP) {
11684 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
11685 AlterOperatorFamilyOperation::Drop { items }
11686 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11687 let new_name = self.parse_object_name(false)?;
11688 AlterOperatorFamilyOperation::RenameTo { new_name }
11689 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11690 let owner = self.parse_owner()?;
11691 AlterOperatorFamilyOperation::OwnerTo(owner)
11692 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11693 let schema_name = self.parse_object_name(false)?;
11694 AlterOperatorFamilyOperation::SetSchema { schema_name }
11695 } else {
11696 return self.expected_ref(
11697 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
11698 self.peek_token_ref(),
11699 );
11700 };
11701
11702 Ok(AlterOperatorFamily {
11703 name,
11704 using,
11705 operation,
11706 })
11707 }
11708
11709 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
11713 let name = self.parse_object_name(false)?;
11714 self.expect_keyword(Keyword::USING)?;
11715 let using = self.parse_identifier()?;
11716
11717 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11718 let new_name = self.parse_object_name(false)?;
11719 AlterOperatorClassOperation::RenameTo { new_name }
11720 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11721 let owner = self.parse_owner()?;
11722 AlterOperatorClassOperation::OwnerTo(owner)
11723 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11724 let schema_name = self.parse_object_name(false)?;
11725 AlterOperatorClassOperation::SetSchema { schema_name }
11726 } else {
11727 return self.expected_ref(
11728 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
11729 self.peek_token_ref(),
11730 );
11731 };
11732
11733 Ok(AlterOperatorClass {
11734 name,
11735 using,
11736 operation,
11737 })
11738 }
11739
11740 pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
11744 self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
11745 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11746 let name = self.parse_object_name(false)?;
11747 let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
11748 self.prev_token();
11749 let options = self.parse_options(Keyword::OPTIONS)?;
11750 AlterSchemaOperation::SetOptionsParens { options }
11751 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
11752 let collate = self.parse_expr()?;
11753 AlterSchemaOperation::SetDefaultCollate { collate }
11754 } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
11755 let replica = self.parse_identifier()?;
11756 let options = if self.peek_keyword(Keyword::OPTIONS) {
11757 Some(self.parse_options(Keyword::OPTIONS)?)
11758 } else {
11759 None
11760 };
11761 AlterSchemaOperation::AddReplica { replica, options }
11762 } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
11763 let replica = self.parse_identifier()?;
11764 AlterSchemaOperation::DropReplica { replica }
11765 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11766 let new_name = self.parse_object_name(false)?;
11767 AlterSchemaOperation::Rename { name: new_name }
11768 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11769 let owner = self.parse_owner()?;
11770 AlterSchemaOperation::OwnerTo { owner }
11771 } else {
11772 return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
11773 };
11774 Ok(Statement::AlterSchema(AlterSchema {
11775 name,
11776 if_exists,
11777 operations: vec![operation],
11778 }))
11779 }
11780
11781 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
11784 let object_name = self.parse_object_name(false)?;
11785 if self.peek_token_ref().token == Token::LParen {
11786 match self.parse_function(object_name)? {
11787 Expr::Function(f) => Ok(Statement::Call(f)),
11788 other => parser_err!(
11789 format!("Expected a simple procedure call but found: {other}"),
11790 self.peek_token_ref().span.start
11791 ),
11792 }
11793 } else {
11794 Ok(Statement::Call(Function {
11795 name: object_name,
11796 uses_odbc_syntax: false,
11797 parameters: FunctionArguments::None,
11798 args: FunctionArguments::None,
11799 over: None,
11800 filter: None,
11801 null_treatment: None,
11802 within_group: vec![],
11803 }))
11804 }
11805 }
11806
    /// Parses a `COPY` statement (the `COPY` keyword has been consumed):
    /// `COPY { table [(cols)] | (query) } { FROM | TO } target
    /// [WITH] [(options)] [legacy options] [; inline tab-separated data]`.
    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
        let source;
        if self.consume_token(&Token::LParen) {
            // Parenthesized form: COPY (SELECT ...) TO ...
            source = CopySource::Query(self.parse_query()?);
            self.expect_token(&Token::RParen)?;
        } else {
            // Table form with an optional parenthesized column list.
            let table_name = self.parse_object_name(false)?;
            let columns = self.parse_parenthesized_column_list(Optional, false)?;
            source = CopySource::Table {
                table_name,
                columns,
            };
        }
        // Direction: FROM loads into the source, TO unloads from it.
        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
            Some(Keyword::FROM) => false,
            Some(Keyword::TO) => true,
            _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
        };
        if !to {
            // COPY ... FROM can only target a table, never a query.
            if let CopySource::Query(_) = source {
                return Err(ParserError::ParserError(
                    "COPY ... FROM does not support query as a source".to_string(),
                ));
            }
        }
        let target = if self.parse_keyword(Keyword::STDIN) {
            CopyTarget::Stdin
        } else if self.parse_keyword(Keyword::STDOUT) {
            CopyTarget::Stdout
        } else if self.parse_keyword(Keyword::PROGRAM) {
            CopyTarget::Program {
                command: self.parse_literal_string()?,
            }
        } else {
            CopyTarget::File {
                filename: self.parse_literal_string()?,
            }
        };
        // Optional WITH before the parenthesized option list.
        let _ = self.parse_keyword(Keyword::WITH);
        let mut options = vec![];
        if self.consume_token(&Token::LParen) {
            options = self.parse_comma_separated(Parser::parse_copy_option)?;
            self.expect_token(&Token::RParen)?;
        }
        // Older non-parenthesized option syntax may follow; best-effort via
        // maybe_parse so a non-option token simply ends the loop.
        let mut legacy_options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            legacy_options.push(opt);
        }
        // For COPY ... FROM STDIN, inline data follows the `;` terminator.
        let values =
            if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
                self.expect_token(&Token::SemiColon)?;
                self.parse_tsv()
            } else {
                vec![]
            };
        Ok(Statement::Copy {
            source,
            to,
            target,
            options,
            legacy_options,
            values,
        })
    }
11874
11875 fn parse_open(&mut self) -> Result<Statement, ParserError> {
11877 self.expect_keyword(Keyword::OPEN)?;
11878 Ok(Statement::Open(OpenStatement {
11879 cursor_name: self.parse_identifier()?,
11880 }))
11881 }
11882
11883 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
11885 let cursor = if self.parse_keyword(Keyword::ALL) {
11886 CloseCursor::All
11887 } else {
11888 let name = self.parse_identifier()?;
11889
11890 CloseCursor::Specific { name }
11891 };
11892
11893 Ok(Statement::Close { cursor })
11894 }
11895
11896 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
11897 let ret = match self.parse_one_of_keywords(&[
11898 Keyword::FORMAT,
11899 Keyword::FREEZE,
11900 Keyword::DELIMITER,
11901 Keyword::NULL,
11902 Keyword::HEADER,
11903 Keyword::QUOTE,
11904 Keyword::ESCAPE,
11905 Keyword::FORCE_QUOTE,
11906 Keyword::FORCE_NOT_NULL,
11907 Keyword::FORCE_NULL,
11908 Keyword::ENCODING,
11909 ]) {
11910 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
11911 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
11912 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
11913 Some(Keyword::FALSE)
11914 )),
11915 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
11916 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
11917 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
11918 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
11919 Some(Keyword::FALSE)
11920 )),
11921 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
11922 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
11923 Some(Keyword::FORCE_QUOTE) => {
11924 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
11925 }
11926 Some(Keyword::FORCE_NOT_NULL) => {
11927 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
11928 }
11929 Some(Keyword::FORCE_NULL) => {
11930 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
11931 }
11932 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
11933 _ => self.expected_ref("option", self.peek_token_ref())?,
11934 };
11935 Ok(ret)
11936 }
11937
    /// Parses one legacy (non-parenthesized) COPY/UNLOAD option, covering both
    /// the historical Postgres syntax and the Redshift COPY/UNLOAD extensions.
    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
        if self.parse_keyword(Keyword::FORMAT) {
            // FORMAT [AS] — the format name itself is matched by an arm below
            // (e.g. CSV, JSON, PARQUET).
            let _ = self.parse_keyword(Keyword::AS);
        }

        let ret = match self.parse_one_of_keywords(&[
            Keyword::ACCEPTANYDATE,
            Keyword::ACCEPTINVCHARS,
            Keyword::ADDQUOTES,
            Keyword::ALLOWOVERWRITE,
            Keyword::BINARY,
            Keyword::BLANKSASNULL,
            Keyword::BZIP2,
            Keyword::CLEANPATH,
            Keyword::COMPUPDATE,
            Keyword::CREDENTIALS,
            Keyword::CSV,
            Keyword::DATEFORMAT,
            Keyword::DELIMITER,
            Keyword::EMPTYASNULL,
            Keyword::ENCRYPTED,
            Keyword::ESCAPE,
            Keyword::EXTENSION,
            Keyword::FIXEDWIDTH,
            Keyword::GZIP,
            Keyword::HEADER,
            Keyword::IAM_ROLE,
            Keyword::IGNOREHEADER,
            Keyword::JSON,
            Keyword::MANIFEST,
            Keyword::MAXFILESIZE,
            Keyword::NULL,
            Keyword::PARALLEL,
            Keyword::PARQUET,
            Keyword::PARTITION,
            Keyword::REGION,
            Keyword::REMOVEQUOTES,
            Keyword::ROWGROUPSIZE,
            Keyword::STATUPDATE,
            Keyword::TIMEFORMAT,
            Keyword::TRUNCATECOLUMNS,
            Keyword::ZSTD,
        ]) {
            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
            Some(Keyword::ACCEPTINVCHARS) => {
                // ACCEPTINVCHARS [AS] ['replacement_char'] — the replacement
                // is only taken when a quoted string follows.
                let _ = self.parse_keyword(Keyword::AS);
                let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                    Some(self.parse_literal_string()?)
                } else {
                    None
                };
                CopyLegacyOption::AcceptInvChars(ch)
            }
            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
            Some(Keyword::COMPUPDATE) => {
                // COMPUPDATE [PRESET] [TRUE|FALSE|ON|OFF]
                let preset = self.parse_keyword(Keyword::PRESET);
                let enabled = match self.parse_one_of_keywords(&[
                    Keyword::TRUE,
                    Keyword::FALSE,
                    Keyword::ON,
                    Keyword::OFF,
                ]) {
                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
                    _ => None,
                };
                CopyLegacyOption::CompUpdate { preset, enabled }
            }
            Some(Keyword::CREDENTIALS) => {
                CopyLegacyOption::Credentials(self.parse_literal_string()?)
            }
            Some(Keyword::CSV) => CopyLegacyOption::Csv({
                // CSV may be followed by its own sub-options; collect them
                // best-effort until one fails to parse.
                let mut opts = vec![];
                while let Some(opt) =
                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
                {
                    opts.push(opt);
                }
                opts
            }),
            Some(Keyword::DATEFORMAT) => {
                // DATEFORMAT [AS] ['format'] — no string means 'auto'.
                let _ = self.parse_keyword(Keyword::AS);
                let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                    Some(self.parse_literal_string()?)
                } else {
                    None
                };
                CopyLegacyOption::DateFormat(fmt)
            }
            Some(Keyword::DELIMITER) => {
                // DELIMITER [AS] 'char'
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
            }
            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
            Some(Keyword::ENCRYPTED) => {
                let auto = self.parse_keyword(Keyword::AUTO);
                CopyLegacyOption::Encrypted { auto }
            }
            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
            Some(Keyword::EXTENSION) => {
                let ext = self.parse_literal_string()?;
                CopyLegacyOption::Extension(ext)
            }
            Some(Keyword::FIXEDWIDTH) => {
                let spec = self.parse_literal_string()?;
                CopyLegacyOption::FixedWidth(spec)
            }
            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
            Some(Keyword::HEADER) => CopyLegacyOption::Header,
            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
            Some(Keyword::IGNOREHEADER) => {
                // IGNOREHEADER [AS] <number_of_rows>
                let _ = self.parse_keyword(Keyword::AS);
                let num_rows = self.parse_literal_uint()?;
                CopyLegacyOption::IgnoreHeader(num_rows)
            }
            Some(Keyword::JSON) => {
                // JSON [AS] ['jsonpaths'] — no string means 'auto'.
                let _ = self.parse_keyword(Keyword::AS);
                let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                    Some(self.parse_literal_string()?)
                } else {
                    None
                };
                CopyLegacyOption::Json(fmt)
            }
            Some(Keyword::MANIFEST) => {
                let verbose = self.parse_keyword(Keyword::VERBOSE);
                CopyLegacyOption::Manifest { verbose }
            }
            Some(Keyword::MAXFILESIZE) => {
                // MAXFILESIZE [AS] <number> [MB|GB]
                let _ = self.parse_keyword(Keyword::AS);
                let size = self.parse_number_value()?;
                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
                    _ => None,
                };
                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
            }
            Some(Keyword::NULL) => {
                // NULL [AS] 'null_string'
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyOption::Null(self.parse_literal_string()?)
            }
            Some(Keyword::PARALLEL) => {
                let enabled = match self.parse_one_of_keywords(&[
                    Keyword::TRUE,
                    Keyword::FALSE,
                    Keyword::ON,
                    Keyword::OFF,
                ]) {
                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
                    _ => None,
                };
                CopyLegacyOption::Parallel(enabled)
            }
            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
            Some(Keyword::PARTITION) => {
                // PARTITION BY (col, ...) [INCLUDE]
                self.expect_keyword(Keyword::BY)?;
                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
                let include = self.parse_keyword(Keyword::INCLUDE);
                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
            }
            Some(Keyword::REGION) => {
                let _ = self.parse_keyword(Keyword::AS);
                let region = self.parse_literal_string()?;
                CopyLegacyOption::Region(region)
            }
            Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
            Some(Keyword::ROWGROUPSIZE) => {
                let _ = self.parse_keyword(Keyword::AS);
                let file_size = self.parse_file_size()?;
                CopyLegacyOption::RowGroupSize(file_size)
            }
            Some(Keyword::STATUPDATE) => {
                let enabled = match self.parse_one_of_keywords(&[
                    Keyword::TRUE,
                    Keyword::FALSE,
                    Keyword::ON,
                    Keyword::OFF,
                ]) {
                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
                    _ => None,
                };
                CopyLegacyOption::StatUpdate(enabled)
            }
            Some(Keyword::TIMEFORMAT) => {
                // TIMEFORMAT [AS] ['format'] — no string means 'auto'.
                let _ = self.parse_keyword(Keyword::AS);
                let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                    Some(self.parse_literal_string()?)
                } else {
                    None
                };
                CopyLegacyOption::TimeFormat(fmt)
            }
            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
            _ => self.expected_ref("option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12145
12146 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12147 let size = self.parse_number_value()?;
12148 let unit = self.maybe_parse_file_size_unit();
12149 Ok(FileSize { size, unit })
12150 }
12151
12152 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12153 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12154 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12155 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12156 _ => None,
12157 }
12158 }
12159
12160 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12161 if self.parse_keyword(Keyword::DEFAULT) {
12162 Ok(IamRoleKind::Default)
12163 } else {
12164 let arn = self.parse_literal_string()?;
12165 Ok(IamRoleKind::Arn(arn))
12166 }
12167 }
12168
    /// Parses one sub-option of the legacy `CSV ...` clause. Note that the
    /// `FORCE` match guards call `parse_keywords`, which rolls the parser back
    /// on failure — so a failed `FORCE NOT NULL` guard still leaves
    /// `FORCE QUOTE` parseable by the next arm.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                // QUOTE [AS] 'char'
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                // ESCAPE [AS] 'char'
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            _ => self.expected_ref("csv option", self.peek_token_ref())?,
        };
        Ok(ret)
    }
12199
12200 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12201 let s = self.parse_literal_string()?;
12202 if s.len() != 1 {
12203 let loc = self
12204 .tokens
12205 .get(self.index - 1)
12206 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12207 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12208 }
12209 Ok(s.chars().next().unwrap())
12210 }
12211
    /// Parses the tab-separated values that follow `COPY ... FROM STDIN;`.
    /// `None` entries represent SQL NULL (`\N` in the data stream).
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
12217
    /// Consumes raw (whitespace-preserving) tokens as tab-separated data.
    /// A tab or newline ends the current field, `\N` becomes `None` (NULL),
    /// and `\.` terminates the data block. Values from all rows are returned
    /// in a single flat list — rows are not modeled separately here.
    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
        let mut values = vec![];
        let mut content = String::new();
        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
            match t {
                // Field separator: flush the accumulated content.
                Token::Whitespace(Whitespace::Tab) => {
                    values.push(Some(core::mem::take(&mut content)));
                }
                // Row separator: also flushes the current field.
                Token::Whitespace(Whitespace::Newline) => {
                    values.push(Some(core::mem::take(&mut content)));
                }
                Token::Backslash => {
                    // `\.` marks the end of inline data.
                    if self.consume_token(&Token::Period) {
                        return values;
                    }
                    // `\N` denotes NULL; any other escaped word is dropped.
                    if let Token::Word(w) = self.next_token().token {
                        if w.value == "N" {
                            values.push(None);
                        }
                    }
                }
                // Any other token is literal field content.
                _ => {
                    content.push_str(&t.to_string());
                }
            }
        }
        values
    }
12247
    /// Parses a single literal value token (boolean, NULL, number, the many
    /// string-literal flavors, or a placeholder) into a `ValueWithSpan`
    /// carrying the consumed token's span.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper attaching the consumed token's span to a parsed Value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are values only in dialects with boolean literals.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Adjacent string literals may be concatenated, depending on the
            // dialect's concatenation rules.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name`/`@name` (or a numeric suffix) forms a placeholder whose
            // span covers both the sigil and the following identifier; no
            // whitespace is allowed between them (next_token_no_skip).
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12365
    /// Concatenates adjacent string literals onto `str` when the dialect
    /// allows it, consuming the extra literal tokens. Returns the (possibly
    /// extended) string.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            // Adjacent literals concatenate regardless of the whitespace
            // between them.
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // Literals concatenate only when separated by at least one
            // newline, so scan raw (whitespace-preserving) tokens and track
            // whether a newline has been seen since the last literal.
            // NOTE(review): whitespace tokens are consumed even when no
            // literal ultimately follows — presumably harmless since
            // whitespace is normally skipped anyway; confirm.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    Token::Whitespace(_) => {
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        after_newline = false;
                    }
                    // Anything else (including a literal without a preceding
                    // newline) stops the concatenation.
                    _ => break,
                }
            }
        }

        str
    }
12404
12405 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12407 let value_wrapper = self.parse_value()?;
12408 match &value_wrapper.value {
12409 Value::Number(_, _) => Ok(value_wrapper),
12410 Value::Placeholder(_) => Ok(value_wrapper),
12411 _ => {
12412 self.prev_token();
12413 self.expected_ref("literal number", self.peek_token_ref())
12414 }
12415 }
12416 }
12417
12418 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12421 let next_token = self.next_token();
12422 match next_token.token {
12423 Token::Plus => Ok(Expr::UnaryOp {
12424 op: UnaryOperator::Plus,
12425 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12426 }),
12427 Token::Minus => Ok(Expr::UnaryOp {
12428 op: UnaryOperator::Minus,
12429 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12430 }),
12431 _ => {
12432 self.prev_token();
12433 Ok(Expr::Value(self.parse_number_value()?))
12434 }
12435 }
12436 }
12437
12438 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12439 let next_token = self.next_token();
12440 let span = next_token.span;
12441 match next_token.token {
12442 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12443 Value::SingleQuotedString(s.to_string()).with_span(span),
12444 )),
12445 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12446 Value::DoubleQuotedString(s.to_string()).with_span(span),
12447 )),
12448 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12449 Value::HexStringLiteral(s.to_string()).with_span(span),
12450 )),
12451 unexpected => self.expected(
12452 "a string value",
12453 TokenWithSpan {
12454 token: unexpected,
12455 span,
12456 },
12457 ),
12458 }
12459 }
12460
12461 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12463 let next_token = self.next_token();
12464 match next_token.token {
12465 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12466 _ => self.expected("literal int", next_token),
12467 }
12468 }
12469
    /// Parses the string body of `CREATE FUNCTION ... AS 'definition'
    /// [, 'link_symbol']`. On Postgres/generic dialects the definition may
    /// also be a dollar-quoted string.
    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
        // Shared helper for both the body and the optional link symbol.
        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
            let peek_token = parser.peek_token();
            let span = peek_token.span;
            match peek_token.token {
                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
                {
                    parser.next_token();
                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
                }
                _ => Ok(Expr::Value(
                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
                )),
            }
        };

        Ok(CreateFunctionBody::AsBeforeOptions {
            body: parse_string_expr(self)?,
            // C-language functions may name a link symbol after a comma.
            link_symbol: if self.consume_token(&Token::Comma) {
                Some(parse_string_expr(self)?)
            } else {
                None
            },
        })
    }
12497
12498 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
12500 let next_token = self.next_token();
12501 match next_token.token {
12502 Token::Word(Word {
12503 value,
12504 keyword: Keyword::NoKeyword,
12505 ..
12506 }) => Ok(value),
12507 Token::SingleQuotedString(s) => Ok(s),
12508 Token::DoubleQuotedString(s) => Ok(s),
12509 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
12510 Ok(s)
12511 }
12512 Token::UnicodeStringLiteral(s) => Ok(s),
12513 _ => self.expected("literal string", next_token),
12514 }
12515 }
12516
12517 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12519 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12520 Some(Keyword::TRUE) => Ok(true),
12521 Some(Keyword::FALSE) => Ok(false),
12522 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12523 }
12524 }
12525
12526 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
12528 let neg = self.parse_keyword(Keyword::NOT);
12529 let normalized_form = self.maybe_parse(|parser| {
12530 match parser.parse_one_of_keywords(&[
12531 Keyword::NFC,
12532 Keyword::NFD,
12533 Keyword::NFKC,
12534 Keyword::NFKD,
12535 ]) {
12536 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
12537 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
12538 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
12539 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
12540 _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
12541 }
12542 })?;
12543 if self.parse_keyword(Keyword::NORMALIZED) {
12544 return Ok(Expr::IsNormalized {
12545 expr: Box::new(expr),
12546 form: normalized_form,
12547 negated: neg,
12548 });
12549 }
12550 self.expected_ref("unicode normalization form", self.peek_token_ref())
12551 }
12552
12553 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12555 self.expect_token(&Token::LParen)?;
12556 let values = self.parse_comma_separated(|parser| {
12557 let name = parser.parse_literal_string()?;
12558 let e = if parser.consume_token(&Token::Eq) {
12559 let value = parser.parse_number()?;
12560 EnumMember::NamedValue(name, value)
12561 } else {
12562 EnumMember::Name(name)
12563 };
12564 Ok(e)
12565 })?;
12566 self.expect_token(&Token::RParen)?;
12567
12568 Ok(values)
12569 }
12570
12571 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
12573 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
12574 if trailing_bracket.0 {
12575 return parser_err!(
12576 format!("unmatched > after parsing data type {ty}"),
12577 self.peek_token_ref()
12578 );
12579 }
12580
12581 Ok(ty)
12582 }
12583
    /// Worker behind [`Parser::parse_data_type`]: parses a data type and also
    /// reports (via `MatchedTrailingBracket`) whether a `>` closing an outer
    /// generic type (e.g. the second half of the `>>` ending
    /// `ARRAY<STRUCT<...>>`) was already consumed while parsing.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        // Remembered so the error below can point at the offending token.
        let next_token_index = self.get_current_index();

        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        // Dispatch on the keyword; non-keyword words fall through to the
        // custom-type arm at the bottom.
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                // Boolean types.
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                // Floating-point types; several accept an optional
                // precision/scale and a MySQL-style UNSIGNED suffix.
                Keyword::FLOAT => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::FloatUnsigned(precision))
                    } else {
                        Ok(DataType::Float(precision))
                    }
                }
                Keyword::REAL => {
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::RealUnsigned)
                    } else {
                        Ok(DataType::Real)
                    }
                }
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    // `DOUBLE PRECISION` vs plain `DOUBLE(p[, s])`.
                    if self.parse_keyword(Keyword::PRECISION) {
                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoublePrecisionUnsigned)
                        } else {
                            Ok(DataType::DoublePrecision)
                        }
                    } else {
                        let precision = self.parse_exact_number_optional_precision_scale()?;

                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoubleUnsigned(precision))
                        } else {
                            Ok(DataType::Double(precision))
                        }
                    }
                }
                // Integer types: optional display width, optional UNSIGNED,
                // and (where the dialect allows) a no-op SIGNED suffix.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            // SIGNED is the default; consume and discard it.
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                // Single-keyword (un)signed integer aliases.
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // Character/string types.
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                // Binary types.
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                // Date/time types.
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // parse_datetime_64 re-reads the keyword, so back up first.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    // Optional `WITH TIME ZONE` / `WITHOUT TIME ZONE` suffix.
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => {
                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
                }
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => {
                    // Only parse fields/precision where the dialect supports
                    // `INTERVAL` type options.
                    if self.dialect.supports_interval_options() {
                        let fields = self.maybe_parse_optional_interval_fields()?;
                        let precision = self.parse_optional_precision()?;
                        Ok(DataType::Interval { fields, precision })
                    } else {
                        Ok(DataType::Interval {
                            fields: None,
                            precision: None,
                        })
                    }
                }
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    // ClickHouse FixedString(N): the length is mandatory.
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // Exact-numeric types with optional precision and scale.
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecimalUnsigned(precision))
                    } else {
                        Ok(DataType::Decimal(precision))
                    }
                }
                Keyword::DEC => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecUnsigned(precision))
                    } else {
                        Ok(DataType::Dec(precision))
                    }
                }
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                // Enum/collection types.
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                Keyword::ARRAY => {
                    if self.dialect.supports_array_typedef_without_element_type() {
                        // Bare `ARRAY` with no element type.
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        // ClickHouse `Array(T)`.
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        // `ARRAY<T>`; the inner parse may have already eaten
                        // our closing `>` as part of a `>>` token.
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                // Dialect-specific structured types.
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
                {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                Keyword::SETOF => {
                    // Postgres-style `SETOF <type>`: recurse for the inner type.
                    let inner = self.parse_data_type()?;
                    Ok(DataType::SetOf(Box::new(inner)))
                }
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                Keyword::TABLE => {
                    // `TABLE` return type with an optional column list.
                    if self.peek_token_ref().token == Token::LParen {
                        let columns = self.parse_returns_table_columns()?;
                        Ok(DataType::Table(Some(columns)))
                    } else {
                        Ok(DataType::Table(None))
                    }
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsVector)
                }
                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsQuery)
                }
                _ => {
                    // Not a known type keyword: treat as a custom/user-defined
                    // type name with optional modifiers, e.g. `mytype(1, 'a')`.
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Postgres-style `T[]` / `T[n]` array suffixes, possibly repeated for
        // multidimensional arrays.
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                // Optional fixed size, e.g. `INT[3]`.
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
13012
    /// Parse one column of a `RETURNS TABLE (...)` clause; currently
    /// identical to an ordinary column definition.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
13016
13017 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13018 self.expect_token(&Token::LParen)?;
13019 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13020 self.expect_token(&Token::RParen)?;
13021 Ok(columns)
13022 }
13023
13024 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13026 self.expect_token(&Token::LParen)?;
13027 let mut values = Vec::new();
13028 loop {
13029 let next_token = self.next_token();
13030 match next_token.token {
13031 Token::SingleQuotedString(value) => values.push(value),
13032 _ => self.expected("a string", next_token)?,
13033 }
13034 let next_token = self.next_token();
13035 match next_token.token {
13036 Token::Comma => (),
13037 Token::RParen => break,
13038 _ => self.expected(", or }", next_token)?,
13039 }
13040 }
13041 Ok(values)
13042 }
13043
13044 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13046 let ident = self.parse_identifier()?;
13047 self.expect_keyword_is(Keyword::AS)?;
13048 let alias = self.parse_identifier()?;
13049 Ok(IdentWithAlias { ident, alias })
13050 }
13051
13052 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13054 let ident = self.parse_identifier()?;
13055 let _after_as = self.parse_keyword(Keyword::AS);
13056 let alias = self.parse_identifier()?;
13057 Ok(IdentWithAlias { ident, alias })
13058 }
13059
13060 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13062 self.parse_comma_separated(|parser| {
13063 parser.expect_token(&Token::LParen)?;
13064 let query = parser.parse_query()?;
13065 parser.expect_token(&Token::RParen)?;
13066 Ok(*query)
13067 })
13068 }
13069
13070 fn parse_distinct_required_set_quantifier(
13072 &mut self,
13073 operator_name: &str,
13074 ) -> Result<SetQuantifier, ParserError> {
13075 let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
13076 match quantifier {
13077 SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
13078 _ => Err(ParserError::ParserError(format!(
13079 "{operator_name} pipe operator requires DISTINCT modifier",
13080 ))),
13081 }
13082 }
13083
13084 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13086 if self.parse_keyword(Keyword::AS) {
13087 Ok(Some(self.parse_identifier()?))
13088 } else {
13089 self.maybe_parse(|parser| parser.parse_identifier())
13091 }
13092 }
13093
13094 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13096 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13097 parser.dialect.is_select_item_alias(explicit, kw, parser)
13098 }
13099 self.parse_optional_alias_inner(None, validator)
13100 }
13101
13102 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
13106 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13107 parser.dialect.is_table_factor_alias(explicit, kw, parser)
13108 }
13109 let explicit = self.peek_keyword(Keyword::AS);
13110 match self.parse_optional_alias_inner(None, validator)? {
13111 Some(name) => {
13112 let columns = self.parse_table_alias_column_defs()?;
13113 Ok(Some(TableAlias {
13114 explicit,
13115 name,
13116 columns,
13117 }))
13118 }
13119 None => Ok(None),
13120 }
13121 }
13122
    /// Parse zero or more MySQL-style table-level index hints, e.g.
    /// `USE INDEX (i1) FOR ORDER BY` or `FORCE KEY (i1, i2)`.
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        // Each hint starts with USE, IGNORE or FORCE; stop when none follows.
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                _ => {
                    // Defensive: parse_one_of_keywords only returns one of the
                    // three keywords listed above.
                    return self.expected_ref(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token_ref(),
                    )
                }
            };
            // Either INDEX or KEY must follow; record which spelling was used.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self
                        .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
                }
            };
            // Optional `FOR { JOIN | ORDER BY | GROUP BY }` scope clause.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected_ref(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token_ref(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // Parenthesized index-name list; may be empty, e.g. `USE INDEX ()`.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token_ref().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
13181
13182 pub fn parse_optional_alias(
13186 &mut self,
13187 reserved_kwds: &[Keyword],
13188 ) -> Result<Option<Ident>, ParserError> {
13189 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13190 false
13191 }
13192 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13193 }
13194
    /// Core of optional-alias parsing shared by the alias helpers.
    ///
    /// Two acceptance mechanisms are combined:
    /// * `reserved_kwds: Some(..)` — a word is accepted when it follows an
    ///   explicit `AS`, or when its keyword is not in the reserved list.
    /// * `validator` — a hook (typically a dialect callback) consulted for
    ///   words the first rule did not accept; it receives whether `AS` was
    ///   present, the word's keyword, and the parser.
    ///
    /// Quoted strings are always accepted as aliases. When no alias is
    /// present, the peeked token is pushed back and `Ok(None)` is returned —
    /// unless an explicit `AS` was consumed, which makes the alias mandatory.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Reserved-keyword rule: this arm only applies when a reserved
            // list was supplied; `AS` overrides the reservation check.
            Token::Word(w)
                if reserved_kwds.is_some()
                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Validator rule: tried only if the arm above did not match.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Quoted aliases are always allowed.
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                if after_as {
                    // An explicit AS must be followed by an identifier.
                    return self.expected("an identifier after AS", next_token);
                }
                // No alias: back up over the token we consumed.
                self.prev_token();
                Ok(None)
            }
        }
    }
13239
    /// Parse an optional `GROUP BY` clause, including `GROUP BY ALL`,
    /// `WITH ROLLUP/CUBE/TOTALS` modifiers (where the dialect supports them)
    /// and `GROUPING SETS (...)`.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` has no expression list; otherwise parse one.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Collect any number of `WITH ROLLUP|CUBE|TOTALS` modifiers.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        _ => {
                            // Defensive: expect_one_of_keywords only returns
                            // one of the three keywords listed above.
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                // Each grouping set is either a parenthesized tuple or a
                // single expression.
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            // `expressions == None` encodes the ALL form.
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13296
13297 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
13299 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13300 let order_by =
13301 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
13302 let order_by_options = self.parse_order_by_options()?;
13303 OrderBy {
13304 kind: OrderByKind::All(order_by_options),
13305 interpolate: None,
13306 }
13307 } else {
13308 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13309 let interpolate = if self.dialect.supports_interpolate() {
13310 self.parse_interpolations()?
13311 } else {
13312 None
13313 };
13314 OrderBy {
13315 kind: OrderByKind::Expressions(exprs),
13316 interpolate,
13317 }
13318 };
13319 Ok(Some(order_by))
13320 } else {
13321 Ok(None)
13322 }
13323 }
13324
    /// Parse an optional LIMIT/OFFSET clause in any accepted order:
    /// `OFFSET .. LIMIT ..`, `LIMIT .. OFFSET ..`, the MySQL shorthand
    /// `LIMIT <offset>, <limit>`, and `LIMIT .. BY ..` (where supported).
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may come before LIMIT.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        // `limit` is doubly optional: the outer None means no LIMIT keyword
        // was seen; the inner None means LIMIT appeared without a bounding
        // expression.
        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // `LIMIT <offset>, <limit>` shorthand — only valid when no
            // standalone OFFSET preceded and a first expression was parsed.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some()
                && self.consume_token(&Token::Comma)
            {
                // ok_or_else cannot fire here given the is_some() guard
                // above; it exists to avoid an unwrap.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // `LIMIT n BY expr, ...` (e.g. ClickHouse-style), where supported.
            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT, if not already given.
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // Only emit a clause if something meaningful was parsed; a bare
        // LIMIT with no expression (limit == Some(None)) alone yields None.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
13376
13377 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13380 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13381 let fn_name = self.parse_object_name(false)?;
13382 self.parse_function_call(fn_name)
13383 .map(TableObject::TableFunction)
13384 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13385 self.parse_parenthesized(|p| p.parse_query())
13386 .map(TableObject::TableQuery)
13387 } else {
13388 self.parse_object_name(false).map(TableObject::TableName)
13389 }
13390 }
13391
    /// Parse a possibly-qualified object name such as `db.schema.table`.
    /// `in_table_clause` indicates the name appears in a table position,
    /// which enables BigQuery-specific handling in the inner parser.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13401
    /// Core object-name parsing behind [`Parser::parse_object_name`].
    ///
    /// * `in_table_clause` — the name appears in a table position; BigQuery
    ///   then accepts unquoted hyphenated identifiers.
    /// * `allow_wildcards` — permit `*` as a name part.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names: hyphenated parts may end with a period
            // already consumed by the identifier parser (`end_with_period`).
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // A `*` part, stored as an identifier spelled "*".
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // Double-dot notation (`db..table`): insert an empty part
                    // for the omitted middle qualifier.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a function call as a name part; the
                    // dialect hook decides which identifiers qualify.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery: a single (possibly quoted) part may itself contain
        // periods; split any such part into separate identifier parts,
        // preserving the quote style and span of the original.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13498
13499 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
13501 let mut idents = vec![];
13502 loop {
13503 let token = self.peek_token_ref();
13504 match &token.token {
13505 Token::Word(w) => {
13506 idents.push(w.to_ident(token.span));
13507 }
13508 Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
13509 break
13510 }
13511 _ => {}
13512 }
13513 self.advance_token();
13514 }
13515 Ok(idents)
13516 }
13517
13518 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
13558 let mut idents = vec![];
13559
13560 let next_token = self.next_token();
13562 match next_token.token {
13563 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13564 Token::EOF => {
13565 return Err(ParserError::ParserError(
13566 "Empty input when parsing identifier".to_string(),
13567 ))?
13568 }
13569 token => {
13570 return Err(ParserError::ParserError(format!(
13571 "Unexpected token in identifier: {token}"
13572 )))?
13573 }
13574 };
13575
13576 loop {
13578 match self.next_token().token {
13579 Token::Period => {
13581 let next_token = self.next_token();
13582 match next_token.token {
13583 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13584 Token::EOF => {
13585 return Err(ParserError::ParserError(
13586 "Trailing period in identifier".to_string(),
13587 ))?
13588 }
13589 token => {
13590 return Err(ParserError::ParserError(format!(
13591 "Unexpected token following period in identifier: {token}"
13592 )))?
13593 }
13594 }
13595 }
13596 Token::EOF => break,
13597 token => {
13598 return Err(ParserError::ParserError(format!(
13599 "Unexpected token in identifier: {token}"
13600 )))?;
13601 }
13602 }
13603 }
13604
13605 Ok(idents)
13606 }
13607
13608 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
13610 let next_token = self.next_token();
13611 match next_token.token {
13612 Token::Word(w) => Ok(w.into_ident(next_token.span)),
13613 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
13614 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
13615 _ => self.expected("identifier", next_token),
13616 }
13617 }
13618
    /// Parses an identifier that may contain hyphens (BigQuery-style, e.g.
    /// `my-project.dataset.table`). Returns the assembled identifier and a
    /// flag that is `true` when a trailing period was already consumed as part
    /// of a number token (e.g. `123.`), so the caller knows a `.` was seen.
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                // Only unquoted words may absorb hyphenated continuations.
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                if quote_style_is_none {
                    // Greedily absorb `-<word-or-number>` pairs; the no-skip
                    // peek ensures no whitespace separates the parts.
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // A number token ending in `.` means the
                                // tokenizer swallowed the separating period
                                // (e.g. `123.` in `foo-123.bar`).
                                if s.ends_with('.') {
                                    // Accept only a pure integer before the dot.
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    // Report that the period was consumed.
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A numeric tail must be followed by whitespace
                                // or EOF unless a `.` continues the name.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    if requires_whitespace {
                        // Enforce the whitespace/EOF requirement noted above.
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to the regular identifier parser.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
13696
13697 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
13699 if self.consume_token(&Token::LParen) {
13700 if self.peek_token_ref().token == Token::RParen {
13701 self.next_token();
13702 Ok(vec![])
13703 } else {
13704 let cols = self.parse_comma_separated_with_trailing_commas(
13705 Parser::parse_view_column,
13706 self.dialect.supports_column_definition_trailing_commas(),
13707 Self::is_reserved_for_column_alias,
13708 )?;
13709 self.expect_token(&Token::RParen)?;
13710 Ok(cols)
13711 }
13712 } else {
13713 Ok(vec![])
13714 }
13715 }
13716
13717 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
13719 let name = self.parse_identifier()?;
13720 let options = self.parse_view_column_options()?;
13721 let data_type = if dialect_of!(self is ClickHouseDialect) {
13722 Some(self.parse_data_type()?)
13723 } else {
13724 None
13725 };
13726 Ok(ViewColumnDef {
13727 name,
13728 data_type,
13729 options,
13730 })
13731 }
13732
13733 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
13734 let mut options = Vec::new();
13735 loop {
13736 let option = self.parse_optional_column_option()?;
13737 if let Some(option) = option {
13738 options.push(option);
13739 } else {
13740 break;
13741 }
13742 }
13743 if options.is_empty() {
13744 Ok(None)
13745 } else if self.dialect.supports_space_separated_column_options() {
13746 Ok(Some(ColumnOptions::SpaceSeparated(options)))
13747 } else {
13748 Ok(Some(ColumnOptions::CommaSeparated(options)))
13749 }
13750 }
13751
13752 pub fn parse_parenthesized_column_list(
13755 &mut self,
13756 optional: IsOptional,
13757 allow_empty: bool,
13758 ) -> Result<Vec<Ident>, ParserError> {
13759 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
13760 }
13761
13762 pub fn parse_parenthesized_compound_identifier_list(
13764 &mut self,
13765 optional: IsOptional,
13766 allow_empty: bool,
13767 ) -> Result<Vec<Expr>, ParserError> {
13768 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13769 Ok(Expr::CompoundIdentifier(
13770 p.parse_period_separated(|p| p.parse_identifier())?,
13771 ))
13772 })
13773 }
13774
13775 fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
13778 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
13779 p.parse_create_index_expr()
13780 })
13781 }
13782
13783 pub fn parse_parenthesized_qualified_column_list(
13786 &mut self,
13787 optional: IsOptional,
13788 allow_empty: bool,
13789 ) -> Result<Vec<ObjectName>, ParserError> {
13790 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13791 p.parse_object_name(true)
13792 })
13793 }
13794
13795 fn parse_parenthesized_column_list_inner<F, T>(
13798 &mut self,
13799 optional: IsOptional,
13800 allow_empty: bool,
13801 mut f: F,
13802 ) -> Result<Vec<T>, ParserError>
13803 where
13804 F: FnMut(&mut Parser) -> Result<T, ParserError>,
13805 {
13806 if self.consume_token(&Token::LParen) {
13807 if allow_empty && self.peek_token_ref().token == Token::RParen {
13808 self.next_token();
13809 Ok(vec![])
13810 } else {
13811 let cols = self.parse_comma_separated(|p| f(p))?;
13812 self.expect_token(&Token::RParen)?;
13813 Ok(cols)
13814 }
13815 } else if optional == Optional {
13816 Ok(vec![])
13817 } else {
13818 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
13819 }
13820 }
13821
13822 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
13824 if self.consume_token(&Token::LParen) {
13825 let cols = self.parse_comma_separated(|p| {
13826 let name = p.parse_identifier()?;
13827 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
13828 Ok(TableAliasColumnDef { name, data_type })
13829 })?;
13830 self.expect_token(&Token::RParen)?;
13831 Ok(cols)
13832 } else {
13833 Ok(vec![])
13834 }
13835 }
13836
13837 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
13839 self.expect_token(&Token::LParen)?;
13840 let n = self.parse_literal_uint()?;
13841 self.expect_token(&Token::RParen)?;
13842 Ok(n)
13843 }
13844
13845 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
13847 if self.consume_token(&Token::LParen) {
13848 let n = self.parse_literal_uint()?;
13849 self.expect_token(&Token::RParen)?;
13850 Ok(Some(n))
13851 } else {
13852 Ok(None)
13853 }
13854 }
13855
    /// Parses the optional fields qualifier of an INTERVAL type, e.g.
    /// `YEAR`, `DAY TO SECOND`, `MINUTE TO SECOND`. Returns `Ok(None)` when
    /// none of the field keywords follows.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            Some(Keyword::YEAR) => {
                // `YEAR [TO MONTH]`
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            Some(Keyword::DAY) => {
                // `DAY [TO HOUR | MINUTE | SECOND]`
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        _ => {
                            // Defensive: expect_one_of_keywords limits the set,
                            // so step back and report the unexpected token.
                            self.prev_token();
                            self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            Some(Keyword::HOUR) => {
                // `HOUR [TO MINUTE | SECOND]`
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        _ => {
                            self.prev_token();
                            self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            Some(Keyword::MINUTE) => {
                // `MINUTE [TO SECOND]`
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            Some(_) => {
                // Defensive: the keyword list above should make this unreachable.
                self.prev_token();
                self.expected_ref(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token_ref(),
                )
            }
            None => Ok(None),
        }
    }
13934
13935 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
13943 self.expect_keyword_is(Keyword::DATETIME64)?;
13944 self.expect_token(&Token::LParen)?;
13945 let precision = self.parse_literal_uint()?;
13946 let time_zone = if self.consume_token(&Token::Comma) {
13947 Some(self.parse_literal_string()?)
13948 } else {
13949 None
13950 };
13951 self.expect_token(&Token::RParen)?;
13952 Ok((precision, time_zone))
13953 }
13954
13955 pub fn parse_optional_character_length(
13957 &mut self,
13958 ) -> Result<Option<CharacterLength>, ParserError> {
13959 if self.consume_token(&Token::LParen) {
13960 let character_length = self.parse_character_length()?;
13961 self.expect_token(&Token::RParen)?;
13962 Ok(Some(character_length))
13963 } else {
13964 Ok(None)
13965 }
13966 }
13967
13968 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
13970 if self.consume_token(&Token::LParen) {
13971 let binary_length = self.parse_binary_length()?;
13972 self.expect_token(&Token::RParen)?;
13973 Ok(Some(binary_length))
13974 } else {
13975 Ok(None)
13976 }
13977 }
13978
13979 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
13981 if self.parse_keyword(Keyword::MAX) {
13982 return Ok(CharacterLength::Max);
13983 }
13984 let length = self.parse_literal_uint()?;
13985 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
13986 Some(CharLengthUnits::Characters)
13987 } else if self.parse_keyword(Keyword::OCTETS) {
13988 Some(CharLengthUnits::Octets)
13989 } else {
13990 None
13991 };
13992 Ok(CharacterLength::IntegerLength { length, unit })
13993 }
13994
13995 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
13997 if self.parse_keyword(Keyword::MAX) {
13998 return Ok(BinaryLength::Max);
13999 }
14000 let length = self.parse_literal_uint()?;
14001 Ok(BinaryLength::IntegerLength { length })
14002 }
14003
14004 pub fn parse_optional_precision_scale(
14006 &mut self,
14007 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14008 if self.consume_token(&Token::LParen) {
14009 let n = self.parse_literal_uint()?;
14010 let scale = if self.consume_token(&Token::Comma) {
14011 Some(self.parse_literal_uint()?)
14012 } else {
14013 None
14014 };
14015 self.expect_token(&Token::RParen)?;
14016 Ok((Some(n), scale))
14017 } else {
14018 Ok((None, None))
14019 }
14020 }
14021
14022 pub fn parse_exact_number_optional_precision_scale(
14024 &mut self,
14025 ) -> Result<ExactNumberInfo, ParserError> {
14026 if self.consume_token(&Token::LParen) {
14027 let precision = self.parse_literal_uint()?;
14028 let scale = if self.consume_token(&Token::Comma) {
14029 Some(self.parse_signed_integer()?)
14030 } else {
14031 None
14032 };
14033
14034 self.expect_token(&Token::RParen)?;
14035
14036 match scale {
14037 None => Ok(ExactNumberInfo::Precision(precision)),
14038 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14039 }
14040 } else {
14041 Ok(ExactNumberInfo::None)
14042 }
14043 }
14044
14045 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14047 let is_negative = self.consume_token(&Token::Minus);
14048
14049 if !is_negative {
14050 let _ = self.consume_token(&Token::Plus);
14051 }
14052
14053 let current_token = self.peek_token_ref();
14054 match ¤t_token.token {
14055 Token::Number(s, _) => {
14056 let s = s.clone();
14057 let span_start = current_token.span.start;
14058 self.advance_token();
14059 let value = Self::parse::<i64>(s, span_start)?;
14060 Ok(if is_negative { -value } else { value })
14061 }
14062 _ => self.expected_ref("number", current_token),
14063 }
14064 }
14065
14066 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
14068 if self.consume_token(&Token::LParen) {
14069 let mut modifiers = Vec::new();
14070 loop {
14071 let next_token = self.next_token();
14072 match next_token.token {
14073 Token::Word(w) => modifiers.push(w.to_string()),
14074 Token::Number(n, _) => modifiers.push(n),
14075 Token::SingleQuotedString(s) => modifiers.push(s),
14076
14077 Token::Comma => {
14078 continue;
14079 }
14080 Token::RParen => {
14081 break;
14082 }
14083 _ => self.expected("type modifiers", next_token)?,
14084 }
14085 }
14086
14087 Ok(Some(modifiers))
14088 } else {
14089 Ok(None)
14090 }
14091 }
14092
14093 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14095 where
14096 F: FnOnce(Box<DataType>) -> DataType,
14097 {
14098 self.expect_token(&Token::LParen)?;
14099 let inside_type = self.parse_data_type()?;
14100 self.expect_token(&Token::RParen)?;
14101 Ok(parent_type(inside_type.into()))
14102 }
14103
14104 fn parse_delete_setexpr_boxed(
14108 &mut self,
14109 delete_token: TokenWithSpan,
14110 ) -> Result<Box<SetExpr>, ParserError> {
14111 Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
14112 }
14113
    /// Parses a DELETE statement. The DELETE keyword token has already been
    /// consumed by the caller and is passed in for span tracking.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Distinguish `DELETE FROM ...` from the multi-table form
        // `DELETE t1, t2 FROM ...`; BigQuery/Oracle/Generic additionally
        // allow omitting the FROM keyword entirely.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        // Optional OUTPUT clause (see maybe_parse_output_clause).
        let output = self.maybe_parse_output_clause()?;

        // Optional clauses, parsed in fixed order:
        // USING, WHERE, RETURNING, ORDER BY, LIMIT.
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            // Record whether FROM was written so the AST round-trips exactly.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
14178
14179 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
14182 let modifier_keyword =
14183 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
14184
14185 let id = self.parse_literal_uint()?;
14186
14187 let modifier = match modifier_keyword {
14188 Some(Keyword::CONNECTION) => Some(KillType::Connection),
14189 Some(Keyword::QUERY) => Some(KillType::Query),
14190 Some(Keyword::MUTATION) => {
14191 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
14192 Some(KillType::Mutation)
14193 } else {
14194 self.expected_ref(
14195 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
14196 self.peek_token_ref(),
14197 )?
14198 }
14199 }
14200 _ => None,
14201 };
14202
14203 Ok(Statement::Kill { modifier, id })
14204 }
14205
    /// Parses the body of an EXPLAIN/DESCRIBE statement. If a statement can
    /// be parsed after the modifiers, a `Statement::Explain` is produced;
    /// otherwise the remaining input is treated as a table name and a
    /// `Statement::ExplainTable` is returned.
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options are only recognized directly after
        // EXPLAIN (not DESCRIBE) and only for dialects that support them.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            // Classic modifier order: [ANALYZE] [VERBOSE] [FORMAT <kind>].
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // Nesting EXPLAIN inside EXPLAIN is rejected explicitly.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // No statement parsed: describe a table instead.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    // Only consume TABLE when the dialect requires it.
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14276
    /// Parses a full query: an optional WITH clause followed by either a DML
    /// statement usable as a query body (INSERT/UPDATE/DELETE/MERGE) or a
    /// regular query body with its trailing clauses (ORDER BY, LIMIT,
    /// SETTINGS, FETCH, FOR/locking, FORMAT, pipe operators).
    ///
    /// Guarded against runaway recursion both by the crate's recursion
    /// counter and, when the feature is enabled, stack-growth protection.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Decrement the depth budget; restored when the guard drops.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            // get_current_token points at the WITH keyword just consumed.
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            // Regular query: body first, then trailing clauses in fixed order.
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // FOR may introduce either a FOR XML/JSON/BROWSE clause (at most
            // one, terminating the loop) or repeated locking clauses.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // Trailing FORMAT clause for dialects that support it.
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14408
    /// Parses a sequence of pipe operators (`|> <KEYWORD> ...`), consuming
    /// operators for as long as the `|>` token repeats. Each operator keyword
    /// dispatches to its own sub-parse and appends one `PipeOperator`.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            // The keyword set here must stay in sync with the match below.
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    // `LIMIT <expr> [OFFSET <expr>]`
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // Aggregate expressions may be omitted when GROUP BY
                    // follows immediately.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::INTERSECT => {
                    // INTERSECT/EXCEPT require an explicit DISTINCT-style
                    // quantifier in pipe syntax.
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    // CALL must resolve to a function expression.
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // `PIVOT ( aggs FOR col IN ( ... ) ) [alias]`
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // `UNPIVOT ( value FOR name IN ( cols ) ) [alias]`
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Step back so parse_joins sees the join keyword itself.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                unhandled => {
                    // Defensive: expect_one_of_keywords restricts kw to the
                    // list above, so this arm should be unreachable.
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
14627
14628 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
14629 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
14630 {
14631 let key_values = self.parse_comma_separated(|p| {
14632 let key = p.parse_identifier()?;
14633 p.expect_token(&Token::Eq)?;
14634 let value = p.parse_expr()?;
14635 Ok(Setting { key, value })
14636 })?;
14637 Some(key_values)
14638 } else {
14639 None
14640 };
14641 Ok(settings)
14642 }
14643
14644 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
14646 if self.parse_keyword(Keyword::XML) {
14647 Ok(Some(self.parse_for_xml()?))
14648 } else if self.parse_keyword(Keyword::JSON) {
14649 Ok(Some(self.parse_for_json()?))
14650 } else if self.parse_keyword(Keyword::BROWSE) {
14651 Ok(Some(ForClause::Browse))
14652 } else {
14653 Ok(None)
14654 }
14655 }
14656
    /// Parses the body of a `FOR XML` clause: a mode (RAW | AUTO | EXPLICIT |
    /// PATH, with an optional parenthesized element name for RAW and PATH),
    /// followed by zero or more comma-separated options (ELEMENTS,
    /// BINARY BASE64, ROOT('...'), TYPE).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            // `RAW [('element')]`
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            // `PATH [('element')]`
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        // Options are comma-led; unrecognized option words are skipped
        // silently (only the comma is consumed).
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        while self.peek_token_ref().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                self.expect_keyword_is(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
14711
14712 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
14714 let for_json = if self.parse_keyword(Keyword::AUTO) {
14715 ForJson::Auto
14716 } else if self.parse_keyword(Keyword::PATH) {
14717 ForJson::Path
14718 } else {
14719 return Err(ParserError::ParserError(
14720 "Expected FOR JSON [AUTO | PATH ]".to_string(),
14721 ));
14722 };
14723 let mut root = None;
14724 let mut include_null_values = false;
14725 let mut without_array_wrapper = false;
14726 while self.peek_token_ref().token == Token::Comma {
14727 self.next_token();
14728 if self.parse_keyword(Keyword::ROOT) {
14729 self.expect_token(&Token::LParen)?;
14730 root = Some(self.parse_literal_string()?);
14731 self.expect_token(&Token::RParen)?;
14732 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
14733 include_null_values = true;
14734 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
14735 without_array_wrapper = true;
14736 }
14737 }
14738 Ok(ForClause::Json {
14739 for_json,
14740 root,
14741 include_null_values,
14742 without_array_wrapper,
14743 })
14744 }
14745
    /// Parses a single CTE: `name [( col, ... )] AS ( query )`, or — for
    /// dialects where AS is optional — `name ( query )` directly.
    /// A trailing `FROM <ident>` may follow the body in some branches.
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // When AS may be omitted, speculatively try `name ( query )` first;
        // maybe_parse backtracks if the parenthesized query doesn't parse.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                // Optional trailing `FROM <ident>` in this shorthand form.
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Standard form: an optional column alias list, then AS
        // (mandatory unless the dialect allows omitting it).
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // Postgres-only: `AS [NOT] MATERIALIZED` hint before the body.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        // The closing paren token is kept so the AST can report spans.
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        // Dialect-gated trailing `FROM <ident>` after the CTE body.
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
14820
    /// Parses a query body: a SELECT (or FROM-first SELECT), parenthesized
    /// subquery, VALUES/VALUE list, or `TABLE` expression, then folds in
    /// any following set operations of precedence greater than
    /// `precedence` via [`Self::parse_remaining_set_exprs`].
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // Parenthesized subquery: `( <query> )`.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, false)?)
        } else if self.parse_keyword(Keyword::VALUE) {
            // Singular `VALUE` spelling; second flag records it was VALUE.
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, true)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected_ref(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token_ref(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
14858
14859 fn parse_remaining_set_exprs(
14863 &mut self,
14864 mut expr: SetExpr,
14865 precedence: u8,
14866 ) -> Result<Box<SetExpr>, ParserError> {
14867 loop {
14868 let op = self.parse_set_operator(&self.peek_token().token);
14870 let next_precedence = match op {
14871 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
14873 10
14874 }
14875 Some(SetOperator::Intersect) => 20,
14877 None => break,
14879 };
14880 if precedence >= next_precedence {
14881 break;
14882 }
14883 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
14885 expr = SetExpr::SetOperation {
14886 left: Box::new(expr),
14887 op: op.unwrap(),
14888 set_quantifier,
14889 right: self.parse_query_body(next_precedence)?,
14890 };
14891 }
14892
14893 Ok(expr.into())
14894 }
14895
14896 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
14898 match token {
14899 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
14900 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
14901 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
14902 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
14903 _ => None,
14904 }
14905 }
14906
14907 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
14909 match op {
14910 Some(
14911 SetOperator::Except
14912 | SetOperator::Intersect
14913 | SetOperator::Union
14914 | SetOperator::Minus,
14915 ) => {
14916 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
14917 SetQuantifier::DistinctByName
14918 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
14919 SetQuantifier::ByName
14920 } else if self.parse_keyword(Keyword::ALL) {
14921 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
14922 SetQuantifier::AllByName
14923 } else {
14924 SetQuantifier::All
14925 }
14926 } else if self.parse_keyword(Keyword::DISTINCT) {
14927 SetQuantifier::Distinct
14928 } else {
14929 SetQuantifier::None
14930 }
14931 }
14932 _ => SetQuantifier::None,
14933 }
14934 }
14935
    /// Parses the body of a SELECT statement (projection, FROM, WHERE,
    /// GROUP BY, HAVING, WINDOW/QUALIFY, ...), including dialect-specific
    /// FROM-first forms where `FROM` precedes — or entirely replaces —
    /// the `SELECT` keyword. Clause order here mirrors the grammar, so
    /// statement order is significant throughout.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // FROM-first dialects: `FROM t [SELECT ...]`. If no SELECT follows
        // the table list, return an otherwise-empty Select immediately.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                return Ok(Select {
                    // No SELECT keyword exists, so anchor on the FROM token.
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        let value_table_mode = self.parse_value_table_mode()?;

        // MySQL-style modifier flags; these may also carry the
        // ALL/DISTINCT quantifier (see parse_select_modifiers).
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // Some dialects put TOP before DISTINCT, others after; remember
        // which order was used so the AST can round-trip faithfully.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Dialects with empty projections allow `SELECT FROM t`.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Reuse the table list captured in the FROM-first branch, if any;
        // `from_first` is a plain bool from here on.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive-style `LATERAL VIEW` clauses — any number of them.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                // Aliases end at the next clause keyword.
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        // Dialect-gated PREWHERE clause.
        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; record which
        // came first so the AST round-trips.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15163
    /// Scans raw (non-skipped) tokens for comment-style optimizer hints
    /// (comments whose text starts with an alphanumeric prefix followed by
    /// `+`), consuming every whitespace/comment token it inspects.
    /// Returns an empty vec when the dialect has no hint-comment support.
    fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
        let supports_hints = self.dialect.supports_comment_optimizer_hint();
        if !supports_hints {
            return Ok(vec![]);
        }
        let mut hints = vec![];
        loop {
            // Peek without skipping whitespace so comment tokens are visible.
            let t = self.peek_nth_token_no_skip_ref(0);
            let Token::Whitespace(ws) = &t.token else {
                break;
            };
            match ws {
                Whitespace::SingleLineComment { comment, prefix } => {
                    // Only comments matching the hint shape become hints;
                    // others are consumed and dropped.
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::SingleLine {
                                prefix: prefix.clone(),
                            },
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::MultiLineComment(comment) => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::MultiLine,
                        });
                    }
                    self.next_token_no_skip();
                }
                // Plain whitespace between comments is consumed and ignored.
                Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
                    self.next_token_no_skip();
                }
            }
        }
        Ok(hints)
    }
15213
15214 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15217 let (before_plus, text) = comment.split_once('+')?;
15218 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15219 Some((before_plus.to_string(), text.to_string()))
15220 } else {
15221 None
15222 }
15223 }
15224
    /// Parses MySQL-style SELECT modifier flags (HIGH_PRIORITY,
    /// STRAIGHT_JOIN, SQL_* flags), which may be interleaved with a single
    /// ALL/DISTINCT/DISTINCTROW quantifier. Returns the collected flags
    /// (`None` when no flag was set) and the quantifier, if any.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                // First quantifier keyword: back up one token and let
                // parse_all_or_distinct handle the full form.
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                // DISTINCTROW is recorded as plain DISTINCT.
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                // A second quantifier keyword (guards above fail) is an
                // error; back up so the message points at it.
                _ => {
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Only report a modifier struct when at least one flag was set.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15286
    /// Parses BigQuery value-table modifiers right after SELECT:
    /// `[ALL | DISTINCT] AS {VALUE | STRUCT}`. Returns `None` for other
    /// dialects or when no modifier is present.
    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
        if !dialect_of!(self is BigQueryDialect) {
            return Ok(None);
        }

        // Longer keyword sequences are tried first so e.g. `DISTINCT AS
        // VALUE` is not mis-read as a bare DISTINCT.
        // NOTE(review): relies on parse_keywords backtracking on a
        // partial match — confirm.
        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
            Some(ValueTableMode::DistinctAsValue)
        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
            Some(ValueTableMode::DistinctAsStruct)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
        {
            Some(ValueTableMode::AsValue)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
        {
            Some(ValueTableMode::AsStruct)
        } else if self.parse_keyword(Keyword::AS) {
            // A lone AS with neither VALUE nor STRUCT is an error.
            self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
        } else {
            None
        };

        Ok(mode)
    }
15312
15313 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15317 where
15318 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15319 {
15320 let current_state = self.state;
15321 self.state = state;
15322 let res = f(self);
15323 self.state = current_state;
15324 res
15325 }
15326
    /// Parses any number of `START WITH <expr>` and `CONNECT BY [NOCYCLE]
    /// <expr, ...>` clauses, in whatever order they appear, returning them
    /// in source order.
    pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
        // Typically at most two clauses (one START WITH + one CONNECT BY).
        let mut clauses = Vec::with_capacity(2);
        loop {
            if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
                clauses.push(ConnectByKind::StartWith {
                    // `idx` is the index of the START token, kept for spans.
                    start_token: self.token_at(idx).clone().into(),
                    condition: self.parse_expr()?.into(),
                });
            } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
            {
                clauses.push(ConnectByKind::ConnectBy {
                    connect_token: self.token_at(idx).clone().into(),
                    nocycle: self.parse_keyword(Keyword::NOCYCLE),
                    // NOTE(review): the ConnectBy parser state presumably
                    // alters expression parsing inside the clause (e.g.
                    // PRIOR handling) — confirm against ParserState docs.
                    relationships: self.with_state(ParserState::ConnectBy, |parser| {
                        parser.parse_comma_separated(Parser::parse_expr)
                    })?,
                });
            } else {
                break;
            }
        }
        Ok(clauses)
    }
15351
15352 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15354 let token1 = self.next_token();
15355 let token2 = self.next_token();
15356 let token3 = self.next_token();
15357
15358 let table_name;
15359 let schema_name;
15360 if token2 == Token::Period {
15361 match token1.token {
15362 Token::Word(w) => {
15363 schema_name = w.value;
15364 }
15365 _ => {
15366 return self.expected("Schema name", token1);
15367 }
15368 }
15369 match token3.token {
15370 Token::Word(w) => {
15371 table_name = w.value;
15372 }
15373 _ => {
15374 return self.expected("Table name", token3);
15375 }
15376 }
15377 Ok(Table {
15378 table_name: Some(table_name),
15379 schema_name: Some(schema_name),
15380 })
15381 } else {
15382 match token1.token {
15383 Token::Word(w) => {
15384 table_name = w.value;
15385 }
15386 _ => {
15387 return self.expected("Table name", token1);
15388 }
15389 }
15390 Ok(Table {
15391 table_name: Some(table_name),
15392 schema_name: None,
15393 })
15394 }
15395 }
15396
15397 fn parse_set_role(
15399 &mut self,
15400 modifier: Option<ContextModifier>,
15401 ) -> Result<Statement, ParserError> {
15402 self.expect_keyword_is(Keyword::ROLE)?;
15403
15404 let role_name = if self.parse_keyword(Keyword::NONE) {
15405 None
15406 } else {
15407 Some(self.parse_identifier()?)
15408 };
15409 Ok(Statement::Set(Set::SetRole {
15410 context_modifier: modifier,
15411 role_name,
15412 }))
15413 }
15414
    /// Parses the comma-separated value list of a SET assignment. With
    /// `parenthesized_assignment` the whole list must be wrapped in
    /// parentheses.
    fn parse_set_values(
        &mut self,
        parenthesized_assignment: bool,
    ) -> Result<Vec<Expr>, ParserError> {
        let mut values = vec![];

        if parenthesized_assignment {
            self.expect_token(&Token::LParen)?;
        }

        loop {
            // Prefer a parenthesized subquery value; otherwise accept any
            // expression, and only error if neither parses.
            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
                expr
            } else if let Ok(expr) = self.parse_expr() {
                expr
            } else {
                self.expected_ref("variable value", self.peek_token_ref())?
            };

            values.push(value);
            if self.consume_token(&Token::Comma) {
                continue;
            }

            // End of list: close the parenthesis if one was opened.
            if parenthesized_assignment {
                self.expect_token(&Token::RParen)?;
            }
            return Ok(values);
        }
    }
15445
15446 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15447 let modifier =
15448 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15449
15450 Self::keyword_to_modifier(modifier)
15451 }
15452
    /// Parses one `name {= | TO} value` assignment within a SET statement,
    /// with an optional leading scope modifier.
    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
        let scope = self.parse_context_modifier();

        let name = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            // Parenthesized variable lists are handled in parse_set;
            // failing here lets the caller's maybe_parse backtrack.
            self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
        } else {
            self.parse_object_name(false)?
        };

        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
            return self.expected_ref("assignment operator", self.peek_token_ref());
        }

        let value = self.parse_expr()?;

        Ok(SetAssignment { scope, name, value })
    }
15476
    /// Parses everything after the `SET` keyword: HIVEVAR, role, time
    /// zone, NAMES, transaction characteristics, session authorization,
    /// and plain variable assignments (single, comma-separated multiple,
    /// or parenthesized). Uses maybe_parse/prev_token backtracking, so
    /// branch order is significant.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // A scope modifier only applies to the non-HIVEVAR form.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        // HIVEVAR syntax is `SET HIVEVAR:name = value`.
        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET [scope] ROLE ...` — speculative, backtracks on failure.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // TIME ZONE / TIMEZONE: with an assignment operator it is treated
        // as an ordinary variable, otherwise as the dedicated statement.
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET [SESSION] CHARACTERISTICS AS TRANSACTION ...`.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // SESSION AUTHORIZATION requires an explicit scope modifier.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        // Comma-separated assignment lists (e.g. `SET a = 1, b = 2`).
        if self.dialect.supports_comma_separated_set_assignments() {
            // The scope keyword is re-parsed per assignment, so back up.
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Parenthesized variable lists: `SET (a, b) = (1, 2)`.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // Assignment-free forms like `SET STATISTICS IO ON`: back up one
        // token and hand over to the session-params parser.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
15646
    /// Parses session-parameter statements that follow `SET` without an
    /// assignment operator: `STATISTICS <topic> {ON|OFF}`,
    /// `IDENTITY_INSERT <obj> {ON|OFF}`, `OFFSETS <kw, ...> {ON|OFF}`,
    /// or a generic `name[, ...] <expr>` fallback.
    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::STATISTICS) {
            let topic = match self.parse_one_of_keywords(&[
                Keyword::IO,
                Keyword::PROFILE,
                Keyword::TIME,
                Keyword::XML,
            ]) {
                Some(Keyword::IO) => SessionParamStatsTopic::IO,
                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
                _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
            };
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
                    topic,
                    value,
                }))
                .into(),
            )
        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
            let obj = self.parse_object_name(false)?;
            let value = self.parse_session_param_value()?;
            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
                SetSessionParamIdentityInsert { obj, value },
            ))
            .into())
        } else if self.parse_keyword(Keyword::OFFSETS) {
            // OFFSETS takes a comma-separated list of raw word tokens,
            // stored textually.
            let keywords = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match &next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("SQL keyword", next_token),
                }
            })?;
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
                    keywords,
                    value,
                }))
                .into(),
            )
        } else {
            // Generic fallback: one or more names followed by an arbitrary
            // expression, both stored in textual form.
            let names = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("Session param name", next_token),
                }
            })?;
            let value = self.parse_expr()?.to_string();
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
                    names,
                    value,
                }))
                .into(),
            )
        }
    }
15711
15712 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
15713 if self.parse_keyword(Keyword::ON) {
15714 Ok(SessionParamValue::On)
15715 } else if self.parse_keyword(Keyword::OFF) {
15716 Ok(SessionParamValue::Off)
15717 } else {
15718 self.expected_ref("ON or OFF", self.peek_token_ref())
15719 }
15720 }
15721
    /// Parses the statement following `SHOW`, dispatching on the next
    /// keyword(s). Leading flags (TERSE / EXTENDED / FULL / SESSION /
    /// GLOBAL / EXTERNAL) are consumed up front and forwarded to the
    /// matching sub-parser.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // EXTENDED/FULL were consumed but no SHOW form above matched.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): if VARIABLES parses but the dialect check
            // fails, the keyword is already consumed before falling
            // through to later branches — confirm this is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <identifier chain>` as a variable lookup.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
15785
15786 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
15787 Ok(Statement::ShowCharset(ShowCharset {
15789 is_shorthand,
15790 filter: self.parse_show_statement_filter()?,
15791 }))
15792 }
15793
15794 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
15795 let history = self.parse_keyword(Keyword::HISTORY);
15796 let show_options = self.parse_show_stmt_options()?;
15797 Ok(Statement::ShowCatalogs {
15798 terse,
15799 history,
15800 show_options,
15801 })
15802 }
15803
15804 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
15805 let history = self.parse_keyword(Keyword::HISTORY);
15806 let show_options = self.parse_show_stmt_options()?;
15807 Ok(Statement::ShowDatabases {
15808 terse,
15809 history,
15810 show_options,
15811 })
15812 }
15813
15814 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
15815 let history = self.parse_keyword(Keyword::HISTORY);
15816 let show_options = self.parse_show_stmt_options()?;
15817 Ok(Statement::ShowSchemas {
15818 terse,
15819 history,
15820 show_options,
15821 })
15822 }
15823
15824 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
15826 let obj_type = match self.expect_one_of_keywords(&[
15827 Keyword::TABLE,
15828 Keyword::TRIGGER,
15829 Keyword::FUNCTION,
15830 Keyword::PROCEDURE,
15831 Keyword::EVENT,
15832 Keyword::VIEW,
15833 ])? {
15834 Keyword::TABLE => Ok(ShowCreateObject::Table),
15835 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
15836 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
15837 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
15838 Keyword::EVENT => Ok(ShowCreateObject::Event),
15839 Keyword::VIEW => Ok(ShowCreateObject::View),
15840 keyword => Err(ParserError::ParserError(format!(
15841 "Unable to map keyword to ShowCreateObject: {keyword:?}"
15842 ))),
15843 }?;
15844
15845 let obj_name = self.parse_object_name(false)?;
15846
15847 Ok(Statement::ShowCreate { obj_type, obj_name })
15848 }
15849
15850 pub fn parse_show_columns(
15852 &mut self,
15853 extended: bool,
15854 full: bool,
15855 ) -> Result<Statement, ParserError> {
15856 let show_options = self.parse_show_stmt_options()?;
15857 Ok(Statement::ShowColumns {
15858 extended,
15859 full,
15860 show_options,
15861 })
15862 }
15863
15864 fn parse_show_tables(
15865 &mut self,
15866 terse: bool,
15867 extended: bool,
15868 full: bool,
15869 external: bool,
15870 ) -> Result<Statement, ParserError> {
15871 let history = !external && self.parse_keyword(Keyword::HISTORY);
15872 let show_options = self.parse_show_stmt_options()?;
15873 Ok(Statement::ShowTables {
15874 terse,
15875 history,
15876 extended,
15877 full,
15878 external,
15879 show_options,
15880 })
15881 }
15882
15883 fn parse_show_views(
15884 &mut self,
15885 terse: bool,
15886 materialized: bool,
15887 ) -> Result<Statement, ParserError> {
15888 let show_options = self.parse_show_stmt_options()?;
15889 Ok(Statement::ShowViews {
15890 materialized,
15891 terse,
15892 show_options,
15893 })
15894 }
15895
15896 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
15898 let filter = self.parse_show_statement_filter()?;
15899 Ok(Statement::ShowFunctions { filter })
15900 }
15901
15902 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
15904 let filter = self.parse_show_statement_filter()?;
15905 Ok(Statement::ShowCollation { filter })
15906 }
15907
15908 pub fn parse_show_statement_filter(
15910 &mut self,
15911 ) -> Result<Option<ShowStatementFilter>, ParserError> {
15912 if self.parse_keyword(Keyword::LIKE) {
15913 Ok(Some(ShowStatementFilter::Like(
15914 self.parse_literal_string()?,
15915 )))
15916 } else if self.parse_keyword(Keyword::ILIKE) {
15917 Ok(Some(ShowStatementFilter::ILike(
15918 self.parse_literal_string()?,
15919 )))
15920 } else if self.parse_keyword(Keyword::WHERE) {
15921 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
15922 } else {
15923 self.maybe_parse(|parser| -> Result<String, ParserError> {
15924 parser.parse_literal_string()
15925 })?
15926 .map_or(Ok(None), |filter| {
15927 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
15928 })
15929 }
15930 }
15931
    /// Parses a `USE` statement.
    ///
    /// Which object-kind keyword may follow `USE` depends on the dialect:
    /// Hive additionally accepts `USE DEFAULT`; Databricks accepts
    /// `CATALOG`/`DATABASE`/`SCHEMA`; Snowflake accepts
    /// `DATABASE`/`SCHEMA`/`WAREHOUSE`/`ROLE`/`SECONDARY`. For any other
    /// dialect (or when no kind keyword is present) the target is parsed as a
    /// plain object name.
    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
        let parsed_keyword = if dialect_of!(self is HiveDialect) {
            // Hive's `USE DEFAULT` has no object name and returns early.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Statement::Use(Use::Default));
            }
            None
        } else if dialect_of!(self is DatabricksDialect) {
            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
        } else if dialect_of!(self is SnowflakeDialect) {
            self.parse_one_of_keywords(&[
                Keyword::DATABASE,
                Keyword::SCHEMA,
                Keyword::WAREHOUSE,
                Keyword::ROLE,
                Keyword::SECONDARY,
            ])
        } else {
            None
        };

        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
            // `USE SECONDARY ROLES ...` takes a roles clause, not a name.
            self.parse_secondary_roles()?
        } else {
            let obj_name = self.parse_object_name(false)?;
            match parsed_keyword {
                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
                Some(Keyword::DATABASE) => Use::Database(obj_name),
                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
                Some(Keyword::ROLE) => Use::Role(obj_name),
                _ => Use::Object(obj_name),
            }
        };

        Ok(Statement::Use(result))
    }
15971
15972 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
15973 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
15974 if self.parse_keyword(Keyword::NONE) {
15975 Ok(Use::SecondaryRoles(SecondaryRoles::None))
15976 } else if self.parse_keyword(Keyword::ALL) {
15977 Ok(Use::SecondaryRoles(SecondaryRoles::All))
15978 } else {
15979 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
15980 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
15981 }
15982 }
15983
15984 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
15986 let relation = self.parse_table_factor()?;
15987 let joins = self.parse_joins()?;
15991 Ok(TableWithJoins { relation, joins })
15992 }
15993
    /// Parses zero or more join clauses following a table factor.
    ///
    /// Returns an empty vector when the next token does not start a join.
    /// Handles `CROSS JOIN`/`CROSS APPLY`, `OUTER APPLY`, `ASOF JOIN`, and
    /// the keyword-selected family (`[NATURAL] [INNER|LEFT|RIGHT|FULL|SEMI|
    /// ANTI|STRAIGHT_JOIN] JOIN`).
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional `GLOBAL` prefix, applicable to any join kind.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects permit a join constraint on CROSS JOIN;
                // upgrade the operator when the dialect supports it.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // `OUTER APPLY <table factor>` (no join constraint).
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // `ASOF JOIN <factor> MATCH_CONDITION (<expr>) [constraint]`.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) the keyword that selects the join
                // type; non-keywords map to NoKeyword and end the loop below.
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a variant constructor
                // `fn(JoinConstraint) -> JoinOperator`, applied after the
                // relation and constraint have been parsed.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the LEFT/RIGHT keyword that was peeked.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        // Consume ANTI, then require JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        // Consume SEMI, then require JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // Consume FULL, skip an optional OUTER, require JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: no more joins.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // When the dialect does not treat unparenthesized join chains
                // as left-associative, fold the remainder of the chain into a
                // right-nested join.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16162
16163 fn peek_parens_less_nested_join(&self) -> bool {
16164 matches!(
16165 self.peek_token_ref().token,
16166 Token::Word(Word {
16167 keyword: Keyword::JOIN
16168 | Keyword::INNER
16169 | Keyword::LEFT
16170 | Keyword::RIGHT
16171 | Keyword::FULL,
16172 ..
16173 })
16174 )
16175 }
16176
    /// Parses a table factor: the base unit of a `FROM` clause.
    ///
    /// Handles, in order: `LATERAL` derived tables and table functions,
    /// `TABLE(expr)`, parenthesized subqueries and nested joins,
    /// `VALUES (...)` as a table factor (dialect-gated), `UNNEST(...)`,
    /// `JSON_TABLE(...)`, `OPENJSON(...)`, `XMLTABLE(...)`,
    /// `SEMANTIC_VIEW(...)`, stage references starting with `@`, and finally
    /// plain (possibly qualified) table names with their optional suffixes
    /// (PARTITION list, version, call arguments, sample, alias, hints,
    /// PIVOT/UNPIVOT chains, MATCH_RECOGNIZE).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        // Bound recursion depth; deeply nested factors recurse through here.
        let _guard = self.recursion_counter.try_decrease()?;
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL must be followed by a subquery or a function call.
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // `TABLE(<expr>)` table function.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // A left paren introduces either a derived table (subquery) or a
            // parenthesized join. Try the subquery reading first; on failure
            // `maybe_parse` backtracks to just after the paren.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                // A derived table may be followed by PIVOT/UNPIVOT chains.
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            // Not a subquery: parse the parenthesized contents as a table
            // with joins.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // `(a JOIN b ...)` — a genuine nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // Redundant parens around an already-nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if self.dialect.supports_parens_around_table_factor() {
                // `(table_factor)` with no joins — unwrap the parens and
                // attach any trailing alias to the inner factor.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            // An alias both inside and outside the parens is
                            // ambiguous; reject it.
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                self.expected_ref("joined table", self.peek_token_ref())
            }
        } else if self.dialect.supports_values_as_table_factor()
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // `VALUES (...)` used directly as a table factor; wrapped in a
            // derived table with a minimal Query around the value rows.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
                sample: None,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // `UNNEST(expr, ...) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]`.
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // `JSON_TABLE(json_expr, path COLUMNS (...))`.
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Back up over the `(` so the helper can re-consume it.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            // Back up over the `(` so the helper can re-consume it.
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else if self.peek_token_ref().token == Token::AtSign {
            // `@stage`-style reference (Snowflake stage syntax).
            self.parse_snowflake_stage_table_factor()
        } else {
            // Plain table name, optionally followed by a number of suffixes.
            let name = self.parse_object_name(true)?;

            // PartiQL-style path expression after the name.
            let json_path = match &self.peek_token_ref().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Time-travel / version clause, if any.
            let version = self.maybe_parse_table_version()?;

            // Table-valued function call arguments.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Some dialects place TABLESAMPLE before the alias, others after.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            // Index hints (e.g. USE/FORCE/IGNORE INDEX), where supported.
            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // `WITH (<hints>)`; a bare `WITH` not followed by `(` is pushed
            // back so later clauses can consume it.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Any number of chained PIVOT/UNPIVOT transformations.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            // Trailing MATCH_RECOGNIZE clause, where supported.
            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
16513
16514 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16519 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16521
16522 let args = if self.consume_token(&Token::LParen) {
16524 Some(self.parse_table_function_args()?)
16525 } else {
16526 None
16527 };
16528
16529 let alias = self.maybe_parse_table_alias()?;
16530
16531 Ok(TableFactor::Table {
16532 name,
16533 alias,
16534 args,
16535 with_hints: vec![],
16536 version: None,
16537 partitions: vec![],
16538 with_ordinality: false,
16539 json_path: None,
16540 sample: None,
16541 index_hints: vec![],
16542 })
16543 }
16544
16545 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16546 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16547 TableSampleModifier::TableSample
16548 } else if self.parse_keyword(Keyword::SAMPLE) {
16549 TableSampleModifier::Sample
16550 } else {
16551 return Ok(None);
16552 };
16553 self.parse_table_sample(modifier).map(Some)
16554 }
16555
    /// Parses the body of a `TABLESAMPLE`/`SAMPLE` clause; the introducing
    /// keyword has already been consumed (see `maybe_parse_table_sample`).
    ///
    /// Accepts an optional method name (`BERNOULLI`/`ROW`/`SYSTEM`/`BLOCK`),
    /// then either a `BUCKET x OUT OF y [ON expr]` spec or a quantity with an
    /// optional `ROWS`/`PERCENT` unit, then optional `REPEATABLE(n)`/`SEED(n)`
    /// and `OFFSET <expr>` suffixes.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling-method keyword.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // The quantity/bucket spec may or may not be parenthesized; remember
        // which so the matching `)` is required below.
        let parenthesized = self.consume_token(&Token::LParen);

        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            // `BUCKET <n> OUT OF <m> [ON <expr>]` — bucket form only.
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // Quantity form: an expression, or a bare word treated as a
            // placeholder (e.g. a byte-length token such as `100M`).
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic-seed clause, either spelling.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
16650
16651 fn parse_table_sample_seed(
16652 &mut self,
16653 modifier: TableSampleSeedModifier,
16654 ) -> Result<TableSampleSeed, ParserError> {
16655 self.expect_token(&Token::LParen)?;
16656 let value = self.parse_number_value()?;
16657 self.expect_token(&Token::RParen)?;
16658 Ok(TableSampleSeed { modifier, value })
16659 }
16660
16661 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16664 self.expect_token(&Token::LParen)?;
16665 let json_expr = self.parse_expr()?;
16666 let json_path = if self.consume_token(&Token::Comma) {
16667 Some(self.parse_value()?)
16668 } else {
16669 None
16670 };
16671 self.expect_token(&Token::RParen)?;
16672 let columns = if self.parse_keyword(Keyword::WITH) {
16673 self.expect_token(&Token::LParen)?;
16674 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
16675 self.expect_token(&Token::RParen)?;
16676 columns
16677 } else {
16678 Vec::new()
16679 };
16680 let alias = self.maybe_parse_table_alias()?;
16681 Ok(TableFactor::OpenJsonTable {
16682 json_expr,
16683 json_path,
16684 columns,
16685 alias,
16686 })
16687 }
16688
16689 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16690 self.expect_token(&Token::LParen)?;
16691 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
16692 self.expect_token(&Token::LParen)?;
16693 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
16694 self.expect_token(&Token::RParen)?;
16695 self.expect_token(&Token::Comma)?;
16696 namespaces
16697 } else {
16698 vec![]
16699 };
16700 let row_expression = self.parse_expr()?;
16701 let passing = self.parse_xml_passing_clause()?;
16702 self.expect_keyword_is(Keyword::COLUMNS)?;
16703 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
16704 self.expect_token(&Token::RParen)?;
16705 let alias = self.maybe_parse_table_alias()?;
16706 Ok(TableFactor::XmlTable {
16707 namespaces,
16708 row_expression,
16709 passing,
16710 columns,
16711 alias,
16712 })
16713 }
16714
16715 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
16716 let uri = self.parse_expr()?;
16717 self.expect_keyword_is(Keyword::AS)?;
16718 let name = self.parse_identifier()?;
16719 Ok(XmlNamespaceDefinition { uri, name })
16720 }
16721
16722 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
16723 let name = self.parse_identifier()?;
16724
16725 let option = if self.parse_keyword(Keyword::FOR) {
16726 self.expect_keyword(Keyword::ORDINALITY)?;
16727 XmlTableColumnOption::ForOrdinality
16728 } else {
16729 let r#type = self.parse_data_type()?;
16730 let mut path = None;
16731 let mut default = None;
16732
16733 if self.parse_keyword(Keyword::PATH) {
16734 path = Some(self.parse_expr()?);
16735 }
16736
16737 if self.parse_keyword(Keyword::DEFAULT) {
16738 default = Some(self.parse_expr()?);
16739 }
16740
16741 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
16742 if !not_null {
16743 let _ = self.parse_keyword(Keyword::NULL);
16745 }
16746
16747 XmlTableColumnOption::NamedInfo {
16748 r#type,
16749 path,
16750 default,
16751 nullable: !not_null,
16752 }
16753 };
16754 Ok(XmlTableColumn { name, option })
16755 }
16756
16757 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
16758 let mut arguments = vec![];
16759 if self.parse_keyword(Keyword::PASSING) {
16760 loop {
16761 let by_value =
16762 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
16763 let expr = self.parse_expr()?;
16764 let alias = if self.parse_keyword(Keyword::AS) {
16765 Some(self.parse_identifier()?)
16766 } else {
16767 None
16768 };
16769 arguments.push(XmlPassingArgument {
16770 expr,
16771 alias,
16772 by_value,
16773 });
16774 if !self.consume_token(&Token::Comma) {
16775 break;
16776 }
16777 }
16778 }
16779 Ok(XmlPassingClause { arguments })
16780 }
16781
16782 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16784 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
16785 self.expect_token(&Token::LParen)?;
16786
16787 let name = self.parse_object_name(true)?;
16788
16789 let mut dimensions = Vec::new();
16791 let mut metrics = Vec::new();
16792 let mut facts = Vec::new();
16793 let mut where_clause = None;
16794
16795 while self.peek_token_ref().token != Token::RParen {
16796 if self.parse_keyword(Keyword::DIMENSIONS) {
16797 if !dimensions.is_empty() {
16798 return Err(ParserError::ParserError(
16799 "DIMENSIONS clause can only be specified once".to_string(),
16800 ));
16801 }
16802 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16803 } else if self.parse_keyword(Keyword::METRICS) {
16804 if !metrics.is_empty() {
16805 return Err(ParserError::ParserError(
16806 "METRICS clause can only be specified once".to_string(),
16807 ));
16808 }
16809 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16810 } else if self.parse_keyword(Keyword::FACTS) {
16811 if !facts.is_empty() {
16812 return Err(ParserError::ParserError(
16813 "FACTS clause can only be specified once".to_string(),
16814 ));
16815 }
16816 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
16817 } else if self.parse_keyword(Keyword::WHERE) {
16818 if where_clause.is_some() {
16819 return Err(ParserError::ParserError(
16820 "WHERE clause can only be specified once".to_string(),
16821 ));
16822 }
16823 where_clause = Some(self.parse_expr()?);
16824 } else {
16825 let tok = self.peek_token_ref();
16826 return parser_err!(
16827 format!(
16828 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
16829 tok.token
16830 ),
16831 tok.span.start
16832 )?;
16833 }
16834 }
16835
16836 self.expect_token(&Token::RParen)?;
16837
16838 let alias = self.maybe_parse_table_alias()?;
16839
16840 Ok(TableFactor::SemanticView {
16841 name,
16842 dimensions,
16843 metrics,
16844 facts,
16845 where_clause,
16846 alias,
16847 })
16848 }
16849
    /// Parses a `MATCH_RECOGNIZE (...)` clause that follows a table factor,
    /// producing [`TableFactor::MatchRecognize`].
    ///
    /// The `MATCH_RECOGNIZE` keyword has already been consumed by the caller;
    /// this method consumes everything from the opening `(` through the
    /// closing `)`, plus an optional trailing table alias. All sub-clauses
    /// are parsed in the fixed order below, each one optional except
    /// `PATTERN` and `DEFINE`.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional `PARTITION BY <expr, ...>`.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional `ORDER BY <order-expr, ...>`.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional `MEASURES <expr [AS] alias, ...>` — the AS is optional,
        // but an alias identifier is mandatory for each measure.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional `ONE ROW PER MATCH` or
        // `ALL ROWS PER MATCH [SHOW EMPTY MATCHES | OMIT EMPTY MATCHES | WITH UNMATCHED ROWS]`.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional `AFTER MATCH SKIP <option>` — once `AFTER MATCH SKIP` is
        // seen, one of the four options is mandatory; anything else is an
        // error.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory `PATTERN ( <pattern> )`.
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory `DEFINE <symbol AS expr, ...>` — here the AS is required.
        self.expect_keyword_is(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
16954
    /// Parses a primary (unquantified) element of a `MATCH_RECOGNIZE`
    /// pattern: an anchor, an exclusion, a `PERMUTE(...)` list, a
    /// parenthesized group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` — start anchor.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` — end anchor. The tokenizer surfaces a bare `$` as a
            // placeholder token, hence the guard on the placeholder text.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}` — match the symbol but exclude it from output.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // `PERMUTE(a, b, ...)` — only recognized when written as an
            // unquoted word.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // `( <pattern> )` — parenthesized sub-pattern.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: push the token back and parse it as a plain
            // symbol identifier.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
16993
    /// Parses a base pattern followed by any number of postfix repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, `{n,m}`.
    /// Each quantifier wraps the accumulated pattern in another
    /// [`MatchRecognizePattern::Repetition`] layer.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // A bare `?` reaches us as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — NOTE: the match guard consumes
                        // the comma as a side effect when it is present, so
                        // falling through to the `{n}` arm below means no
                        // comma followed the number.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: push the token back and stop looping.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17048
17049 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17050 let mut patterns = vec![self.parse_repetition_pattern()?];
17051 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17052 patterns.push(self.parse_repetition_pattern()?);
17053 }
17054 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17055 Ok([pattern]) => Ok(pattern),
17056 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17057 }
17058 }
17059
17060 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17061 let pattern = self.parse_concat_pattern()?;
17062 if self.consume_token(&Token::Pipe) {
17063 match self.parse_pattern()? {
17064 MatchRecognizePattern::Alternation(mut patterns) => {
17066 patterns.insert(0, pattern);
17067 Ok(MatchRecognizePattern::Alternation(patterns))
17068 }
17069 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17070 }
17071 } else {
17072 Ok(pattern)
17073 }
17074 }
17075
17076 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
17078 if self.dialect.supports_table_versioning() {
17079 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
17080 {
17081 let expr = self.parse_expr()?;
17082 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
17083 } else if self.peek_keyword(Keyword::CHANGES) {
17084 return self.parse_table_version_changes().map(Some);
17085 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
17086 let func_name = self.parse_object_name(true)?;
17087 let func = self.parse_function(func_name)?;
17088 return Ok(Some(TableVersion::Function(func)));
17089 } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
17090 let expr = self.parse_expr()?;
17091 return Ok(Some(TableVersion::TimestampAsOf(expr)));
17092 } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
17093 let expr = Expr::Value(self.parse_number_value()?);
17094 return Ok(Some(TableVersion::VersionAsOf(expr)));
17095 }
17096 }
17097 Ok(None)
17098 }
17099
17100 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
17111 let changes_name = self.parse_object_name(true)?;
17112 let changes = self.parse_function(changes_name)?;
17113 let at_name = self.parse_object_name(true)?;
17114 let at = self.parse_function(at_name)?;
17115 let end = if self.peek_keyword(Keyword::END) {
17116 let end_name = self.parse_object_name(true)?;
17117 Some(self.parse_function(end_name)?)
17118 } else {
17119 None
17120 };
17121 Ok(TableVersion::Changes { changes, at, end })
17122 }
17123
    /// Parses a single column definition inside a
    /// `JSON_TABLE(... COLUMNS(...))` clause.
    ///
    /// Three shapes are handled:
    /// * `NESTED [PATH] '<path>' COLUMNS (...)` — a nested column group,
    /// * `<name> FOR ORDINALITY` — a row-numbering column,
    /// * `<name> <type> [EXISTS] PATH '<path>' [<handling> ON EMPTY|ERROR ...]`.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional here; consume it if present.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            // Nested columns recurse into this same parser.
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?;
        let mut on_empty = None;
        let mut on_error = None;
        // Zero or more `<handling> ON EMPTY` / `<handling> ON ERROR` clauses,
        // in either order (the `ON` is consumed by the helper). A repeated
        // clause silently overwrites the earlier one.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
17167
17168 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17176 let name = self.parse_identifier()?;
17177 let r#type = self.parse_data_type()?;
17178 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17179 self.next_token();
17180 Some(path)
17181 } else {
17182 None
17183 };
17184 let as_json = self.parse_keyword(Keyword::AS);
17185 if as_json {
17186 self.expect_keyword_is(Keyword::JSON)?;
17187 }
17188 Ok(OpenJsonTableColumn {
17189 name,
17190 r#type,
17191 path,
17192 as_json,
17193 })
17194 }
17195
17196 fn parse_json_table_column_error_handling(
17197 &mut self,
17198 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17199 let res = if self.parse_keyword(Keyword::NULL) {
17200 JsonTableColumnErrorHandling::Null
17201 } else if self.parse_keyword(Keyword::ERROR) {
17202 JsonTableColumnErrorHandling::Error
17203 } else if self.parse_keyword(Keyword::DEFAULT) {
17204 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17205 } else {
17206 return Ok(None);
17207 };
17208 self.expect_keyword_is(Keyword::ON)?;
17209 Ok(Some(res))
17210 }
17211
17212 pub fn parse_derived_table_factor(
17214 &mut self,
17215 lateral: IsLateral,
17216 ) -> Result<TableFactor, ParserError> {
17217 let subquery = self.parse_query()?;
17218 self.expect_token(&Token::RParen)?;
17219 let alias = self.maybe_parse_table_alias()?;
17220
17221 let sample = self
17223 .maybe_parse_table_sample()?
17224 .map(TableSampleKind::AfterTableAlias);
17225
17226 Ok(TableFactor::Derived {
17227 lateral: match lateral {
17228 Lateral => true,
17229 NotLateral => false,
17230 },
17231 subquery,
17232 alias,
17233 sample,
17234 })
17235 }
17236
17237 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17260 let expr = self.parse_expr()?;
17261 let alias = if self.parse_keyword(Keyword::AS) {
17262 Some(self.parse_identifier()?)
17263 } else {
17264 None
17265 };
17266
17267 Ok(ExprWithAlias { expr, alias })
17268 }
17269
17270 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17274 let expr = self.parse_expr()?;
17275 let alias = self.parse_identifier_optional_alias()?;
17276 Ok(ExprWithAlias { expr, alias })
17277 }
17278
17279 fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
17281 let function_name = match self.next_token().token {
17282 Token::Word(w) => Ok(w.value),
17283 _ => self.expected_ref("a function identifier", self.peek_token_ref()),
17284 }?;
17285 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
17286 let alias = {
17287 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
17288 kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
17290 }
17291 self.parse_optional_alias_inner(None, validator)?
17292 };
17293 Ok(ExprWithAlias { expr, alias })
17294 }
17295
17296 pub fn parse_pivot_table_factor(
17298 &mut self,
17299 table: TableFactor,
17300 ) -> Result<TableFactor, ParserError> {
17301 self.expect_token(&Token::LParen)?;
17302 let aggregate_functions =
17303 self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
17304 self.expect_keyword_is(Keyword::FOR)?;
17305 let value_column = if self.peek_token_ref().token == Token::LParen {
17306 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17307 p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
17308 })?
17309 } else {
17310 vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
17311 };
17312 self.expect_keyword_is(Keyword::IN)?;
17313
17314 self.expect_token(&Token::LParen)?;
17315 let value_source = if self.parse_keyword(Keyword::ANY) {
17316 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17317 self.parse_comma_separated(Parser::parse_order_by_expr)?
17318 } else {
17319 vec![]
17320 };
17321 PivotValueSource::Any(order_by)
17322 } else if self.peek_sub_query() {
17323 PivotValueSource::Subquery(self.parse_query()?)
17324 } else {
17325 PivotValueSource::List(
17326 self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
17327 )
17328 };
17329 self.expect_token(&Token::RParen)?;
17330
17331 let default_on_null =
17332 if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
17333 self.expect_token(&Token::LParen)?;
17334 let expr = self.parse_expr()?;
17335 self.expect_token(&Token::RParen)?;
17336 Some(expr)
17337 } else {
17338 None
17339 };
17340
17341 self.expect_token(&Token::RParen)?;
17342 let alias = self.maybe_parse_table_alias()?;
17343 Ok(TableFactor::Pivot {
17344 table: Box::new(table),
17345 aggregate_functions,
17346 value_column,
17347 value_source,
17348 default_on_null,
17349 alias,
17350 })
17351 }
17352
17353 pub fn parse_unpivot_table_factor(
17355 &mut self,
17356 table: TableFactor,
17357 ) -> Result<TableFactor, ParserError> {
17358 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17359 self.expect_keyword_is(Keyword::NULLS)?;
17360 Some(NullInclusion::IncludeNulls)
17361 } else if self.parse_keyword(Keyword::EXCLUDE) {
17362 self.expect_keyword_is(Keyword::NULLS)?;
17363 Some(NullInclusion::ExcludeNulls)
17364 } else {
17365 None
17366 };
17367 self.expect_token(&Token::LParen)?;
17368 let value = self.parse_expr()?;
17369 self.expect_keyword_is(Keyword::FOR)?;
17370 let name = self.parse_identifier()?;
17371 self.expect_keyword_is(Keyword::IN)?;
17372 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17373 p.parse_expr_with_alias()
17374 })?;
17375 self.expect_token(&Token::RParen)?;
17376 let alias = self.maybe_parse_table_alias()?;
17377 Ok(TableFactor::Unpivot {
17378 table: Box::new(table),
17379 value,
17380 null_inclusion,
17381 name,
17382 columns,
17383 alias,
17384 })
17385 }
17386
17387 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17389 if natural {
17390 Ok(JoinConstraint::Natural)
17391 } else if self.parse_keyword(Keyword::ON) {
17392 let constraint = self.parse_expr()?;
17393 Ok(JoinConstraint::On(constraint))
17394 } else if self.parse_keyword(Keyword::USING) {
17395 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17396 Ok(JoinConstraint::Using(columns))
17397 } else {
17398 Ok(JoinConstraint::None)
17399 }
17401 }
17402
17403 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17405 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17406
17407 self.expect_keyword_is(Keyword::TO)?;
17408 let grantees = self.parse_grantees()?;
17409
17410 let with_grant_option =
17411 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17412
17413 let current_grants =
17414 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17415 Some(CurrentGrantsKind::CopyCurrentGrants)
17416 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17417 Some(CurrentGrantsKind::RevokeCurrentGrants)
17418 } else {
17419 None
17420 };
17421
17422 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17423 Some(self.parse_identifier()?)
17424 } else {
17425 None
17426 };
17427
17428 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17429 Some(self.parse_identifier()?)
17430 } else {
17431 None
17432 };
17433
17434 Ok(Grant {
17435 privileges,
17436 objects,
17437 grantees,
17438 with_grant_option,
17439 as_grantor,
17440 granted_by,
17441 current_grants,
17442 })
17443 }
17444
    /// Parses the comma-separated grantee list of a `GRANT`/`REVOKE`/`DENY`
    /// statement.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        // The grantee type is "sticky": once e.g. `ROLE` has been seen,
        // later grantees without their own type keyword inherit it.
        let mut grantee_type = GranteesType::None;
        loop {
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No type keyword here: inherit the previous grantee's type.
                grantee_type.clone()
            };

            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                // The keyword is reserved in this dialect, so treat the
                // consumed word as the grantee name itself and rewind.
                // NOTE(review): in the inherit-type fall-through above no
                // token was consumed, yet this still rewinds one token —
                // presumably reserved types only ever originate from a
                // just-consumed keyword; confirm.
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC has no name.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // `namespace:name` form — folded back into a single
                    // identifier (presumably a Snowflake-style qualified
                    // grantee; verify against callers).
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17512
17513 pub fn parse_grant_deny_revoke_privileges_objects(
17515 &mut self,
17516 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17517 let privileges = if self.parse_keyword(Keyword::ALL) {
17518 Privileges::All {
17519 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17520 }
17521 } else {
17522 let actions = self.parse_actions_list()?;
17523 Privileges::Actions(actions)
17524 };
17525
17526 let objects = if self.parse_keyword(Keyword::ON) {
17527 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17528 Some(GrantObjects::AllTablesInSchema {
17529 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17530 })
17531 } else if self.parse_keywords(&[
17532 Keyword::ALL,
17533 Keyword::EXTERNAL,
17534 Keyword::TABLES,
17535 Keyword::IN,
17536 Keyword::SCHEMA,
17537 ]) {
17538 Some(GrantObjects::AllExternalTablesInSchema {
17539 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17540 })
17541 } else if self.parse_keywords(&[
17542 Keyword::ALL,
17543 Keyword::VIEWS,
17544 Keyword::IN,
17545 Keyword::SCHEMA,
17546 ]) {
17547 Some(GrantObjects::AllViewsInSchema {
17548 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17549 })
17550 } else if self.parse_keywords(&[
17551 Keyword::ALL,
17552 Keyword::MATERIALIZED,
17553 Keyword::VIEWS,
17554 Keyword::IN,
17555 Keyword::SCHEMA,
17556 ]) {
17557 Some(GrantObjects::AllMaterializedViewsInSchema {
17558 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17559 })
17560 } else if self.parse_keywords(&[
17561 Keyword::ALL,
17562 Keyword::FUNCTIONS,
17563 Keyword::IN,
17564 Keyword::SCHEMA,
17565 ]) {
17566 Some(GrantObjects::AllFunctionsInSchema {
17567 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17568 })
17569 } else if self.parse_keywords(&[
17570 Keyword::FUTURE,
17571 Keyword::SCHEMAS,
17572 Keyword::IN,
17573 Keyword::DATABASE,
17574 ]) {
17575 Some(GrantObjects::FutureSchemasInDatabase {
17576 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17577 })
17578 } else if self.parse_keywords(&[
17579 Keyword::FUTURE,
17580 Keyword::TABLES,
17581 Keyword::IN,
17582 Keyword::SCHEMA,
17583 ]) {
17584 Some(GrantObjects::FutureTablesInSchema {
17585 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17586 })
17587 } else if self.parse_keywords(&[
17588 Keyword::FUTURE,
17589 Keyword::EXTERNAL,
17590 Keyword::TABLES,
17591 Keyword::IN,
17592 Keyword::SCHEMA,
17593 ]) {
17594 Some(GrantObjects::FutureExternalTablesInSchema {
17595 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17596 })
17597 } else if self.parse_keywords(&[
17598 Keyword::FUTURE,
17599 Keyword::VIEWS,
17600 Keyword::IN,
17601 Keyword::SCHEMA,
17602 ]) {
17603 Some(GrantObjects::FutureViewsInSchema {
17604 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17605 })
17606 } else if self.parse_keywords(&[
17607 Keyword::FUTURE,
17608 Keyword::MATERIALIZED,
17609 Keyword::VIEWS,
17610 Keyword::IN,
17611 Keyword::SCHEMA,
17612 ]) {
17613 Some(GrantObjects::FutureMaterializedViewsInSchema {
17614 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17615 })
17616 } else if self.parse_keywords(&[
17617 Keyword::ALL,
17618 Keyword::SEQUENCES,
17619 Keyword::IN,
17620 Keyword::SCHEMA,
17621 ]) {
17622 Some(GrantObjects::AllSequencesInSchema {
17623 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17624 })
17625 } else if self.parse_keywords(&[
17626 Keyword::FUTURE,
17627 Keyword::SEQUENCES,
17628 Keyword::IN,
17629 Keyword::SCHEMA,
17630 ]) {
17631 Some(GrantObjects::FutureSequencesInSchema {
17632 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17633 })
17634 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
17635 Some(GrantObjects::ResourceMonitors(
17636 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17637 ))
17638 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
17639 Some(GrantObjects::ComputePools(
17640 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17641 ))
17642 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
17643 Some(GrantObjects::FailoverGroup(
17644 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17645 ))
17646 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
17647 Some(GrantObjects::ReplicationGroup(
17648 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17649 ))
17650 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
17651 Some(GrantObjects::ExternalVolumes(
17652 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17653 ))
17654 } else {
17655 let object_type = self.parse_one_of_keywords(&[
17656 Keyword::SEQUENCE,
17657 Keyword::DATABASE,
17658 Keyword::SCHEMA,
17659 Keyword::TABLE,
17660 Keyword::VIEW,
17661 Keyword::WAREHOUSE,
17662 Keyword::INTEGRATION,
17663 Keyword::VIEW,
17664 Keyword::WAREHOUSE,
17665 Keyword::INTEGRATION,
17666 Keyword::USER,
17667 Keyword::CONNECTION,
17668 Keyword::PROCEDURE,
17669 Keyword::FUNCTION,
17670 ]);
17671 let objects =
17672 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
17673 match object_type {
17674 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
17675 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
17676 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
17677 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
17678 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
17679 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
17680 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
17681 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
17682 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
17683 if let Some(name) = objects?.first() {
17684 self.parse_grant_procedure_or_function(name, &kw)?
17685 } else {
17686 self.expected_ref("procedure or function name", self.peek_token_ref())?
17687 }
17688 }
17689 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
17690 Some(unexpected_keyword) => return Err(ParserError::ParserError(
17691 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
17692 )),
17693 }
17694 }
17695 } else {
17696 None
17697 };
17698
17699 Ok((privileges, objects))
17700 }
17701
17702 fn parse_grant_procedure_or_function(
17703 &mut self,
17704 name: &ObjectName,
17705 kw: &Option<Keyword>,
17706 ) -> Result<Option<GrantObjects>, ParserError> {
17707 let arg_types = if self.consume_token(&Token::LParen) {
17708 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
17709 self.expect_token(&Token::RParen)?;
17710 list
17711 } else {
17712 vec![]
17713 };
17714 match kw {
17715 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
17716 name: name.clone(),
17717 arg_types,
17718 })),
17719 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
17720 name: name.clone(),
17721 arg_types,
17722 })),
17723 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
17724 }
17725 }
17726
    /// Parses a single privilege (action) in a `GRANT`/`REVOKE` action list.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Parses an optional parenthesized column list; an absent or empty
        // list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-keyword privileges are matched before the single-keyword
        // ones below (e.g. `READ SESSION` before `READ`), so the order of
        // this chain is significant.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)
        // Single-keyword privileges follow; some carry a typed payload.
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
17847
    /// Parses the optional object-type qualifier after a `CREATE` privilege
    /// (e.g. `GRANT CREATE SCHEMA ...`). Returns `None` without consuming
    /// anything when no known object type follows.
    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
        // Multi-keyword object types must be matched before the
        // single-keyword ones below (e.g. `APPLICATION PACKAGE` before
        // `APPLICATION`), so this chain's order is significant.
        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
            Some(ActionCreateObjectType::ApplicationPackage)
        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
            Some(ActionCreateObjectType::ComputePool)
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
            Some(ActionCreateObjectType::DataExchangeListing)
        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
            Some(ActionCreateObjectType::ExternalVolume)
        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
            Some(ActionCreateObjectType::FailoverGroup)
        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
            Some(ActionCreateObjectType::NetworkPolicy)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
            Some(ActionCreateObjectType::OrganiationListing)
        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
            Some(ActionCreateObjectType::ReplicationGroup)
        // Single-keyword object types.
        } else if self.parse_keyword(Keyword::ACCOUNT) {
            Some(ActionCreateObjectType::Account)
        } else if self.parse_keyword(Keyword::APPLICATION) {
            Some(ActionCreateObjectType::Application)
        } else if self.parse_keyword(Keyword::DATABASE) {
            Some(ActionCreateObjectType::Database)
        } else if self.parse_keyword(Keyword::INTEGRATION) {
            Some(ActionCreateObjectType::Integration)
        } else if self.parse_keyword(Keyword::ROLE) {
            Some(ActionCreateObjectType::Role)
        } else if self.parse_keyword(Keyword::SCHEMA) {
            Some(ActionCreateObjectType::Schema)
        } else if self.parse_keyword(Keyword::SHARE) {
            Some(ActionCreateObjectType::Share)
        } else if self.parse_keyword(Keyword::USER) {
            Some(ActionCreateObjectType::User)
        } else if self.parse_keyword(Keyword::WAREHOUSE) {
            Some(ActionCreateObjectType::Warehouse)
        } else {
            None
        }
    }
17890
    /// Parse the object type following `GRANT APPLY` (Snowflake), e.g.
    /// `APPLY MASKING POLICY`.
    ///
    /// # Errors
    /// Returns a parser error when none of the known APPLY types match.
    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
        // Multi-word types first; the single-word `TAG` goes last.
        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AggregationPolicy)
        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AuthenticationPolicy)
        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
            Ok(ActionApplyType::JoinPolicy)
        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
            Ok(ActionApplyType::MaskingPolicy)
        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
            Ok(ActionApplyType::PackagesPolicy)
        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
            Ok(ActionApplyType::PasswordPolicy)
        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
            Ok(ActionApplyType::ProjectionPolicy)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
            Ok(ActionApplyType::RowAccessPolicy)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
            Ok(ActionApplyType::SessionPolicy)
        } else if self.parse_keyword(Keyword::TAG) {
            Ok(ActionApplyType::Tag)
        } else {
            self.expected_ref("GRANT APPLY type", self.peek_token_ref())
        }
    }
17916
    /// Attempt to parse the object type following `GRANT EXECUTE`
    /// (Snowflake); returns `None`, consuming nothing, when no known
    /// type matches.
    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
        // Multi-word types first so `MANAGED ALERT`/`MANAGED TASK` are
        // not consumed as bare `ALERT`/`TASK`.
        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
            Some(ActionExecuteObjectType::DataMetricFunction)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
            Some(ActionExecuteObjectType::ManagedAlert)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
            Some(ActionExecuteObjectType::ManagedTask)
        } else if self.parse_keyword(Keyword::ALERT) {
            Some(ActionExecuteObjectType::Alert)
        } else if self.parse_keyword(Keyword::TASK) {
            Some(ActionExecuteObjectType::Task)
        } else {
            None
        }
    }
17932
    /// Parse the object type following `GRANT MANAGE` (Snowflake).
    ///
    /// # Errors
    /// Returns a parser error when none of the known MANAGE types match.
    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
        // Multi-word types first; single-word GRANTS/WAREHOUSES last.
        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::AccountSupportCases)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
            Ok(ActionManageType::EventSharing)
        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
            Ok(ActionManageType::ListingAutoFulfillment)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::OrganizationSupportCases)
        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::UserSupportCases)
        } else if self.parse_keyword(Keyword::GRANTS) {
            Ok(ActionManageType::Grants)
        } else if self.parse_keyword(Keyword::WAREHOUSES) {
            Ok(ActionManageType::Warehouses)
        } else {
            self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
        }
    }
17952
17953 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
17954 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
17955 Some(ActionModifyType::LogLevel)
17956 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
17957 Some(ActionModifyType::TraceLevel)
17958 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
17959 Some(ActionModifyType::SessionLogLevel)
17960 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
17961 Some(ActionModifyType::SessionTraceLevel)
17962 } else {
17963 None
17964 }
17965 }
17966
17967 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
17968 if self.parse_keyword(Keyword::EXECUTION) {
17969 Some(ActionMonitorType::Execution)
17970 } else if self.parse_keyword(Keyword::SECURITY) {
17971 Some(ActionMonitorType::Security)
17972 } else if self.parse_keyword(Keyword::USAGE) {
17973 Some(ActionMonitorType::Usage)
17974 } else {
17975 None
17976 }
17977 }
17978
    /// Parse the name of a grantee: either a plain object name, or a
    /// MySQL-style `user@host` pair when the dialect supports it.
    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
        let mut name = self.parse_object_name(false)?;
        // `user@host` is only recognized when the dialect allows it, the
        // parsed name is a single plain identifier, and an `@` follows.
        // Note: `consume_token` runs last, so the `@` is only consumed
        // when the earlier conditions already hold (short-circuit `&&`).
        if self.dialect.supports_user_host_grantee()
            && name.0.len() == 1
            && name.0[0].as_ident().is_some()
            && self.consume_token(&Token::AtSign)
        {
            // Both unwraps are safe: the guards above checked len == 1
            // and that the part is an identifier.
            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
            let host = self.parse_identifier()?;
            Ok(GranteeName::UserHost { user, host })
        } else {
            Ok(GranteeName::ObjectName(name))
        }
    }
17994
17995 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
17997 self.expect_keyword(Keyword::DENY)?;
17998
17999 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18000 let objects = match objects {
18001 Some(o) => o,
18002 None => {
18003 return parser_err!(
18004 "DENY statements must specify an object",
18005 self.peek_token_ref().span.start
18006 )
18007 }
18008 };
18009
18010 self.expect_keyword_is(Keyword::TO)?;
18011 let grantees = self.parse_grantees()?;
18012 let cascade = self.parse_cascade_option();
18013 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18014 Some(self.parse_identifier()?)
18015 } else {
18016 None
18017 };
18018
18019 Ok(Statement::Deny(DenyStatement {
18020 privileges,
18021 objects,
18022 grantees,
18023 cascade,
18024 granted_by,
18025 }))
18026 }
18027
18028 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18030 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18031
18032 self.expect_keyword_is(Keyword::FROM)?;
18033 let grantees = self.parse_grantees()?;
18034
18035 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18036 Some(self.parse_identifier()?)
18037 } else {
18038 None
18039 };
18040
18041 let cascade = self.parse_cascade_option();
18042
18043 Ok(Revoke {
18044 privileges,
18045 objects,
18046 grantees,
18047 granted_by,
18048 cascade,
18049 })
18050 }
18051
18052 pub fn parse_replace(
18054 &mut self,
18055 replace_token: TokenWithSpan,
18056 ) -> Result<Statement, ParserError> {
18057 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18058 return parser_err!(
18059 "Unsupported statement REPLACE",
18060 self.peek_token_ref().span.start
18061 );
18062 }
18063
18064 let mut insert = self.parse_insert(replace_token)?;
18065 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18066 *replace_into = true;
18067 }
18068
18069 Ok(insert)
18070 }
18071
18072 fn parse_insert_setexpr_boxed(
18076 &mut self,
18077 insert_token: TokenWithSpan,
18078 ) -> Result<Box<SetExpr>, ParserError> {
18079 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18080 }
18081
    /// Parse an `INSERT` statement body; `insert_token` is the already
    /// consumed `INSERT` (or `REPLACE`) token, kept so the AST node can
    /// record its source span.
    ///
    /// Also recognizes Hive's `INSERT [OVERWRITE] [LOCAL] DIRECTORY`
    /// form, which yields a `Statement::Directory` instead of a
    /// `Statement::Insert`.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `INSERT OR REPLACE/ROLLBACK/ABORT/FAIL/IGNORE`.
        let or = self.parse_conflict_clause();
        // MySQL priority modifiers (LOW_PRIORITY / DELAYED / HIGH_PRIORITY).
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        // MySQL `INSERT IGNORE`.
        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Set to true by `parse_replace` after this function returns.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        if self.parse_keyword(Keyword::DIRECTORY) {
            // Hive: `INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>'
            // [STORED AS <format>] <query>`.
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Hive-style optional `TABLE` keyword before the table name.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional alias for the target table; only attempted when the
            // lookahead cannot be the start of the source subquery or a
            // DEFAULT/VALUES clause.
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    // Implicit alias: a bare identifier, if one follows.
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT INTO t DEFAULT VALUES`: no columns, no source.
                (vec![], None, vec![], None, None, vec![])
            } else {
                // Only parse a column list when the lookahead is not the
                // start of a `(SELECT ...)` source subquery.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                // ClickHouse FORMAT/SETTINGS may follow directly, in which
                // case there is no query source; MySQL's `INSERT ... SET
                // a = 1, ...` supplies assignments instead of a query.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // ClickHouse trailing `SETTINGS ...` and `FORMAT ...` clauses.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL row/column aliases: `INSERT ... AS new_row(a, b)`.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ...` (Postgres/SQLite) or
            // `ON DUPLICATE KEY UPDATE ...` (MySQL).
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Optional conflict target: a named constraint or a
                    // parenthesized column list.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        // `DO UPDATE SET ... [WHERE ...]`.
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                // Multi-table INSERT fields are only populated by the
                // dedicated multi-table parsing path.
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18308
18309 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18313 let ident = self.parse_identifier()?;
18314 let values = self
18315 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18316 .unwrap_or_default();
18317
18318 Ok(InputFormatClause { ident, values })
18319 }
18320
18321 fn peek_subquery_start(&mut self) -> bool {
18324 matches!(
18325 self.peek_tokens_ref(),
18326 [
18327 TokenWithSpan {
18328 token: Token::LParen,
18329 ..
18330 },
18331 TokenWithSpan {
18332 token: Token::Word(Word {
18333 keyword: Keyword::SELECT,
18334 ..
18335 }),
18336 ..
18337 },
18338 ]
18339 )
18340 }
18341
18342 fn peek_subquery_or_cte_start(&mut self) -> bool {
18346 matches!(
18347 self.peek_tokens_ref(),
18348 [
18349 TokenWithSpan {
18350 token: Token::LParen,
18351 ..
18352 },
18353 TokenWithSpan {
18354 token: Token::Word(Word {
18355 keyword: Keyword::SELECT | Keyword::WITH,
18356 ..
18357 }),
18358 ..
18359 },
18360 ]
18361 )
18362 }
18363
18364 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18365 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18366 Some(SqliteOnConflict::Replace)
18367 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18368 Some(SqliteOnConflict::Rollback)
18369 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18370 Some(SqliteOnConflict::Abort)
18371 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18372 Some(SqliteOnConflict::Fail)
18373 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18374 Some(SqliteOnConflict::Ignore)
18375 } else if self.parse_keyword(Keyword::REPLACE) {
18376 Some(SqliteOnConflict::Replace)
18377 } else {
18378 None
18379 }
18380 }
18381
18382 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18384 if self.parse_keyword(Keyword::PARTITION) {
18385 self.expect_token(&Token::LParen)?;
18386 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18387 self.expect_token(&Token::RParen)?;
18388 Ok(partition_cols)
18389 } else {
18390 Ok(None)
18391 }
18392 }
18393
18394 pub fn parse_load_data_table_format(
18396 &mut self,
18397 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18398 if self.parse_keyword(Keyword::INPUTFORMAT) {
18399 let input_format = self.parse_expr()?;
18400 self.expect_keyword_is(Keyword::SERDE)?;
18401 let serde = self.parse_expr()?;
18402 Ok(Some(HiveLoadDataFormat {
18403 input_format,
18404 serde,
18405 }))
18406 } else {
18407 Ok(None)
18408 }
18409 }
18410
18411 fn parse_update_setexpr_boxed(
18415 &mut self,
18416 update_token: TokenWithSpan,
18417 ) -> Result<Box<SetExpr>, ParserError> {
18418 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18419 }
18420
    /// Parse an `UPDATE` statement body; `update_token` is the already
    /// consumed `UPDATE` token, kept so the AST node can record its span.
    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `UPDATE OR REPLACE/ROLLBACK/...`.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // Some dialects place FROM before SET; remember which position
        // was used so the statement can round-trip verbatim.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;

        let output = self.maybe_parse_output_clause()?;

        // FROM after SET is only attempted when FROM did not already
        // appear before SET.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        // MySQL-style trailing ORDER BY / LIMIT on UPDATE.
        let order_by = if self.dialect.supports_update_order_by()
            && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
        {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(Update {
            update_token: update_token.into(),
            optimizer_hints,
            table,
            assignments,
            from,
            selection,
            returning,
            output,
            or,
            order_by,
            limit,
        }
        .into())
    }
18482
18483 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18485 let target = self.parse_assignment_target()?;
18486 self.expect_token(&Token::Eq)?;
18487 let value = self.parse_expr()?;
18488 Ok(Assignment { target, value })
18489 }
18490
18491 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18493 if self.consume_token(&Token::LParen) {
18494 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18495 self.expect_token(&Token::RParen)?;
18496 Ok(AssignmentTarget::Tuple(columns))
18497 } else {
18498 let column = self.parse_object_name(false)?;
18499 Ok(AssignmentTarget::ColumnName(column))
18500 }
18501 }
18502
    /// Parse a single function-call argument, which may be named
    /// (`name => expr`, `name = expr`, ...) or unnamed.
    ///
    /// Named forms are tried speculatively via `maybe_parse`, which
    /// rewinds the token stream on failure; whether the name may be an
    /// arbitrary expression or only an identifier depends on the dialect.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Not a named argument: parse an unnamed (possibly wildcard) one.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                // Snowflake-style `*` with ILIKE/EXCLUDE/EXCEPT/REPLACE/
                // RENAME options; only wrap in WildcardWithOptions when at
                // least one option was actually present.
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18552
    /// Parse the operator separating a named function argument from its
    /// value (`=>`, `=`, `:=`, `:`, or the `VALUE` keyword), each gated
    /// on dialect support.
    ///
    /// On failure the consumed token is pushed back before the error is
    /// returned, so callers using `maybe_parse` can rewind cleanly.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Undo the token consumption before reporting the error.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
18581
18582 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
18584 if self.consume_token(&Token::RParen) {
18585 Ok(vec![])
18586 } else {
18587 let args = self.parse_comma_separated(Parser::parse_function_args)?;
18588 self.expect_token(&Token::RParen)?;
18589 Ok(args)
18590 }
18591 }
18592
    /// Parse the argument list of a table function, after the opening
    /// paren was consumed; also accepts a trailing ClickHouse
    /// `SETTINGS ...` list before the closing paren.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Collect comma-separated args until either a SETTINGS list or
        // the end of the comma-separated sequence is reached; the loop's
        // break value is the optional settings list.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
18613
    /// Parse a function call's parenthesized argument list after the
    /// opening paren was consumed: optional ALL/DISTINCT, the arguments
    /// themselves, and trailing argument clauses (null treatment,
    /// ORDER BY, LIMIT, HAVING MIN/MAX, SEPARATOR, ON OVERFLOW, and
    /// JSON null/RETURNING clauses). Clauses are parsed in this fixed
    /// order.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON null / RETURNING clauses may appear before any argument,
        // e.g. `JSON_ARRAY(NULL ON NULL)`.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list (possibly after the clauses above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // `IGNORE NULLS` / `RESPECT NULLS` inside the argument list.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        // Aggregate ordering, e.g. `ARRAY_AGG(x ORDER BY y)`.
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery aggregate clause: `HAVING MIN|MAX expr`.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // expect_one_of_keywords can only return MIN or MAX here;
                // this arm guards against an internal inconsistency.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL `GROUP_CONCAT(... SEPARATOR ',')`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        // `LISTAGG(... ON OVERFLOW ...)`.
        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON null / RETURNING clauses may also trail the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
18707
18708 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
18709 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
18710 Some(JsonNullClause::AbsentOnNull)
18711 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
18712 Some(JsonNullClause::NullOnNull)
18713 } else {
18714 None
18715 }
18716 }
18717
18718 fn maybe_parse_json_returning_clause(
18719 &mut self,
18720 ) -> Result<Option<JsonReturningClause>, ParserError> {
18721 if self.parse_keyword(Keyword::RETURNING) {
18722 let data_type = self.parse_data_type()?;
18723 Ok(Some(JsonReturningClause { data_type }))
18724 } else {
18725 Ok(None)
18726 }
18727 }
18728
18729 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
18730 let loc = self.peek_token_ref().span.start;
18731 match (
18732 self.parse_keyword(Keyword::ALL),
18733 self.parse_keyword(Keyword::DISTINCT),
18734 ) {
18735 (true, false) => Ok(Some(DuplicateTreatment::All)),
18736 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
18737 (false, false) => Ok(None),
18738 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
18739 }
18740 }
18741
    /// Parse one item of a `SELECT` projection list: a wildcard, a
    /// qualified wildcard, or an expression with an optional alias —
    /// including dialect-specific forms such as MSSQL's `alias = expr`
    /// and multi-column aliases `expr AS (a, b)`.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Dialect-reserved prefix keyword rendered before the item's
        // expression, captured as an identifier via its Debug name.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // A bare unquoted `from` means the projection was empty
            // (e.g. `SELECT FROM t`); report it rather than silently
            // treating `from` as a column name.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // MSSQL-style alias assignment: `alias = expr`.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The guard above already checked `left` is an
                // identifier, so this `let else` cannot fail in practice.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` where `expr` is not a plain object name; the
            // guard consumes the `.` and `*` tokens when it matches.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // Multi-column alias: `expr AS (a, b, ...)`.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
18816
    /// Parse the dialect-specific options that may follow a wildcard in
    /// a projection: `ILIKE`, `EXCLUDE`, `EXCEPT`, `REPLACE`, `RENAME`,
    /// and an optional alias. Each option is only attempted when the
    /// dialect supports it, and they must appear in this order.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // ILIKE and EXCLUDE are mutually exclusive: EXCLUDE is only
        // attempted when no ILIKE was parsed.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if self.dialect.supports_select_wildcard_replace() {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if self.dialect.supports_select_wildcard_rename() {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
            self.maybe_parse_select_item_alias()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
            opt_alias,
        })
    }
18867
18868 pub fn parse_optional_select_item_ilike(
18872 &mut self,
18873 ) -> Result<Option<IlikeSelectItem>, ParserError> {
18874 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
18875 let next_token = self.next_token();
18876 let pattern = match next_token.token {
18877 Token::SingleQuotedString(s) => s,
18878 _ => return self.expected("ilike pattern", next_token),
18879 };
18880 Some(IlikeSelectItem { pattern })
18881 } else {
18882 None
18883 };
18884 Ok(opt_ilike)
18885 }
18886
18887 pub fn parse_optional_select_item_exclude(
18891 &mut self,
18892 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
18893 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
18894 if self.consume_token(&Token::LParen) {
18895 let columns =
18896 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
18897 self.expect_token(&Token::RParen)?;
18898 Some(ExcludeSelectItem::Multiple(columns))
18899 } else {
18900 let column = self.parse_object_name(false)?;
18901 Some(ExcludeSelectItem::Single(column))
18902 }
18903 } else {
18904 None
18905 };
18906
18907 Ok(opt_exclude)
18908 }
18909
18910 pub fn parse_optional_select_item_except(
18914 &mut self,
18915 ) -> Result<Option<ExceptSelectItem>, ParserError> {
18916 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
18917 if self.peek_token_ref().token == Token::LParen {
18918 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
18919 match &idents[..] {
18920 [] => {
18921 return self.expected_ref(
18922 "at least one column should be parsed by the expect clause",
18923 self.peek_token_ref(),
18924 )?;
18925 }
18926 [first, idents @ ..] => Some(ExceptSelectItem {
18927 first_element: first.clone(),
18928 additional_elements: idents.to_vec(),
18929 }),
18930 }
18931 } else {
18932 let ident = self.parse_identifier()?;
18934 Some(ExceptSelectItem {
18935 first_element: ident,
18936 additional_elements: vec![],
18937 })
18938 }
18939 } else {
18940 None
18941 };
18942
18943 Ok(opt_except)
18944 }
18945
18946 pub fn parse_optional_select_item_rename(
18948 &mut self,
18949 ) -> Result<Option<RenameSelectItem>, ParserError> {
18950 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
18951 if self.consume_token(&Token::LParen) {
18952 let idents =
18953 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
18954 self.expect_token(&Token::RParen)?;
18955 Some(RenameSelectItem::Multiple(idents))
18956 } else {
18957 let ident = self.parse_identifier_with_alias()?;
18958 Some(RenameSelectItem::Single(ident))
18959 }
18960 } else {
18961 None
18962 };
18963
18964 Ok(opt_rename)
18965 }
18966
18967 pub fn parse_optional_select_item_replace(
18969 &mut self,
18970 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
18971 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
18972 if self.consume_token(&Token::LParen) {
18973 let items = self.parse_comma_separated(|parser| {
18974 Ok(Box::new(parser.parse_replace_elements()?))
18975 })?;
18976 self.expect_token(&Token::RParen)?;
18977 Some(ReplaceSelectItem { items })
18978 } else {
18979 let tok = self.next_token();
18980 return self.expected("( after REPLACE but", tok);
18981 }
18982 } else {
18983 None
18984 };
18985
18986 Ok(opt_replace)
18987 }
18988 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
18990 let expr = self.parse_expr()?;
18991 let as_keyword = self.parse_keyword(Keyword::AS);
18992 let ident = self.parse_identifier()?;
18993 Ok(ReplaceSelectElement {
18994 expr,
18995 column_name: ident,
18996 as_keyword,
18997 })
18998 }
18999
19000 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19003 if self.parse_keyword(Keyword::ASC) {
19004 Some(true)
19005 } else if self.parse_keyword(Keyword::DESC) {
19006 Some(false)
19007 } else {
19008 None
19009 }
19010 }
19011
19012 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19014 self.parse_order_by_expr_inner(false)
19015 .map(|(order_by, _)| order_by)
19016 }
19017
19018 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19020 self.parse_order_by_expr_inner(true)
19021 .map(|(column, operator_class)| IndexColumn {
19022 column,
19023 operator_class,
19024 })
19025 }
19026
    /// Shared worker for `parse_order_by_expr` and `parse_create_index_expr`.
    ///
    /// Parses `<expr> [<operator class>] [ASC|DESC] [NULLS FIRST|LAST]
    /// [WITH FILL ...]`; the operator class is only attempted when
    /// `with_operator_class` is set (the CREATE INDEX context).
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // An ASC/DESC/NULLS/WITH keyword right after the expression
            // starts one of the ordering suffixes below, so it cannot be
            // an operator class name; skip the speculative parse.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // `WITH FILL` is only attempted when the dialect opts in.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19067
19068 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19069 let asc = self.parse_asc_desc();
19070
19071 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19072 Some(true)
19073 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19074 Some(false)
19075 } else {
19076 None
19077 };
19078
19079 Ok(OrderByOptions { asc, nulls_first })
19080 }
19081
19082 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19086 let from = if self.parse_keyword(Keyword::FROM) {
19087 Some(self.parse_expr()?)
19088 } else {
19089 None
19090 };
19091
19092 let to = if self.parse_keyword(Keyword::TO) {
19093 Some(self.parse_expr()?)
19094 } else {
19095 None
19096 };
19097
19098 let step = if self.parse_keyword(Keyword::STEP) {
19099 Some(self.parse_expr()?)
19100 } else {
19101 None
19102 };
19103
19104 Ok(WithFill { from, to, step })
19105 }
19106
19107 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19110 if !self.parse_keyword(Keyword::INTERPOLATE) {
19111 return Ok(None);
19112 }
19113
19114 if self.consume_token(&Token::LParen) {
19115 let interpolations =
19116 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19117 self.expect_token(&Token::RParen)?;
19118 return Ok(Some(Interpolate {
19120 exprs: Some(interpolations),
19121 }));
19122 }
19123
19124 Ok(Some(Interpolate { exprs: None }))
19126 }
19127
19128 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19130 let column = self.parse_identifier()?;
19131 let expr = if self.parse_keyword(Keyword::AS) {
19132 Some(self.parse_expr()?)
19133 } else {
19134 None
19135 };
19136 Ok(InterpolateExpr { column, expr })
19137 }
19138
    /// Parse the argument of a `TOP` clause: either a parenthesized
    /// expression or a bare integer literal, optionally followed by
    /// `PERCENT` and `WITH TIES`.
    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
        let quantity = if self.consume_token(&Token::LParen) {
            let quantity = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(TopQuantity::Expr(quantity))
        } else {
            // Without parentheses only a plain integer literal is allowed.
            let next_token = self.next_token();
            let quantity = match next_token.token {
                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
                _ => self.expected("literal int", next_token)?,
            };
            Some(TopQuantity::Constant(quantity))
        };

        let percent = self.parse_keyword(Keyword::PERCENT);

        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);

        Ok(Top {
            with_ties,
            percent,
            quantity,
        })
    }
19165
19166 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19168 if self.parse_keyword(Keyword::ALL) {
19169 Ok(None)
19170 } else {
19171 Ok(Some(self.parse_expr()?))
19172 }
19173 }
19174
19175 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19177 let value = self.parse_expr()?;
19178 let rows = if self.parse_keyword(Keyword::ROW) {
19179 OffsetRows::Row
19180 } else if self.parse_keyword(Keyword::ROWS) {
19181 OffsetRows::Rows
19182 } else {
19183 OffsetRows::None
19184 };
19185 Ok(Offset { value, rows })
19186 }
19187
    /// Parse the remainder of a `FETCH [FIRST | NEXT] ...` clause
    /// (the `FETCH` keyword has already been consumed).
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        // FIRST/NEXT are interchangeable and optional here.
        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);

        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            // `FETCH FIRST ROWS ONLY` style: no explicit quantity.
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            // ROW/ROWS after the quantity is optional as well.
            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
            (Some(quantity), percent)
        };

        // Either `ONLY` (no ties) or `WITH TIES` terminates the clause.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else {
            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
        };

        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
19216
    /// Parse a row-locking clause body:
    /// `{UPDATE | SHARE} [OF <table>] [NOWAIT | SKIP LOCKED]`.
    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
            Keyword::UPDATE => LockType::Update,
            Keyword::SHARE => LockType::Share,
            // Defensive arm: expect_one_of_keywords should only return a
            // keyword from the list above.
            unexpected_keyword => return Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
            )),
        };
        // Optional `OF <table name>` restriction.
        let of = if self.parse_keyword(Keyword::OF) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        // Optional non-blocking behavior.
        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
            Some(NonBlock::Nowait)
        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
            Some(NonBlock::SkipLocked)
        } else {
            None
        };
        Ok(LockClause {
            lock_type,
            of,
            nonblock,
        })
    }
19244
19245 pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
19247 self.expect_keyword(Keyword::LOCK)?;
19248
19249 if self.peek_keyword(Keyword::TABLES) {
19250 return self.expected_ref("TABLE or a table name", self.peek_token_ref());
19251 }
19252
19253 let _ = self.parse_keyword(Keyword::TABLE);
19254 let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
19255 let lock_mode = if self.parse_keyword(Keyword::IN) {
19256 let lock_mode = self.parse_lock_table_mode()?;
19257 self.expect_keyword(Keyword::MODE)?;
19258 Some(lock_mode)
19259 } else {
19260 None
19261 };
19262 let nowait = self.parse_keyword(Keyword::NOWAIT);
19263
19264 Ok(Lock {
19265 tables,
19266 lock_mode,
19267 nowait,
19268 })
19269 }
19270
19271 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19272 let only = self.parse_keyword(Keyword::ONLY);
19273 let name = self.parse_object_name(false)?;
19274 let has_asterisk = self.consume_token(&Token::Mul);
19275
19276 Ok(LockTableTarget {
19277 name,
19278 only,
19279 has_asterisk,
19280 })
19281 }
19282
    /// Parse the lock mode of a `LOCK TABLE ... IN <mode> MODE` clause.
    ///
    /// Multi-word modes are matched before their single-word prefixes so
    /// that e.g. `SHARE UPDATE EXCLUSIVE` is not consumed as plain `SHARE`.
    fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
        if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
            Ok(LockTableMode::AccessShare)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::AccessExclusive)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
            Ok(LockTableMode::RowShare)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::RowExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareUpdateExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareRowExclusive)
        } else if self.parse_keyword(Keyword::SHARE) {
            Ok(LockTableMode::Share)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Ok(LockTableMode::Exclusive)
        } else {
            self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
        }
    }
19304
    /// Parse a `VALUES` list: comma-separated parenthesized rows, each
    /// optionally prefixed by the `ROW` keyword.
    ///
    /// * `allow_empty` permits rows with no expressions, i.e. `()`.
    /// * `value_keyword` is supplied by the caller and stored verbatim.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        // Set if any row used the explicit `ROW(...)` form.
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Empty row: consume the `)` and yield no expressions.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
19334
19335 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19337 self.expect_keyword_is(Keyword::TRANSACTION)?;
19338 Ok(Statement::StartTransaction {
19339 modes: self.parse_transaction_modes()?,
19340 begin: false,
19341 transaction: Some(BeginTransactionKind::Transaction),
19342 modifier: None,
19343 statements: vec![],
19344 exception: None,
19345 has_end_keyword: false,
19346 })
19347 }
19348
19349 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19351 if !self.dialect.supports_start_transaction_modifier() {
19352 None
19353 } else if self.parse_keyword(Keyword::DEFERRED) {
19354 Some(TransactionModifier::Deferred)
19355 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19356 Some(TransactionModifier::Immediate)
19357 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19358 Some(TransactionModifier::Exclusive)
19359 } else if self.parse_keyword(Keyword::TRY) {
19360 Some(TransactionModifier::Try)
19361 } else if self.parse_keyword(Keyword::CATCH) {
19362 Some(TransactionModifier::Catch)
19363 } else {
19364 None
19365 }
19366 }
19367
19368 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19370 let modifier = self.parse_transaction_modifier();
19371 let transaction =
19372 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19373 {
19374 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19375 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19376 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19377 _ => None,
19378 };
19379 Ok(Statement::StartTransaction {
19380 modes: self.parse_transaction_modes()?,
19381 begin: true,
19382 transaction,
19383 modifier,
19384 statements: vec![],
19385 exception: None,
19386 has_end_keyword: false,
19387 })
19388 }
19389
    /// Parse the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block, returning it as `Statement::StartTransaction` with
    /// `has_end_keyword` set.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        // Statements up to (but excluding) EXCEPTION or END.
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler: WHEN <ident> [OR <ident>]... THEN <statements>
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // Error identifiers, separated by OR, terminated by THEN.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // Consume an OR between identifiers when present;
                    // its absence is tolerated.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19435
19436 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19438 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19439 None
19440 } else if self.parse_keyword(Keyword::TRY) {
19441 Some(TransactionModifier::Try)
19442 } else if self.parse_keyword(Keyword::CATCH) {
19443 Some(TransactionModifier::Catch)
19444 } else {
19445 None
19446 };
19447 Ok(Statement::Commit {
19448 chain: self.parse_commit_rollback_chain()?,
19449 end: true,
19450 modifier,
19451 })
19452 }
19453
    /// Parse a (possibly empty) comma-separated list of transaction
    /// modes: `ISOLATION LEVEL ...` and `READ ONLY`/`READ WRITE`.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        // After a comma another mode is mandatory; before the first mode
        // the whole list is optional.
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            // A trailing comma makes the next mode mandatory.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19492
19493 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19495 Ok(Statement::Commit {
19496 chain: self.parse_commit_rollback_chain()?,
19497 end: false,
19498 modifier: None,
19499 })
19500 }
19501
19502 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19504 let chain = self.parse_commit_rollback_chain()?;
19505 let savepoint = self.parse_rollback_savepoint()?;
19506
19507 Ok(Statement::Rollback { chain, savepoint })
19508 }
19509
19510 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19512 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19513 if self.parse_keyword(Keyword::AND) {
19514 let chain = !self.parse_keyword(Keyword::NO);
19515 self.expect_keyword_is(Keyword::CHAIN)?;
19516 Ok(chain)
19517 } else {
19518 Ok(false)
19519 }
19520 }
19521
19522 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19524 if self.parse_keyword(Keyword::TO) {
19525 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19526 let savepoint = self.parse_identifier()?;
19527
19528 Ok(Some(savepoint))
19529 } else {
19530 Ok(None)
19531 }
19532 }
19533
    /// Parse the body of a `RAISERROR(message, severity, state
    /// [, args...]) [WITH options]` statement (the `RAISERROR` keyword
    /// has already been consumed).
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional extra arguments after the mandatory triple.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        // Optional `WITH <option>, ...` suffix.
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
19561
19562 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
19564 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
19565 Keyword::LOG => Ok(RaisErrorOption::Log),
19566 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
19567 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
19568 _ => self.expected_ref(
19569 "LOG, NOWAIT OR SETERROR raiserror option",
19570 self.peek_token_ref(),
19571 ),
19572 }
19573 }
19574
19575 pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
19579 self.expect_keyword_is(Keyword::THROW)?;
19580
19581 let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
19582 let (message, state) = if error_number.is_some() {
19583 self.expect_token(&Token::Comma)?;
19584 let message = Box::new(self.parse_expr()?);
19585 self.expect_token(&Token::Comma)?;
19586 let state = Box::new(self.parse_expr()?);
19587 (Some(message), Some(state))
19588 } else {
19589 (None, None)
19590 };
19591
19592 Ok(ThrowStatement {
19593 error_number,
19594 message,
19595 state,
19596 })
19597 }
19598
19599 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
19601 let prepare = self.parse_keyword(Keyword::PREPARE);
19602 let name = self.parse_identifier()?;
19603 Ok(Statement::Deallocate { name, prepare })
19604 }
19605
    /// Parse the body of an `EXECUTE` statement: an optional `IMMEDIATE`
    /// keyword, an optional statement name, an optional (possibly
    /// parenthesized) parameter list, and optional `INTO`, `USING`,
    /// `OUTPUT` and `DEFAULT` suffixes.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // After IMMEDIATE, or when a parenthesized list follows directly,
        // there is no statement name to parse.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Decide where the (possibly empty) parameter list stops: at `)`
        // when parenthesized; otherwise at end of input, at one of the
        // suffix keywords below, or at `;`.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
19665
19666 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
19668 let name = self.parse_identifier()?;
19669
19670 let mut data_types = vec![];
19671 if self.consume_token(&Token::LParen) {
19672 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
19673 self.expect_token(&Token::RParen)?;
19674 }
19675
19676 self.expect_keyword_is(Keyword::AS)?;
19677 let statement = Box::new(self.parse_statement()?);
19678 Ok(Statement::Prepare {
19679 name,
19680 data_types,
19681 statement,
19682 })
19683 }
19684
    /// Parse an `UNLOAD ('<query text>' | <query>) TO <target>
    /// [IAM_ROLE ...] [WITH (...)] [legacy options...]` statement.
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::UNLOAD)?;
        self.expect_token(&Token::LParen)?;
        // The payload is either a quoted query text or an inline query;
        // exactly one of the two fields is populated.
        let (query, query_text) =
            if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                (None, Some(self.parse_literal_string()?))
            } else {
                (Some(self.parse_query()?), None)
            };
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;
        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
            Some(self.parse_iam_role_kind()?)
        } else {
            None
        };
        let with = self.parse_options(Keyword::WITH)?;
        // Greedily collect trailing COPY-style legacy options.
        let mut options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            options.push(opt);
        }
        Ok(Statement::Unload {
            query,
            query_text,
            to,
            auth,
            with,
            options,
        })
    }
19718
19719 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
19720 let temporary = self
19721 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
19722 .is_some();
19723 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
19724 let table = self.parse_keyword(Keyword::TABLE);
19725 let name = self.parse_object_name(false)?;
19726
19727 Ok(SelectInto {
19728 temporary,
19729 unlogged,
19730 table,
19731 name,
19732 })
19733 }
19734
19735 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
19736 let v = self.parse_value()?;
19737 match &v.value {
19738 Value::SingleQuotedString(_) => Ok(v),
19739 Value::DoubleQuotedString(_) => Ok(v),
19740 Value::Number(_, _) => Ok(v),
19741 Value::Placeholder(_) => Ok(v),
19742 _ => {
19743 self.prev_token();
19744 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
19745 }
19746 }
19747 }
19748
19749 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
19751 let name = self.parse_object_name(false)?;
19752 if self.consume_token(&Token::LParen) {
19753 let value = self.parse_pragma_value()?;
19754 self.expect_token(&Token::RParen)?;
19755 Ok(Statement::Pragma {
19756 name,
19757 value: Some(value),
19758 is_eq: false,
19759 })
19760 } else if self.consume_token(&Token::Eq) {
19761 Ok(Statement::Pragma {
19762 name,
19763 value: Some(self.parse_pragma_value()?),
19764 is_eq: true,
19765 })
19766 } else {
19767 Ok(Statement::Pragma {
19768 name,
19769 value: None,
19770 is_eq: false,
19771 })
19772 }
19773 }
19774
19775 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
19777 let extension_name = self.parse_identifier()?;
19778
19779 Ok(Statement::Install { extension_name })
19780 }
19781
    /// Parse the body of a `LOAD` statement: either `LOAD <extension>`
    /// (for dialects with extension loading) or
    /// `LOAD DATA [LOCAL] INPATH '<path>' [OVERWRITE] INTO TABLE <name> ...`.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        // NOTE(review): `DATA` is consumed before the dialect capability is
        // checked, so on a dialect without LOAD DATA the error below points
        // past the `DATA` keyword — confirm this ordering is intended.
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected_ref(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token_ref(),
            )
        }
    }
19812
    /// Parse the remainder of an `OPTIMIZE TABLE` statement (the
    /// `OPTIMIZE` keyword has already been consumed): table name and the
    /// optional `ON CLUSTER`, `PARTITION`, `FINAL`, `DEDUPLICATE [BY]`,
    /// `WHERE` and `ZORDER BY (...)` clauses.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                // `PARTITION ID '<id>'` names the partition directly.
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
19881
    /// Parse the remainder of a `CREATE [TEMPORARY] SEQUENCE` statement:
    /// `[IF NOT EXISTS] <name> [AS <data type>] [<options>...]
    /// [OWNED BY {NONE | <name>}]`.
    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
        // [ IF NOT EXISTS ]
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // [ AS data_type ]
        let mut data_type: Option<DataType> = None;
        if self.parse_keywords(&[Keyword::AS]) {
            data_type = Some(self.parse_data_type()?)
        }
        let sequence_options = self.parse_create_sequence_options()?;
        // [ OWNED BY { name | NONE } ]
        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
            if self.parse_keywords(&[Keyword::NONE]) {
                // `OWNED BY NONE` is kept as a literal NONE identifier.
                Some(ObjectName::from(vec![Ident::new("NONE")]))
            } else {
                Some(self.parse_object_name(false)?)
            }
        } else {
            None
        };
        Ok(Statement::CreateSequence {
            temporary,
            if_not_exists,
            name,
            data_type,
            sequence_options,
            owned_by,
        })
    }
19917
    /// Parse the option list of a `CREATE SEQUENCE` statement:
    /// `INCREMENT [BY]`, `[NO] MINVALUE`, `[NO] MAXVALUE`, `START [WITH]`,
    /// `CACHE` and `[NO] CYCLE`, each optional, consumed in this order.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // [ INCREMENT [ BY ] n ] — the bool records whether BY appeared.
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // [ MINVALUE n | NO MINVALUE ] — `None` encodes the NO form.
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        // [ MAXVALUE n | NO MAXVALUE ]
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // [ START [ WITH ] n ] — the bool records whether WITH appeared.
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        // [ CACHE n ]
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // [ [ NO ] CYCLE ] — the NO form is tried first so plain CYCLE
        // cannot leave a dangling NO; `Cycle(true)` encodes NO CYCLE.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
19962
    /// Parse the remainder of a PostgreSQL-style `CREATE SERVER`
    /// statement: `[IF NOT EXISTS] <name> [TYPE <t>] [VERSION <v>]
    /// FOREIGN DATA WRAPPER <fdw> [OPTIONS (key value, ...)]`.
    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        let server_type = if self.parse_keyword(Keyword::TYPE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let version = if self.parse_keyword(Keyword::VERSION) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        // The FOREIGN DATA WRAPPER clause is mandatory.
        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
        let foreign_data_wrapper = self.parse_object_name(false)?;

        let mut options = None;
        if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            // Options are bare `key value` identifier pairs.
            options = Some(self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?);
            self.expect_token(&Token::RParen)?;
        }

        Ok(Statement::CreateServer(CreateServerStatement {
            name,
            if_not_exists: ine,
            server_type,
            version,
            foreign_data_wrapper,
            options,
        }))
    }
20005
    /// Parse the remainder of a `CREATE FOREIGN DATA WRAPPER` statement:
    /// `<name> [[NO] HANDLER <fn>] [[NO] VALIDATOR <fn>]
    /// [OPTIONS (key value, ...)]`.
    pub fn parse_create_foreign_data_wrapper(
        &mut self,
    ) -> Result<CreateForeignDataWrapper, ParserError> {
        let name = self.parse_identifier()?;

        // `HANDLER <fn>` or `NO HANDLER`; trying HANDLER first is safe
        // because the NO form starts with a different keyword.
        let handler = if self.parse_keyword(Keyword::HANDLER) {
            Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
        } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
            Some(FdwRoutineClause::NoFunction)
        } else {
            None
        };

        // Same structure for the optional VALIDATOR clause.
        let validator = if self.parse_keyword(Keyword::VALIDATOR) {
            Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
        } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
            Some(FdwRoutineClause::NoFunction)
        } else {
            None
        };

        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            // Options are bare `key value` identifier pairs.
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignDataWrapper {
            name,
            handler,
            validator,
            options,
        })
    }
20050
    /// Parse the body of a `CREATE FOREIGN TABLE` statement; the leading
    /// keywords are expected to have been consumed by the caller.
    ///
    /// Grammar:
    /// `[ IF NOT EXISTS ] name ( columns ) SERVER server
    ///  [ OPTIONS ( key value [, ...] ) ]`
    pub fn parse_create_foreign_table(
        &mut self,
    ) -> Result<CreateForeignTable, ParserError> {
        let if_not_exists =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // NOTE(review): any table constraints parsed alongside the columns
        // are discarded here — confirm foreign tables cannot carry them.
        let (columns, _constraints) = self.parse_columns()?;
        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            // Options are bare `key value` identifier pairs.
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignTable {
            name,
            if_not_exists,
            columns,
            server_name,
            options,
        })
    }
20085
    /// Return the parser's current position (index) in the token stream.
    pub fn index(&self) -> usize {
        self.index
    }
20090
    /// Parse one entry of a `WINDOW` clause: `name AS ( window_spec )`, or —
    /// on dialects that support it — `name AS other_window_name`.
    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
        let ident = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;

        let window_expr = if self.consume_token(&Token::LParen) {
            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
        } else if self.dialect.supports_window_clause_named_window_reference() {
            // e.g. MySQL/BigQuery allow `WINDOW w2 AS w1`.
            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
        } else {
            return self.expected_ref("(", self.peek_token_ref());
        };

        Ok(NamedWindowDefinition(ident, window_expr))
    }
20106
    /// Parse a `CREATE [OR ALTER] PROCEDURE` statement body; the leading
    /// keywords (including any `OR ALTER`, reported via `or_alter`) are
    /// expected to have been consumed by the caller.
    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        let params = self.parse_optional_procedure_parameters()?;

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;

        // The procedure body runs until a terminating END keyword.
        let body = self.parse_conditional_statements(&[Keyword::END])?;

        Ok(Statement::CreateProcedure {
            name,
            or_alter,
            params,
            language,
            body,
        })
    }
20130
    /// Parse a window specification after its opening `(`; consumes the
    /// closing `)`.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword word is an optional base-window name
        // (`OVER (w PARTITION BY ...)`).
        let window_name = match &self.peek_token_ref().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything remaining before the closing `)` must be a frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
20165
    /// Parse a `CREATE TYPE` statement body; `CREATE TYPE` itself is
    /// expected to have been consumed by the caller.
    ///
    /// Supported shapes:
    /// - `CREATE TYPE name` (shell type, no representation)
    /// - `CREATE TYPE name ( options )` (SQL-definition / base type)
    /// - `CREATE TYPE name AS ENUM ( ... )`
    /// - `CREATE TYPE name AS RANGE ( ... )`
    /// - `CREATE TYPE name AS ( attributes )` (composite)
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_as = self.parse_keyword(Keyword::AS);

        if !has_as {
            // Without AS, a parenthesized list is a SQL-definition type...
            if self.consume_token(&Token::LParen) {
                let options = self.parse_create_type_sql_definition_options()?;
                self.expect_token(&Token::RParen)?;
                return Ok(Statement::CreateType {
                    name,
                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
                });
            }

            // ...and a bare name declares a shell type.
            return Ok(Statement::CreateType {
                name,
                representation: None,
            });
        }

        if self.parse_keyword(Keyword::ENUM) {
            self.parse_create_type_enum(name)
        } else if self.parse_keyword(Keyword::RANGE) {
            self.parse_create_type_range(name)
        } else if self.consume_token(&Token::LParen) {
            self.parse_create_type_composite(name)
        } else {
            self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
        }
    }
20206
    /// Parse the attribute list of `CREATE TYPE name AS ( ... )` after the
    /// opening `(` has been consumed; consumes the closing `)`.
    ///
    /// Each attribute is `name data_type [ COLLATE collation ]`.
    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        // An empty attribute list `()` is accepted.
        if self.consume_token(&Token::RParen) {
            return Ok(Statement::CreateType {
                name,
                representation: Some(UserDefinedTypeRepresentation::Composite {
                    attributes: vec![],
                }),
            });
        }

        let mut attributes = vec![];
        loop {
            let attr_name = self.parse_identifier()?;
            let attr_data_type = self.parse_data_type()?;
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });

            // Attributes are comma-separated; anything else ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
        })
    }
20247
20248 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20252 self.expect_token(&Token::LParen)?;
20253 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
20254 self.expect_token(&Token::RParen)?;
20255
20256 Ok(Statement::CreateType {
20257 name,
20258 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
20259 })
20260 }
20261
20262 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20266 self.expect_token(&Token::LParen)?;
20267 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
20268 self.expect_token(&Token::RParen)?;
20269
20270 Ok(Statement::CreateType {
20271 name,
20272 representation: Some(UserDefinedTypeRepresentation::Range { options }),
20273 })
20274 }
20275
20276 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
20278 let keyword = self.parse_one_of_keywords(&[
20279 Keyword::SUBTYPE,
20280 Keyword::SUBTYPE_OPCLASS,
20281 Keyword::COLLATION,
20282 Keyword::CANONICAL,
20283 Keyword::SUBTYPE_DIFF,
20284 Keyword::MULTIRANGE_TYPE_NAME,
20285 ]);
20286
20287 match keyword {
20288 Some(Keyword::SUBTYPE) => {
20289 self.expect_token(&Token::Eq)?;
20290 let data_type = self.parse_data_type()?;
20291 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
20292 }
20293 Some(Keyword::SUBTYPE_OPCLASS) => {
20294 self.expect_token(&Token::Eq)?;
20295 let name = self.parse_object_name(false)?;
20296 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
20297 }
20298 Some(Keyword::COLLATION) => {
20299 self.expect_token(&Token::Eq)?;
20300 let name = self.parse_object_name(false)?;
20301 Ok(UserDefinedTypeRangeOption::Collation(name))
20302 }
20303 Some(Keyword::CANONICAL) => {
20304 self.expect_token(&Token::Eq)?;
20305 let name = self.parse_object_name(false)?;
20306 Ok(UserDefinedTypeRangeOption::Canonical(name))
20307 }
20308 Some(Keyword::SUBTYPE_DIFF) => {
20309 self.expect_token(&Token::Eq)?;
20310 let name = self.parse_object_name(false)?;
20311 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
20312 }
20313 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
20314 self.expect_token(&Token::Eq)?;
20315 let name = self.parse_object_name(false)?;
20316 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
20317 }
20318 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
20319 }
20320 }
20321
20322 fn parse_create_type_sql_definition_options(
20324 &mut self,
20325 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
20326 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
20327 }
20328
    /// Parse one option of a SQL-definition (base-type) `CREATE TYPE name
    /// ( ... )` list. Options have the form `KEYWORD = value`, except
    /// `PASSEDBYVALUE`, which stands alone.
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        match keyword {
            // Support-function options: `KEYWORD = function_name`.
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            // INTERNALLENGTH is either the keyword VARIABLE or a fixed
            // unsigned byte length.
            Some(Keyword::INTERNALLENGTH) => {
                self.expect_token(&Token::Eq)?;
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // PASSEDBYVALUE is a bare flag with no `= value` part.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected_ref(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected_ref(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            // CATEGORY takes a single-character string literal.
            Some(Keyword::CATEGORY) => {
                self.expect_token(&Token::Eq)?;
                let category_str = self.parse_literal_string()?;
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                // Lenient boolean: TRUE => true, FALSE => false. Any other
                // token also yields true without being consumed, leaving it
                // for the surrounding list parser to reject.
                // NOTE(review): confirm this leniency is intentional.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                // Same lenient boolean as PREFERRED above.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
        }
    }
20509
20510 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
20511 self.expect_token(&Token::LParen)?;
20512 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
20513 self.expect_token(&Token::RParen)?;
20514 Ok(idents)
20515 }
20516
20517 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
20518 if dialect_of!(self is MySqlDialect | GenericDialect) {
20519 if self.parse_keyword(Keyword::FIRST) {
20520 Ok(Some(MySQLColumnPosition::First))
20521 } else if self.parse_keyword(Keyword::AFTER) {
20522 let ident = self.parse_identifier()?;
20523 Ok(Some(MySQLColumnPosition::After(ident)))
20524 } else {
20525 Ok(None)
20526 }
20527 } else {
20528 Ok(None)
20529 }
20530 }
20531
20532 fn parse_print(&mut self) -> Result<Statement, ParserError> {
20534 Ok(Statement::Print(PrintStatement {
20535 message: Box::new(self.parse_expr()?),
20536 }))
20537 }
20538
20539 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
20543 let wait_type = if self.parse_keyword(Keyword::DELAY) {
20544 WaitForType::Delay
20545 } else if self.parse_keyword(Keyword::TIME) {
20546 WaitForType::Time
20547 } else {
20548 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
20549 };
20550 let expr = self.parse_expr()?;
20551 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
20552 }
20553
20554 fn parse_return(&mut self) -> Result<Statement, ParserError> {
20556 match self.maybe_parse(|p| p.parse_expr())? {
20557 Some(expr) => Ok(Statement::Return(ReturnStatement {
20558 value: Some(ReturnStatementValue::Expr(expr)),
20559 })),
20560 None => Ok(Statement::Return(ReturnStatement { value: None })),
20561 }
20562 }
20563
    /// Parse an `EXPORT DATA [WITH CONNECTION name] OPTIONS ( ... ) AS query`
    /// statement; consumes the leading `EXPORT DATA` keywords itself.
    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;

        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        // OPTIONS (...) and the AS query are mandatory.
        self.expect_keyword(Keyword::OPTIONS)?;
        self.expect_token(&Token::LParen)?;
        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
        self.expect_token(&Token::RParen)?;
        self.expect_keyword(Keyword::AS)?;
        let query = self.parse_query()?;
        Ok(Statement::ExportData(ExportData {
            options,
            query,
            connection,
        }))
    }
20587
    /// Parse a `VACUUM` statement; consumes the `VACUUM` keyword itself.
    ///
    /// Accepts the option keywords in this fixed order:
    /// `VACUUM [FULL] [SORT ONLY] [DELETE ONLY] [REINDEX] [RECLUSTER]
    ///  [table_name [TO n PERCENT] [BOOST]]`
    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::VACUUM)?;
        let full = self.parse_keyword(Keyword::FULL);
        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
        let reindex = self.parse_keyword(Keyword::REINDEX);
        let recluster = self.parse_keyword(Keyword::RECLUSTER);
        // The table name is optional; TO ... PERCENT and BOOST are only
        // meaningful when a table name is present.
        let (table_name, threshold, boost) =
            match self.maybe_parse(|p| p.parse_object_name(false))? {
                Some(table_name) => {
                    let threshold = if self.parse_keyword(Keyword::TO) {
                        let value = self.parse_value()?;
                        self.expect_keyword(Keyword::PERCENT)?;
                        Some(value)
                    } else {
                        None
                    };
                    let boost = self.parse_keyword(Keyword::BOOST);
                    (Some(table_name), threshold, boost)
                }
                _ => (None, None, false),
            };
        Ok(Statement::Vacuum(VacuumStatement {
            full,
            sort_only,
            delete_only,
            reindex,
            recluster,
            table_name,
            threshold,
            boost,
        }))
    }
20621
    /// Consume the parser and return its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
20626
    /// Return true if the next token begins a subquery (`SELECT` or `WITH`),
    /// without consuming anything.
    fn peek_sub_query(&mut self) -> bool {
        self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
            .is_some()
    }
20632
    /// Parse the trailing option clauses of a `SHOW` statement:
    /// a LIKE/WHERE filter, an `IN`/`FROM` scope, `STARTS WITH`, `LIMIT`,
    /// and a `FROM` offset. The filter may appear before or after the
    /// scope clause depending on the dialect.
    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
        let show_in;
        let mut filter_position = None;
        if self.dialect.supports_show_like_before_in() {
            // Dialects like Snowflake: `SHOW ... LIKE '%x%' IN db`.
            if let Some(filter) = self.parse_show_statement_filter()? {
                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
            }
            show_in = self.maybe_parse_show_stmt_in()?;
        } else {
            // Other dialects: `SHOW ... IN db LIKE '%x%'`.
            show_in = self.maybe_parse_show_stmt_in()?;
            if let Some(filter) = self.parse_show_statement_filter()? {
                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
            }
        }
        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
        let limit = self.maybe_parse_show_stmt_limit()?;
        let from = self.maybe_parse_show_stmt_from()?;
        Ok(ShowStatementOptions {
            filter_position,
            show_in,
            starts_with,
            limit,
            limit_from: from,
        })
    }
20658
    /// Parse an optional `{ FROM | IN } [parent_type] [parent_name]` scope
    /// clause of a `SHOW` statement. Returns `Ok(None)` if neither `FROM`
    /// nor `IN` follows.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            // Unreachable: parse_one_of_keywords only returns listed keywords.
            _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` / `IN SCHEMA` directly followed by STARTS WITH
            // or LIMIT means the parent name was omitted (current scope).
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            // Parent type keyword with an optional trailing name.
            Some(parent_kw) => {
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected_ref(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token_ref(),
                        )
                    }
                }
            }
            // No parent type keyword: just a name, possibly followed by a
            // second `FROM`/`IN` qualifier (e.g. `... FROM tbl FROM db`),
            // which is prepended to the object name.
            None => {
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
20728
20729 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
20730 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
20731 Ok(Some(self.parse_value()?))
20732 } else {
20733 Ok(None)
20734 }
20735 }
20736
20737 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
20738 if self.parse_keyword(Keyword::LIMIT) {
20739 Ok(self.parse_limit()?)
20740 } else {
20741 Ok(None)
20742 }
20743 }
20744
20745 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
20746 if self.parse_keyword(Keyword::FROM) {
20747 Ok(Some(self.parse_value()?))
20748 } else {
20749 Ok(None)
20750 }
20751 }
20752
    /// Return true if the parser is currently parsing a column definition.
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
20756
    /// Parse a sequence of `key = value` options, either space- or
    /// comma-delimited (the delimiter is detected from the input), stopping
    /// at EOF, `;`, one of `end_words`, or — when `parenthesized` — the
    /// closing `)`.
    pub(crate) fn parse_key_value_options(
        &mut self,
        parenthesized: bool,
        end_words: &[Keyword],
    ) -> Result<KeyValueOptions, ParserError> {
        let mut options: Vec<KeyValueOption> = Vec::new();
        // Assume space-delimited until a comma is actually seen.
        let mut delimiter = KeyValueOptionsDelimiter::Space;
        if parenthesized {
            self.expect_token(&Token::LParen)?;
        }
        loop {
            match self.next_token().token {
                Token::RParen => {
                    if parenthesized {
                        break;
                    } else {
                        // NOTE(review): the message has a leading space —
                        // likely unintentional; verify the rendered error.
                        return self.expected_ref(" another option or EOF", self.peek_token_ref());
                    }
                }
                Token::EOF | Token::SemiColon => break,
                Token::Comma => {
                    // Seeing any comma switches the recorded delimiter.
                    delimiter = KeyValueOptionsDelimiter::Comma;
                    continue;
                }
                Token::Word(w) if !end_words.contains(&w.keyword) => {
                    options.push(self.parse_key_value_option(&w)?)
                }
                Token::Word(w) if end_words.contains(&w.keyword) => {
                    // An end word terminates the list; push it back so the
                    // caller can consume it.
                    self.prev_token();
                    break;
                }
                _ => {
                    return self.expected_ref(
                        "another option, EOF, SemiColon, Comma or ')'",
                        self.peek_token_ref(),
                    )
                }
            };
        }

        Ok(KeyValueOptions { delimiter, options })
    }
20803
    /// Parse the `= value` part of a single key-value option, where `key`
    /// is the already-consumed option name. The value may be a quoted
    /// string, boolean, number, bare word (kept as a placeholder), a
    /// parenthesized list of values, or a nested parenthesized option list.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word is preserved verbatim as a placeholder
            // value rather than rejected.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            Token::LParen => {
                // First try `( v1, v2, ... )` as a list of plain values;
                // if that fails, reparse as a nested option list.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
20862
20863 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
20865 if self.parse_keyword(Keyword::ALL) {
20866 return Ok(ResetStatement { reset: Reset::ALL });
20867 }
20868
20869 let obj = self.parse_object_name(false)?;
20870 Ok(ResetStatement {
20871 reset: Reset::ConfigurationParameter(obj),
20872 })
20873 }
20874}
20875
20876fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
20877 if let Some(prefix) = prefix {
20878 Expr::Prefixed {
20879 prefix,
20880 value: Box::new(expr),
20881 }
20882 } else {
20883 expr
20884 }
20885}
20886
20887impl Word {
20888 pub fn to_ident(&self, span: Span) -> Ident {
20894 Ident {
20895 value: self.value.clone(),
20896 quote_style: self.quote_style,
20897 span,
20898 }
20899 }
20900
20901 pub fn into_ident(self, span: Span) -> Ident {
20906 Ident {
20907 value: self.value,
20908 quote_style: self.quote_style,
20909 span,
20910 }
20911 }
20912}
20913
20914#[cfg(test)]
20915mod tests {
20916 use crate::test_utils::{all_dialects, TestedDialects};
20917
20918 use super::*;
20919
    /// Verify that `prev_token`/`next_token`/`peek_token` stay consistent,
    /// including stepping back after reaching (and repeating) EOF.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            // Stepping back from EOF re-yields the last real token, and
            // repeated next_token calls at the end keep returning EOF.
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
20940
    /// Verify that `peek_tokens` returns fixed-size lookahead arrays without
    /// consuming input, and pads with EOF past the end of the stream.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            // Peeking more tokens does not advance the parser position.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            for _ in 0..4 {
                parser.next_token();
            }

            // Lookahead past the end of input is padded with EOF tokens.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
20985
20986 #[cfg(test)]
20987 mod test_parse_data_type {
20988 use crate::ast::{
20989 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
20990 };
20991 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
20992 use crate::test_utils::TestedDialects;
20993
        /// Assert that `$input` parses to `$expected_type` and that the
        /// parsed type round-trips back to the exact input string.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
21003
        /// Exercise the ANSI character string types (CHARACTER/CHAR and
        /// their VARYING forms) with no length, a plain length, and lengths
        /// qualified by CHARACTERS or OCTETS units.
        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
21131
21132 #[test]
21133 fn test_ansii_character_large_object_types() {
21134 let dialect =
21136 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
21137
21138 test_parse_data_type!(
21139 dialect,
21140 "CHARACTER LARGE OBJECT",
21141 DataType::CharacterLargeObject(None)
21142 );
21143 test_parse_data_type!(
21144 dialect,
21145 "CHARACTER LARGE OBJECT(20)",
21146 DataType::CharacterLargeObject(Some(20))
21147 );
21148
21149 test_parse_data_type!(
21150 dialect,
21151 "CHAR LARGE OBJECT",
21152 DataType::CharLargeObject(None)
21153 );
21154 test_parse_data_type!(
21155 dialect,
21156 "CHAR LARGE OBJECT(20)",
21157 DataType::CharLargeObject(Some(20))
21158 );
21159
21160 test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
21161 test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
21162 }
21163
21164 #[test]
21165 fn test_parse_custom_types() {
21166 let dialect =
21167 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
21168
21169 test_parse_data_type!(
21170 dialect,
21171 "GEOMETRY",
21172 DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
21173 );
21174
21175 test_parse_data_type!(
21176 dialect,
21177 "GEOMETRY(POINT)",
21178 DataType::Custom(
21179 ObjectName::from(vec!["GEOMETRY".into()]),
21180 vec!["POINT".to_string()]
21181 )
21182 );
21183
21184 test_parse_data_type!(
21185 dialect,
21186 "GEOMETRY(POINT, 4326)",
21187 DataType::Custom(
21188 ObjectName::from(vec!["GEOMETRY".into()]),
21189 vec!["POINT".to_string(), "4326".to_string()]
21190 )
21191 );
21192 }
21193
21194 #[test]
21195 fn test_ansii_exact_numeric_types() {
21196 let dialect = TestedDialects::new(vec![
21198 Box::new(GenericDialect {}),
21199 Box::new(AnsiDialect {}),
21200 Box::new(PostgreSqlDialect {}),
21201 ]);
21202
21203 test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
21204
21205 test_parse_data_type!(
21206 dialect,
21207 "NUMERIC(2)",
21208 DataType::Numeric(ExactNumberInfo::Precision(2))
21209 );
21210
21211 test_parse_data_type!(
21212 dialect,
21213 "NUMERIC(2,10)",
21214 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
21215 );
21216
21217 test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
21218
21219 test_parse_data_type!(
21220 dialect,
21221 "DECIMAL(2)",
21222 DataType::Decimal(ExactNumberInfo::Precision(2))
21223 );
21224
21225 test_parse_data_type!(
21226 dialect,
21227 "DECIMAL(2,10)",
21228 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
21229 );
21230
21231 test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
21232
21233 test_parse_data_type!(
21234 dialect,
21235 "DEC(2)",
21236 DataType::Dec(ExactNumberInfo::Precision(2))
21237 );
21238
21239 test_parse_data_type!(
21240 dialect,
21241 "DEC(2,10)",
21242 DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
21243 );
21244
21245 test_parse_data_type!(
21247 dialect,
21248 "NUMERIC(10,-2)",
21249 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
21250 );
21251
21252 test_parse_data_type!(
21253 dialect,
21254 "DECIMAL(1000,-10)",
21255 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
21256 );
21257
21258 test_parse_data_type!(
21259 dialect,
21260 "DEC(5,-1000)",
21261 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
21262 );
21263
21264 test_parse_data_type!(
21265 dialect,
21266 "NUMERIC(10,-5)",
21267 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
21268 );
21269
21270 test_parse_data_type!(
21271 dialect,
21272 "DECIMAL(20,-10)",
21273 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
21274 );
21275
21276 test_parse_data_type!(
21277 dialect,
21278 "DEC(5,-2)",
21279 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
21280 );
21281
21282 dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
21283 let data_type = parser.parse_data_type().unwrap();
21284 assert_eq!(
21285 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
21286 data_type
21287 );
21288 assert_eq!("NUMERIC(10,5)", data_type.to_string());
21290 });
21291 }
21292
21293 #[test]
21294 fn test_ansii_date_type() {
21295 let dialect =
21297 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
21298
21299 test_parse_data_type!(dialect, "DATE", DataType::Date);
21300
21301 test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
21302
21303 test_parse_data_type!(
21304 dialect,
21305 "TIME(6)",
21306 DataType::Time(Some(6), TimezoneInfo::None)
21307 );
21308
21309 test_parse_data_type!(
21310 dialect,
21311 "TIME WITH TIME ZONE",
21312 DataType::Time(None, TimezoneInfo::WithTimeZone)
21313 );
21314
21315 test_parse_data_type!(
21316 dialect,
21317 "TIME(6) WITH TIME ZONE",
21318 DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
21319 );
21320
21321 test_parse_data_type!(
21322 dialect,
21323 "TIME WITHOUT TIME ZONE",
21324 DataType::Time(None, TimezoneInfo::WithoutTimeZone)
21325 );
21326
21327 test_parse_data_type!(
21328 dialect,
21329 "TIME(6) WITHOUT TIME ZONE",
21330 DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
21331 );
21332
21333 test_parse_data_type!(
21334 dialect,
21335 "TIMESTAMP",
21336 DataType::Timestamp(None, TimezoneInfo::None)
21337 );
21338
21339 test_parse_data_type!(
21340 dialect,
21341 "TIMESTAMP(22)",
21342 DataType::Timestamp(Some(22), TimezoneInfo::None)
21343 );
21344
21345 test_parse_data_type!(
21346 dialect,
21347 "TIMESTAMP(22) WITH TIME ZONE",
21348 DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
21349 );
21350
21351 test_parse_data_type!(
21352 dialect,
21353 "TIMESTAMP(33) WITHOUT TIME ZONE",
21354 DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
21355 );
21356 }
21357 }
21358
21359 #[test]
21360 fn test_parse_schema_name() {
21361 macro_rules! test_parse_schema_name {
21363 ($input:expr, $expected_name:expr $(,)?) => {{
21364 all_dialects().run_parser_method(&*$input, |parser| {
21365 let schema_name = parser.parse_schema_name().unwrap();
21366 assert_eq!(schema_name, $expected_name);
21368 assert_eq!(schema_name.to_string(), $input.to_string());
21370 });
21371 }};
21372 }
21373
21374 let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
21375 let dummy_authorization = Ident::new("dummy_authorization");
21376
21377 test_parse_schema_name!(
21378 format!("{dummy_name}"),
21379 SchemaName::Simple(dummy_name.clone())
21380 );
21381
21382 test_parse_schema_name!(
21383 format!("AUTHORIZATION {dummy_authorization}"),
21384 SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
21385 );
21386 test_parse_schema_name!(
21387 format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
21388 SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
21389 );
21390 }
21391
21392 #[test]
21393 fn mysql_parse_index_table_constraint() {
21394 macro_rules! test_parse_table_constraint {
21395 ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
21396 $dialect.run_parser_method(&*$input, |parser| {
21397 let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
21398 assert_eq!(constraint, $expected);
21400 assert_eq!(constraint.to_string(), $input.to_string());
21402 });
21403 }};
21404 }
21405
21406 fn mk_expected_col(name: &str) -> IndexColumn {
21407 IndexColumn {
21408 column: OrderByExpr {
21409 expr: Expr::Identifier(name.into()),
21410 options: OrderByOptions {
21411 asc: None,
21412 nulls_first: None,
21413 },
21414 with_fill: None,
21415 },
21416 operator_class: None,
21417 }
21418 }
21419
21420 let dialect =
21421 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
21422
21423 test_parse_table_constraint!(
21424 dialect,
21425 "INDEX (c1)",
21426 IndexConstraint {
21427 display_as_key: false,
21428 name: None,
21429 index_type: None,
21430 columns: vec![mk_expected_col("c1")],
21431 index_options: vec![],
21432 }
21433 .into()
21434 );
21435
21436 test_parse_table_constraint!(
21437 dialect,
21438 "KEY (c1)",
21439 IndexConstraint {
21440 display_as_key: true,
21441 name: None,
21442 index_type: None,
21443 columns: vec![mk_expected_col("c1")],
21444 index_options: vec![],
21445 }
21446 .into()
21447 );
21448
21449 test_parse_table_constraint!(
21450 dialect,
21451 "INDEX 'index' (c1, c2)",
21452 TableConstraint::Index(IndexConstraint {
21453 display_as_key: false,
21454 name: Some(Ident::with_quote('\'', "index")),
21455 index_type: None,
21456 columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
21457 index_options: vec![],
21458 })
21459 );
21460
21461 test_parse_table_constraint!(
21462 dialect,
21463 "INDEX USING BTREE (c1)",
21464 IndexConstraint {
21465 display_as_key: false,
21466 name: None,
21467 index_type: Some(IndexType::BTree),
21468 columns: vec![mk_expected_col("c1")],
21469 index_options: vec![],
21470 }
21471 .into()
21472 );
21473
21474 test_parse_table_constraint!(
21475 dialect,
21476 "INDEX USING HASH (c1)",
21477 IndexConstraint {
21478 display_as_key: false,
21479 name: None,
21480 index_type: Some(IndexType::Hash),
21481 columns: vec![mk_expected_col("c1")],
21482 index_options: vec![],
21483 }
21484 .into()
21485 );
21486
21487 test_parse_table_constraint!(
21488 dialect,
21489 "INDEX idx_name USING BTREE (c1)",
21490 IndexConstraint {
21491 display_as_key: false,
21492 name: Some(Ident::new("idx_name")),
21493 index_type: Some(IndexType::BTree),
21494 columns: vec![mk_expected_col("c1")],
21495 index_options: vec![],
21496 }
21497 .into()
21498 );
21499
21500 test_parse_table_constraint!(
21501 dialect,
21502 "INDEX idx_name USING HASH (c1)",
21503 IndexConstraint {
21504 display_as_key: false,
21505 name: Some(Ident::new("idx_name")),
21506 index_type: Some(IndexType::Hash),
21507 columns: vec![mk_expected_col("c1")],
21508 index_options: vec![],
21509 }
21510 .into()
21511 );
21512 }
21513
21514 #[test]
21515 fn test_tokenizer_error_loc() {
21516 let sql = "foo '";
21517 let ast = Parser::parse_sql(&GenericDialect, sql);
21518 assert_eq!(
21519 ast,
21520 Err(ParserError::TokenizerError(
21521 "Unterminated string literal at Line: 1, Column: 5".to_string()
21522 ))
21523 );
21524 }
21525
21526 #[test]
21527 fn test_parser_error_loc() {
21528 let sql = "SELECT this is a syntax error";
21529 let ast = Parser::parse_sql(&GenericDialect, sql);
21530 assert_eq!(
21531 ast,
21532 Err(ParserError::ParserError(
21533 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
21534 .to_string()
21535 ))
21536 );
21537 }
21538
21539 #[test]
21540 fn test_nested_explain_error() {
21541 let sql = "EXPLAIN EXPLAIN SELECT 1";
21542 let ast = Parser::parse_sql(&GenericDialect, sql);
21543 assert_eq!(
21544 ast,
21545 Err(ParserError::ParserError(
21546 "Explain must be root of the plan".to_string()
21547 ))
21548 );
21549 }
21550
21551 #[test]
21552 fn test_parse_multipart_identifier_positive() {
21553 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
21554
21555 let expected = vec![
21557 Ident {
21558 value: "CATALOG".to_string(),
21559 quote_style: None,
21560 span: Span::empty(),
21561 },
21562 Ident {
21563 value: "F(o)o. \"bar".to_string(),
21564 quote_style: Some('"'),
21565 span: Span::empty(),
21566 },
21567 Ident {
21568 value: "table".to_string(),
21569 quote_style: None,
21570 span: Span::empty(),
21571 },
21572 ];
21573 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
21574 let actual = parser.parse_multipart_identifier().unwrap();
21575 assert_eq!(expected, actual);
21576 });
21577
21578 let expected = vec![
21580 Ident {
21581 value: "CATALOG".to_string(),
21582 quote_style: None,
21583 span: Span::empty(),
21584 },
21585 Ident {
21586 value: "table".to_string(),
21587 quote_style: None,
21588 span: Span::empty(),
21589 },
21590 ];
21591 dialect.run_parser_method("CATALOG . table", |parser| {
21592 let actual = parser.parse_multipart_identifier().unwrap();
21593 assert_eq!(expected, actual);
21594 });
21595 }
21596
21597 #[test]
21598 fn test_parse_multipart_identifier_negative() {
21599 macro_rules! test_parse_multipart_identifier_error {
21600 ($input:expr, $expected_err:expr $(,)?) => {{
21601 all_dialects().run_parser_method(&*$input, |parser| {
21602 let actual_err = parser.parse_multipart_identifier().unwrap_err();
21603 assert_eq!(actual_err.to_string(), $expected_err);
21604 });
21605 }};
21606 }
21607
21608 test_parse_multipart_identifier_error!(
21609 "",
21610 "sql parser error: Empty input when parsing identifier",
21611 );
21612
21613 test_parse_multipart_identifier_error!(
21614 "*schema.table",
21615 "sql parser error: Unexpected token in identifier: *",
21616 );
21617
21618 test_parse_multipart_identifier_error!(
21619 "schema.table*",
21620 "sql parser error: Unexpected token in identifier: *",
21621 );
21622
21623 test_parse_multipart_identifier_error!(
21624 "schema.table.",
21625 "sql parser error: Trailing period in identifier",
21626 );
21627
21628 test_parse_multipart_identifier_error!(
21629 "schema.*",
21630 "sql parser error: Unexpected token following period in identifier: *",
21631 );
21632 }
21633
21634 #[test]
21635 fn test_mysql_partition_selection() {
21636 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
21637 let expected = vec!["p0", "p2"];
21638
21639 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
21640 assert_eq!(ast.len(), 1);
21641 if let Statement::Query(v) = &ast[0] {
21642 if let SetExpr::Select(select) = &*v.body {
21643 assert_eq!(select.from.len(), 1);
21644 let from: &TableWithJoins = &select.from[0];
21645 let table_factor = &from.relation;
21646 if let TableFactor::Table { partitions, .. } = table_factor {
21647 let actual: Vec<&str> = partitions
21648 .iter()
21649 .map(|ident| ident.value.as_str())
21650 .collect();
21651 assert_eq!(expected, actual);
21652 }
21653 }
21654 } else {
21655 panic!("fail to parse mysql partition selection");
21656 }
21657 }
21658
21659 #[test]
21660 fn test_replace_into_placeholders() {
21661 let sql = "REPLACE INTO t (a) VALUES (&a)";
21662
21663 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
21664 }
21665
21666 #[test]
21667 fn test_replace_into_set_placeholder() {
21668 let sql = "REPLACE INTO t SET ?";
21669
21670 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
21671 }
21672
21673 #[test]
21674 fn test_replace_incomplete() {
21675 let sql = r#"REPLACE"#;
21676
21677 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
21678 }
21679
21680 #[test]
21681 fn test_placeholder_invalid_whitespace() {
21682 for w in [" ", "/*invalid*/"] {
21683 let sql = format!("\nSELECT\n :{w}fooBar");
21684 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
21685 }
21686 }
21687}