1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while parsing a SQL statement.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could begin; carries the
    /// rendered tokenizer message.
    TokenizerError(String),
    /// The parser encountered unexpected or invalid syntax.
    ParserError(String),
    /// The nesting-depth limit was exceeded (see `RecursionCounter`).
    RecursionLimitExceeded,
}
60
/// Builds an `Err(ParserError::ParserError)` whose message is `$MSG`
/// immediately followed by the source location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
68mod alter;
69mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks how much nesting depth remains; the budget is shared (via
    /// `Rc<Cell<_>>`) between the counter and the guards it hands out.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter that permits `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Spends one unit of depth, returning a guard that refunds it on
        /// drop, or `ParserError::RecursionLimitExceeded` when exhausted.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(decremented) => {
                    self.remaining_depth.set(decremented);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// RAII guard: returns its unit of depth to the shared budget when
    /// dropped.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            self.remaining_depth.set(self.remaining_depth.get() + 1);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op stand-in for the `std` recursion counter: this build does not
    /// track nesting depth, so the limit is never enforced.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; the depth argument is ignored entirely.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard mirroring the `std` version's API.
    pub struct DepthGuard {}
}
158
/// Whether a clause being parsed may be omitted.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}

/// Whether a derived table factor was preceded by `LATERAL`.
pub enum IsLateral {
    Lateral,
    NotLateral,
}

/// Result of parsing an expression in a position where wildcards are legal.
pub enum WildcardExpr {
    /// An ordinary expression (no wildcard).
    Expr(Expr),
    /// A qualified wildcard such as `alias.*` or `schema.table.*`.
    QualifiedWildcard(ObjectName),
    /// An unqualified `*`.
    Wildcard,
}
185
impl From<TokenizerError> for ParserError {
    /// Converts a tokenizer failure into the parser's error type, keeping
    /// only the rendered message.
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
206impl core::error::Error for ParserError {}
207
/// Default maximum nesting depth before `RecursionLimitExceeded` is raised.
const DEFAULT_REMAINING_DEPTH: usize = 50;

/// Sentinel token returned when peeking past the end of the token stream;
/// its span deliberately points at line 0, column 0.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
/// Newtype flag; `true` when a trailing closing bracket was already matched.
// NOTE(review): the consumers are outside this view — presumably used while
// parsing nested generic/array types; confirm against callers.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
239
/// Options that control parser behavior independently of the dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    // Accept a trailing comma in comma-separated lists.
    pub trailing_commas: bool,
    // Unescape string literals during tokenization (default `true`).
    pub unescape: bool,
    // Require `;` between statements (default `true`).
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}
262
263impl ParserOptions {
264 pub fn new() -> Self {
266 Default::default()
267 }
268
269 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
281 self.trailing_commas = trailing_commas;
282 self
283 }
284
285 pub fn with_unescape(mut self, unescape: bool) -> Self {
288 self.unescape = unescape;
289 self
290 }
291}
292
/// Contextual parsing mode: some constructs are only legal (or must be
/// suppressed) in particular surrounding contexts.
#[derive(Copy, Clone)]
enum ParserState {
    /// Default state.
    Normal,
    /// Parsing inside a `CONNECT BY` clause.
    ConnectBy,
    /// Parsing inside a column definition; e.g. `COLLATE` is not treated
    /// as an expression suffix here (see `parse_subexpr`).
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream produced by the tokenizer and builds
/// AST nodes, consulting `dialect` for syntax variations.
pub struct Parser<'a> {
    // The tokens being parsed; may include whitespace/comment tokens
    // (see `into_comments`).
    tokens: Vec<TokenWithSpan>,
    // Index of the first unprocessed token in `tokens`.
    index: usize,
    // Current contextual parsing state.
    state: ParserState,
    // Dialect that customizes the accepted syntax.
    dialect: &'a dyn Dialect,
    // Behavior toggles (trailing commas, unescaping, ...).
    options: ParserOptions,
    // Guards against stack overflow from deeply nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
    /// Creates a parser for `dialect` with the default options and recursion
    /// limit; tokens are supplied later via `with_tokens*` or `try_with_sql`.
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            // Trailing-comma support defaults to whatever the dialect allows.
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }
390
    /// Replaces the maximum nesting depth allowed while parsing.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }

    /// Replaces the parser options wholesale.
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }

    /// Installs a token stream (with spans) and rewinds to its start.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
    /// Tokenizes `sql` with this parser's dialect and unescape setting, then
    /// installs the resulting tokens.
    ///
    /// # Errors
    /// Returns `ParserError::TokenizerError` when tokenization fails.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
478
    /// Parses zero or more semicolon-separated statements until EOF (or a
    /// dangling `END`, which is left unconsumed for the caller).
    ///
    /// # Errors
    /// Fails when two statements are not separated by `;` (unless
    /// `require_semicolon_stmt_delimiter` is off) or any statement is invalid.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Skip any number of consecutive semicolons between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some dialects allow statements without `;` separators.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // `END` after a complete statement ends the list without
                // being consumed (e.g. end of an enclosing block).
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
    /// Convenience entry point: tokenizes and parses `sql` in one call.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }

    /// Like [`Parser::parse_sql`], but additionally returns the comments
    /// found in the token stream.
    pub fn parse_sql_with_comments(
        dialect: &'a dyn Dialect,
        sql: &str,
    ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
        let mut p = Parser::new(dialect).try_with_sql(sql)?;
        p.parse_statements().map(|stmts| (stmts, p.into_comments()))
    }
560
561 fn into_comments(self) -> comments::Comments {
563 let mut comments = comments::Comments::default();
564 for t in self.tokens.into_iter() {
565 match t.token {
566 Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
567 comments.offer(comments::CommentWithSpan {
568 comment: comments::Comment::SingleLine {
569 content: comment,
570 prefix,
571 },
572 span: t.span,
573 });
574 }
575 Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
576 comments.offer(comments::CommentWithSpan {
577 comment: comments::Comment::MultiLine(comment),
578 span: t.span,
579 });
580 }
581 _ => {}
582 }
583 }
584 comments
585 }
586
    /// Parses a single top-level statement, dispatching on its first token.
    ///
    /// The dialect gets first refusal via `Dialect::parse_statement`. Arms
    /// that call `prev_token()` do so because their sub-parser expects to
    /// consume the leading keyword itself.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Guard against stack overflow on deeply nested statements.
        let _guard = self.recursion_counter.try_decrease()?;

        // Allow the dialect to fully override statement parsing.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                // ATTACH has DuckDB-specific syntax; other dialects use the
                // generic form.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                // These sub-parsers receive the already-consumed leading
                // token so they can attach it to the AST.
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // Dialect-gated statements fall through to the error arm
                // when unsupported.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A statement may begin with a parenthesized query.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
731
    /// Parses a `CASE` control-flow statement:
    /// `CASE [<operand>] WHEN ... [ELSE ...] END [CASE]`.
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // A "simple" CASE has an operand before the first WHEN; a
        // "searched" CASE does not.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the last keyword parsed becomes
        // the attached end token.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
768
    /// Parses an `IF <cond> THEN ... [ELSEIF ...]* [ELSE ...] END IF`
    /// statement.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF branches.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
808
    /// Parses a `WHILE <cond> ... END` statement.
    fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(WhileStatement { while_block })
    }
818
819 fn parse_conditional_statement_block(
827 &mut self,
828 terminal_keywords: &[Keyword],
829 ) -> Result<ConditionalStatementBlock, ParserError> {
830 let start_token = self.get_current_token().clone(); let mut then_token = None;
832
833 let condition = match &start_token.token {
834 Token::Word(w) if w.keyword == Keyword::ELSE => None,
835 Token::Word(w) if w.keyword == Keyword::WHILE => {
836 let expr = self.parse_expr()?;
837 Some(expr)
838 }
839 _ => {
840 let expr = self.parse_expr()?;
841 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
842 Some(expr)
843 }
844 };
845
846 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
847
848 Ok(ConditionalStatementBlock {
849 start_token: AttachedToken(start_token),
850 condition,
851 then_token,
852 conditional_statements,
853 })
854 }
855
    /// Parses the statements of a conditional branch: either a
    /// `BEGIN ... END` block or a bare statement sequence that ends at one
    /// of `terminal_keywords` (which are not consumed).
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }
879
    /// Parses `RAISE [USING MESSAGE = <expr> | <expr>]`.
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // A bare `RAISE` (re-raise) carries no value; maybe_parse
            // rewinds when no expression follows.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }
895 pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
899 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
900
901 self.expect_keyword_is(Keyword::ON)?;
902 let token = self.next_token();
903
904 let (object_type, object_name) = match token.token {
905 Token::Word(w) if w.keyword == Keyword::COLLATION => {
906 (CommentObject::Collation, self.parse_object_name(false)?)
907 }
908 Token::Word(w) if w.keyword == Keyword::COLUMN => {
909 (CommentObject::Column, self.parse_object_name(false)?)
910 }
911 Token::Word(w) if w.keyword == Keyword::DATABASE => {
912 (CommentObject::Database, self.parse_object_name(false)?)
913 }
914 Token::Word(w) if w.keyword == Keyword::DOMAIN => {
915 (CommentObject::Domain, self.parse_object_name(false)?)
916 }
917 Token::Word(w) if w.keyword == Keyword::EXTENSION => {
918 (CommentObject::Extension, self.parse_object_name(false)?)
919 }
920 Token::Word(w) if w.keyword == Keyword::FUNCTION => {
921 (CommentObject::Function, self.parse_object_name(false)?)
922 }
923 Token::Word(w) if w.keyword == Keyword::INDEX => {
924 (CommentObject::Index, self.parse_object_name(false)?)
925 }
926 Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
927 self.expect_keyword_is(Keyword::VIEW)?;
928 (
929 CommentObject::MaterializedView,
930 self.parse_object_name(false)?,
931 )
932 }
933 Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
934 (CommentObject::Procedure, self.parse_object_name(false)?)
935 }
936 Token::Word(w) if w.keyword == Keyword::ROLE => {
937 (CommentObject::Role, self.parse_object_name(false)?)
938 }
939 Token::Word(w) if w.keyword == Keyword::SCHEMA => {
940 (CommentObject::Schema, self.parse_object_name(false)?)
941 }
942 Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
943 (CommentObject::Sequence, self.parse_object_name(false)?)
944 }
945 Token::Word(w) if w.keyword == Keyword::TABLE => {
946 (CommentObject::Table, self.parse_object_name(false)?)
947 }
948 Token::Word(w) if w.keyword == Keyword::TYPE => {
949 (CommentObject::Type, self.parse_object_name(false)?)
950 }
951 Token::Word(w) if w.keyword == Keyword::USER => {
952 (CommentObject::User, self.parse_object_name(false)?)
953 }
954 Token::Word(w) if w.keyword == Keyword::VIEW => {
955 (CommentObject::View, self.parse_object_name(false)?)
956 }
957 _ => self.expected("comment object_type", token)?,
958 };
959
960 self.expect_keyword_is(Keyword::IS)?;
961 let comment = if self.parse_keyword(Keyword::NULL) {
962 None
963 } else {
964 Some(self.parse_literal_string()?)
965 };
966 Ok(Statement::Comment {
967 object_type,
968 object_name,
969 comment,
970 if_exists,
971 })
972 }
973
974 pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
976 let mut channel = None;
977 let mut tables: Vec<ObjectName> = vec![];
978 let mut read_lock = false;
979 let mut export = false;
980
981 if !dialect_of!(self is MySqlDialect | GenericDialect) {
982 return parser_err!(
983 "Unsupported statement FLUSH",
984 self.peek_token_ref().span.start
985 );
986 }
987
988 let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
989 Some(FlushLocation::NoWriteToBinlog)
990 } else if self.parse_keyword(Keyword::LOCAL) {
991 Some(FlushLocation::Local)
992 } else {
993 None
994 };
995
996 let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
997 FlushType::BinaryLogs
998 } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
999 FlushType::EngineLogs
1000 } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
1001 FlushType::ErrorLogs
1002 } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
1003 FlushType::GeneralLogs
1004 } else if self.parse_keywords(&[Keyword::HOSTS]) {
1005 FlushType::Hosts
1006 } else if self.parse_keyword(Keyword::PRIVILEGES) {
1007 FlushType::Privileges
1008 } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
1009 FlushType::OptimizerCosts
1010 } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
1011 if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
1012 channel = Some(self.parse_object_name(false).unwrap().to_string());
1013 }
1014 FlushType::RelayLogs
1015 } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
1016 FlushType::SlowLogs
1017 } else if self.parse_keyword(Keyword::STATUS) {
1018 FlushType::Status
1019 } else if self.parse_keyword(Keyword::USER_RESOURCES) {
1020 FlushType::UserResources
1021 } else if self.parse_keywords(&[Keyword::LOGS]) {
1022 FlushType::Logs
1023 } else if self.parse_keywords(&[Keyword::TABLES]) {
1024 loop {
1025 let next_token = self.next_token();
1026 match &next_token.token {
1027 Token::Word(w) => match w.keyword {
1028 Keyword::WITH => {
1029 read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
1030 }
1031 Keyword::FOR => {
1032 export = self.parse_keyword(Keyword::EXPORT);
1033 }
1034 Keyword::NoKeyword => {
1035 self.prev_token();
1036 tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
1037 }
1038 _ => {}
1039 },
1040 _ => {
1041 break;
1042 }
1043 }
1044 }
1045
1046 FlushType::Tables
1047 } else {
1048 return self.expected_ref(
1049 "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
1050 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
1051 self.peek_token_ref(),
1052 );
1053 };
1054
1055 Ok(Statement::Flush {
1056 object_type,
1057 location,
1058 channel,
1059 read_lock,
1060 export,
1061 tables,
1062 })
1063 }
1064
    /// Parses Hive `MSCK [REPAIR] TABLE <name>
    /// [{ADD | DROP | SYNC} PARTITIONS]`.
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition-action clause is optional; `maybe_parse` rewinds on
        // failure so its absence is not an error.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1092
    /// Parses `TRUNCATE [TABLE] [IF EXISTS] <name>[, ...]` with optional
    /// `PARTITION (...)`, Postgres-gated identity/cascade options, and an
    /// optional `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        let table_names = self.parse_comma_separated(|p| {
            // Each target allows an `ONLY` prefix and a trailing `*`.
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Only Postgres (and Generic) accept identity/cascade options.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1143
1144 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1145 if self.parse_keyword(Keyword::CASCADE) {
1146 Some(CascadeOption::Cascade)
1147 } else if self.parse_keyword(Keyword::RESTRICT) {
1148 Some(CascadeOption::Restrict)
1149 } else {
1150 None
1151 }
1152 }
1153
    /// Parses an optional parenthesized list of DuckDB `ATTACH` options:
    /// `(READ_ONLY [TRUE | FALSE] | TYPE <ident>, ...)`.
    ///
    /// Returns an empty list when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean value after READ_ONLY is optional.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // Options are comma-separated and the list ends at `)`.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1190
    /// Parses DuckDB `ATTACH [DATABASE] [IF NOT EXISTS] <path>
    /// [AS <alias>] [(<options>)]`.
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1211
    /// Parses DuckDB `DETACH [DATABASE] [IF EXISTS] <alias>`.
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }

    /// Parses `ATTACH [DATABASE] <expr> AS <schema>` (non-DuckDB dialects).
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1236
    /// Parses `ANALYZE [TABLE] [<name> [(<columns>)]]` with optional trailing
    /// clauses (`PARTITION (...)`, `FOR COLUMNS [...]`, `CACHE METADATA`,
    /// `NOSCAN`, `COMPUTE STATISTICS`), accepted in any order.
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // Optional parenthesized column list directly after the table name.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list after FOR COLUMNS is itself optional.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1301
    /// Parses an expression in a position where wildcards are legal,
    /// recognizing `*`, `qualifier.*`, and a parenthesized `(*)`; anything
    /// else rewinds and falls back to `parse_expr`.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the position so we can rewind on fallback.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token_ref().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: `t` was matched as Word/SingleQuotedString above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    // Collect `a.b.c...` parts until `*` or a non-identifier
                    // follows the period.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Placeholder(s) => {
                                id_parts.push(Ident::new(s))
                            }
                            Token::Mul => {
                                // `qualifier.*`
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            // `(*)` is treated as a plain wildcard.
            Token::LParen => {
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // Not a wildcard form: rewind and parse an ordinary expression.
        self.index = index;
        self.parse_expr()
    }
1365
    /// Parses a new expression starting at the dialect's lowest precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }

    /// Parses `<expr> [alias] [ASC | DESC]`.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // An implicit alias must not swallow ASC/DESC/GROUP keywords;
        // explicit (`AS`) aliases may use any word.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1390
    /// Parses an expression by precedence climbing: a prefix expression
    /// followed by infix operators whose precedence exceeds `precedence`.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Guard against stack overflow on deeply nested expressions.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        // `COLLATE` binds to the expression itself — except inside a column
        // definition, where it belongs to the column instead.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // Stop before `.`: compound access is handled by
            // `parse_compound_expr`, not as an infix operator.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1429
1430 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1432 let condition = self.parse_expr()?;
1433 let message = if self.parse_keyword(Keyword::AS) {
1434 Some(self.parse_expr()?)
1435 } else {
1436 None
1437 };
1438
1439 Ok(Statement::Assert { condition, message })
1440 }
1441
1442 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1444 let name = self.parse_identifier()?;
1445 Ok(Statement::Savepoint { name })
1446 }
1447
1448 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1450 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1451 let name = self.parse_identifier()?;
1452
1453 Ok(Statement::ReleaseSavepoint { name })
1454 }
1455
1456 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1458 let channel = self.parse_identifier()?;
1459 Ok(Statement::LISTEN { channel })
1460 }
1461
1462 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1464 let channel = if self.consume_token(&Token::Mul) {
1465 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1466 } else {
1467 match self.parse_identifier() {
1468 Ok(expr) => expr,
1469 _ => {
1470 self.prev_token();
1471 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1472 }
1473 }
1474 };
1475 Ok(Statement::UNLISTEN { channel })
1476 }
1477
1478 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1480 let channel = self.parse_identifier()?;
1481 let payload = if self.consume_token(&Token::Comma) {
1482 Some(self.parse_literal_string()?)
1483 } else {
1484 None
1485 };
1486 Ok(Statement::NOTIFY { channel, payload })
1487 }
1488
1489 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1491 if self.peek_keyword(Keyword::TABLE) {
1492 self.expect_keyword(Keyword::TABLE)?;
1493 let rename_tables = self.parse_comma_separated(|parser| {
1494 let old_name = parser.parse_object_name(false)?;
1495 parser.expect_keyword(Keyword::TO)?;
1496 let new_name = parser.parse_object_name(false)?;
1497
1498 Ok(RenameTable { old_name, new_name })
1499 })?;
1500 Ok(rename_tables.into())
1501 } else {
1502 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1503 }
1504 }
1505
    /// Parse an expression prefix introduced by a reserved keyword `w`.
    ///
    /// Returns `Ok(Some(expr))` when the keyword introduces a known
    /// expression form, `Ok(None)` when the caller should instead treat the
    /// word as unreserved, and `Err` on a parse failure. Arm order matters:
    /// guards are evaluated top to bottom.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean and NULL literals: rewind so parse_value sees the
            // keyword token itself.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Parameterless "function" keywords (Postgres/Generic only).
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Date/time functions whose parentheses are optional.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS(subquery) — on Databricks, EXISTS can also be a lambda
            // function, so only take this path when a subquery follows.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            // POSITION only has special syntax when followed by `(`.
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(subquery)` — excluded for dialects where ARRAY( opens an
            // ordinary function call.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // PRIOR is only an operator inside CONNECT BY.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // DuckDB-style `MAP {..}` literal.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric typed strings (e.g. `POINT '...'`) for dialects that
            // support geometric types; other keywords fall through to None.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            // Not a reserved prefix — let the caller handle the word.
            _ => Ok(None),
        }
    }
1632
1633 fn parse_expr_prefix_by_unreserved_word(
1635 &mut self,
1636 w: &Word,
1637 w_span: Span,
1638 ) -> Result<Expr, ParserError> {
1639 let is_outer_join = self.peek_outer_join_operator();
1640 match &self.peek_token_ref().token {
1641 Token::LParen if !is_outer_join => {
1642 let id_parts = vec![w.to_ident(w_span)];
1643 self.parse_function(ObjectName::from(id_parts))
1644 }
1645 Token::SingleQuotedString(_)
1647 | Token::DoubleQuotedString(_)
1648 | Token::HexStringLiteral(_)
1649 if w.value.starts_with('_') =>
1650 {
1651 Ok(Expr::Prefixed {
1652 prefix: w.to_ident(w_span),
1653 value: self.parse_introduced_string_expr()?.into(),
1654 })
1655 }
1656 Token::SingleQuotedString(_)
1658 | Token::DoubleQuotedString(_)
1659 | Token::HexStringLiteral(_)
1660 if w.value.starts_with('_') =>
1661 {
1662 Ok(Expr::Prefixed {
1663 prefix: w.to_ident(w_span),
1664 value: self.parse_introduced_string_expr()?.into(),
1665 })
1666 }
1667 Token::Arrow if self.dialect.supports_lambda_functions() => {
1671 self.expect_token(&Token::Arrow)?;
1672 Ok(Expr::Lambda(LambdaFunction {
1673 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1674 name: w.to_ident(w_span),
1675 data_type: None,
1676 }),
1677 body: Box::new(self.parse_expr()?),
1678 syntax: LambdaSyntax::Arrow,
1679 }))
1680 }
1681 Token::Word(_)
1685 if self.dialect.supports_lambda_functions()
1686 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1687 {
1688 let data_type = self.parse_data_type()?;
1689 self.expect_token(&Token::Arrow)?;
1690 Ok(Expr::Lambda(LambdaFunction {
1691 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1692 name: w.to_ident(w_span),
1693 data_type: Some(data_type),
1694 }),
1695 body: Box::new(self.parse_expr()?),
1696 syntax: LambdaSyntax::Arrow,
1697 }))
1698 }
1699 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1700 }
1701 }
1702
1703 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1706 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1707 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1708 } else {
1709 false
1710 }
1711 }
1712
1713 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1715 if let Some(prefix) = self.dialect.parse_prefix(self) {
1717 return prefix;
1718 }
1719
1720 let loc = self.peek_token_ref().span.start;
1737 let opt_expr = self.maybe_parse(|parser| {
1738 match parser.parse_data_type()? {
1739 DataType::Interval { .. } => parser.parse_interval(),
1740 DataType::Custom(ref name, ref modifiers)
1751 if modifiers.is_empty()
1752 && Self::is_simple_unquoted_object_name(name, "xml")
1753 && parser.dialect.supports_xml_expressions() =>
1754 {
1755 Ok(Expr::TypedString(TypedString {
1756 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1757 value: parser.parse_value()?,
1758 uses_odbc_syntax: false,
1759 }))
1760 }
1761 DataType::Custom(..) => parser_err!("dummy", loc),
1762 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1764 Ok(Expr::Cast {
1765 kind: CastKind::Cast,
1766 expr: Box::new(parser.parse_expr()?),
1767 data_type: DataType::Binary(None),
1768 array: false,
1769 format: None,
1770 })
1771 }
1772 data_type => Ok(Expr::TypedString(TypedString {
1773 data_type,
1774 value: parser.parse_value()?,
1775 uses_odbc_syntax: false,
1776 })),
1777 }
1778 })?;
1779
1780 if let Some(expr) = opt_expr {
1781 return Ok(expr);
1782 }
1783
1784 let dialect = self.dialect;
1788
1789 self.advance_token();
1790 let next_token_index = self.get_current_index();
1791 let next_token = self.get_current_token();
1792 let span = next_token.span;
1793 let expr = match &next_token.token {
1794 Token::Word(w) => {
1795 let w = w.clone();
1804 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1805 Ok(Some(expr)) => Ok(expr),
1807
1808 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1810
1811 Err(e) => {
1818 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1819 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1820 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1821 }) {
1822 return Ok(expr);
1823 }
1824 }
1825 return Err(e);
1826 }
1827 }
1828 } Token::LBracket => self.parse_array_expr(false),
1831 tok @ Token::Minus | tok @ Token::Plus => {
1832 let op = if *tok == Token::Plus {
1833 UnaryOperator::Plus
1834 } else {
1835 UnaryOperator::Minus
1836 };
1837 Ok(Expr::UnaryOp {
1838 op,
1839 expr: Box::new(
1840 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1841 ),
1842 })
1843 }
1844 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1845 op: UnaryOperator::BangNot,
1846 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1847 }),
1848 tok @ Token::DoubleExclamationMark
1849 | tok @ Token::PGSquareRoot
1850 | tok @ Token::PGCubeRoot
1851 | tok @ Token::AtSign
1852 if dialect_is!(dialect is PostgreSqlDialect) =>
1853 {
1854 let op = match tok {
1855 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1856 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1857 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1858 Token::AtSign => UnaryOperator::PGAbs,
1859 _ => {
1860 return Err(ParserError::ParserError(
1861 "Internal parser error: unexpected unary operator token".to_string(),
1862 ))
1863 }
1864 };
1865 Ok(Expr::UnaryOp {
1866 op,
1867 expr: Box::new(
1868 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1869 ),
1870 })
1871 }
1872 Token::Tilde => Ok(Expr::UnaryOp {
1873 op: UnaryOperator::BitwiseNot,
1874 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1875 }),
1876 tok @ Token::Sharp
1877 | tok @ Token::AtDashAt
1878 | tok @ Token::AtAt
1879 | tok @ Token::QuestionMarkDash
1880 | tok @ Token::QuestionPipe
1881 if self.dialect.supports_geometric_types() =>
1882 {
1883 let op = match tok {
1884 Token::Sharp => UnaryOperator::Hash,
1885 Token::AtDashAt => UnaryOperator::AtDashAt,
1886 Token::AtAt => UnaryOperator::DoubleAt,
1887 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1888 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1889 _ => {
1890 return Err(ParserError::ParserError(format!(
1891 "Unexpected token in unary operator parsing: {tok:?}"
1892 )))
1893 }
1894 };
1895 Ok(Expr::UnaryOp {
1896 op,
1897 expr: Box::new(
1898 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1899 ),
1900 })
1901 }
1902 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1903 {
1904 self.prev_token();
1905 Ok(Expr::Value(self.parse_value()?))
1906 }
1907 Token::UnicodeStringLiteral(_) => {
1908 self.prev_token();
1909 Ok(Expr::Value(self.parse_value()?))
1910 }
1911 Token::Number(_, _)
1912 | Token::SingleQuotedString(_)
1913 | Token::DoubleQuotedString(_)
1914 | Token::TripleSingleQuotedString(_)
1915 | Token::TripleDoubleQuotedString(_)
1916 | Token::DollarQuotedString(_)
1917 | Token::SingleQuotedByteStringLiteral(_)
1918 | Token::DoubleQuotedByteStringLiteral(_)
1919 | Token::TripleSingleQuotedByteStringLiteral(_)
1920 | Token::TripleDoubleQuotedByteStringLiteral(_)
1921 | Token::SingleQuotedRawStringLiteral(_)
1922 | Token::DoubleQuotedRawStringLiteral(_)
1923 | Token::TripleSingleQuotedRawStringLiteral(_)
1924 | Token::TripleDoubleQuotedRawStringLiteral(_)
1925 | Token::NationalStringLiteral(_)
1926 | Token::QuoteDelimitedStringLiteral(_)
1927 | Token::NationalQuoteDelimitedStringLiteral(_)
1928 | Token::HexStringLiteral(_) => {
1929 self.prev_token();
1930 Ok(Expr::Value(self.parse_value()?))
1931 }
1932 Token::LParen => {
1933 let expr =
1934 if let Some(expr) = self.try_parse_expr_sub_query()? {
1935 expr
1936 } else if let Some(lambda) = self.try_parse_lambda()? {
1937 return Ok(lambda);
1938 } else {
1939 let exprs = self.with_state(ParserState::Normal, |p| {
1950 p.parse_comma_separated(Parser::parse_expr)
1951 })?;
1952 match exprs.len() {
1953 0 => return Err(ParserError::ParserError(
1954 "Internal parser error: parse_comma_separated returned empty list"
1955 .to_string(),
1956 )),
1957 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1958 _ => Expr::Tuple(exprs),
1959 }
1960 };
1961 self.expect_token(&Token::RParen)?;
1962 Ok(expr)
1963 }
1964 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1965 self.prev_token();
1966 Ok(Expr::Value(self.parse_value()?))
1967 }
1968 Token::LBrace => {
1969 self.prev_token();
1970 self.parse_lbrace_expr()
1971 }
1972 _ => self.expected_at("an expression", next_token_index),
1973 }?;
1974
1975 Ok(expr)
1976 }
1977
1978 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1979 Ok(Expr::TypedString(TypedString {
1980 data_type: DataType::GeometricType(kind),
1981 value: self.parse_value()?,
1982 uses_odbc_syntax: false,
1983 }))
1984 }
1985
    /// Parse the compound-access suffix of an expression: a chain of `.field`
    /// and `[subscript]` accesses after `root`, plus a possible trailing `.*`
    /// wildcard or Oracle `(+)` outer-join marker.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `.*` ends the chain. Only PostgreSQL consumes the
                        // `*` here; other dialects rewind and leave the `.`
                        // for the caller.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token(); }

                        break;
                    }
                    // A quoted string after `.` becomes a quoted identifier.
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token(); }
                    // A placeholder after `.` becomes an identifier, too.
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token(); }
                    _ => {
                        // General case: parse the next segment at `.`
                        // precedence and accept only shapes that can follow
                        // a dot; otherwise backtrack (maybe_parse → None).
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested compound results into this chain.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            None => {
                                // Fall back to a plain identifier (or error).
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `[...]` subscript access (PartiQL handles brackets elsewhere).
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `alias.*`: every segment before `*` must be an identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // Oracle `(+)` outer-join marker after a column reference.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2115
    /// Combine a parsed root expression and its access chain into the most
    /// specific AST shape: a compound identifier, a function with a compound
    /// name, an outer-join column, or a generic `CompoundFieldAccess`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        // No chain: the root stands alone.
        if access_chain.is_empty() {
            return Ok(root);
        }

        // Every segment is a plain identifier: `a.b.c`.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // Identifiers followed by a final function call collapse into a
        // single function with a compound name: `schema.func(...)`.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // skip the trailing function itself
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend the root and intermediate identifiers to the function's
            // own (possibly already compound) name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // A single trailing `(+)` merges back into one OuterJoin over the
        // full compound identifier: `tbl.col(+)`.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Fallback: generic field-access expression.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2208
2209 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2210 match k {
2211 Keyword::LOCAL => Some(ContextModifier::Local),
2212 Keyword::GLOBAL => Some(ContextModifier::Global),
2213 Keyword::SESSION => Some(ContextModifier::Session),
2214 _ => None,
2215 }
2216 }
2217
2218 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2220 if !matches!(root, Expr::Identifier(_)) {
2221 return false;
2222 }
2223 fields
2224 .iter()
2225 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2226 }
2227
2228 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2230 let mut idents = vec![];
2231 if let Expr::Identifier(root) = root {
2232 idents.push(root);
2233 for x in fields {
2234 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2235 idents.push(ident);
2236 } else {
2237 return parser_err!(
2238 format!("Expected identifier, found: {}", x),
2239 x.span().start
2240 );
2241 }
2242 }
2243 Ok(idents)
2244 } else {
2245 parser_err!(
2246 format!("Expected identifier, found: {}", root),
2247 root.span().start
2248 )
2249 }
2250 }
2251
2252 fn peek_outer_join_operator(&mut self) -> bool {
2254 if !self.dialect.supports_outer_join_operator() {
2255 return false;
2256 }
2257
2258 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2259 Token::LParen == maybe_lparen.token
2260 && Token::Plus == maybe_plus.token
2261 && Token::RParen == maybe_rparen.token
2262 }
2263
2264 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2267 self.dialect.supports_outer_join_operator()
2268 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2269 }
2270
2271 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2273 self.expect_token(&Token::LParen)?;
2274 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2275 self.expect_token(&Token::RParen)?;
2276
2277 Ok(options)
2278 }
2279
2280 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2281 let name = self.parse_identifier()?;
2282
2283 let next_token = self.peek_token_ref();
2284 if next_token == &Token::Comma || next_token == &Token::RParen {
2285 return Ok(UtilityOption { name, arg: None });
2286 }
2287 let arg = self.parse_expr()?;
2288
2289 Ok(UtilityOption {
2290 name,
2291 arg: Some(arg),
2292 })
2293 }
2294
2295 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2296 if !self.peek_sub_query() {
2297 return Ok(None);
2298 }
2299
2300 Ok(Some(Expr::Subquery(self.parse_query()?)))
2301 }
2302
2303 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2304 if !self.dialect.supports_lambda_functions() {
2305 return Ok(None);
2306 }
2307 self.maybe_parse(|p| {
2308 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2309 p.expect_token(&Token::RParen)?;
2310 p.expect_token(&Token::Arrow)?;
2311 let expr = p.parse_expr()?;
2312 Ok(Expr::Lambda(LambdaFunction {
2313 params: OneOrManyWithParens::Many(params),
2314 body: Box::new(expr),
2315 syntax: LambdaSyntax::Arrow,
2316 }))
2317 })
2318 }
2319
2320 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2330 let params = self.parse_lambda_function_parameters()?;
2332 self.expect_token(&Token::Colon)?;
2334 let body = self.parse_expr()?;
2336 Ok(Expr::Lambda(LambdaFunction {
2337 params,
2338 body: Box::new(body),
2339 syntax: LambdaSyntax::LambdaKeyword,
2340 }))
2341 }
2342
2343 fn parse_lambda_function_parameters(
2345 &mut self,
2346 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2347 let params = if self.consume_token(&Token::LParen) {
2349 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2351 self.expect_token(&Token::RParen)?;
2352 OneOrManyWithParens::Many(params)
2353 } else {
2354 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2356 if params.len() == 1 {
2357 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2358 } else {
2359 OneOrManyWithParens::Many(params)
2360 }
2361 };
2362 Ok(params)
2363 }
2364
2365 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2367 let name = self.parse_identifier()?;
2368 let data_type = match &self.peek_token_ref().token {
2369 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2370 _ => None,
2371 };
2372 Ok(LambdaFunctionParameter { name, data_type })
2373 }
2374
2375 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2382 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2384 return Ok(Some(expr));
2385 }
2386 self.maybe_parse_odbc_body_datetime()
2388 }
2389
2390 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2401 self.maybe_parse(|p| {
2402 let token = p.next_token().clone();
2403 let word_string = token.token.to_string();
2404 let data_type = match word_string.as_str() {
2405 "t" => DataType::Time(None, TimezoneInfo::None),
2406 "d" => DataType::Date,
2407 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2408 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2409 };
2410 let value = p.parse_value()?;
2411 Ok(Expr::TypedString(TypedString {
2412 data_type,
2413 value,
2414 uses_odbc_syntax: true,
2415 }))
2416 })
2417 }
2418
2419 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2428 self.maybe_parse(|p| {
2429 p.expect_keyword(Keyword::FN)?;
2430 let fn_name = p.parse_object_name(false)?;
2431 let mut fn_call = p.parse_function_call(fn_name)?;
2432 fn_call.uses_odbc_syntax = true;
2433 Ok(Expr::Function(fn_call))
2434 })
2435 }
2436
2437 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2439 self.parse_function_call(name).map(Expr::Function)
2440 }
2441
    /// Parse the argument list and trailing clauses of a function call;
    /// `name` has already been parsed and a `(` is expected next.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // `func((SELECT ...))`-style subquery argument, where supported.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style parameterized calls `func(params)(args)`: the
        // first list becomes the parameters, the second the arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // `WITHIN GROUP (ORDER BY ...)` for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE <expr>)`. NOTE(review): the `&&` chain
        // short-circuits, so a FILTER keyword not followed by `( WHERE` has
        // already been consumed when we fall into the else branch — there is
        // no rollback here.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for RESPECT/IGNORE NULLS here if it was not already
        // given inside the argument list.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (...)` window spec or `OVER window_name`.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2529
2530 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2532 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2533 Some(keyword) => {
2534 self.expect_keyword_is(Keyword::NULLS)?;
2535
2536 Ok(match keyword {
2537 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2538 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2539 _ => None,
2540 })
2541 }
2542 None => Ok(None),
2543 }
2544 }
2545
2546 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2548 let args = if self.consume_token(&Token::LParen) {
2549 FunctionArguments::List(self.parse_function_argument_list()?)
2550 } else {
2551 FunctionArguments::None
2552 };
2553 Ok(Expr::Function(Function {
2554 name,
2555 uses_odbc_syntax: false,
2556 parameters: FunctionArguments::None,
2557 args,
2558 filter: None,
2559 over: None,
2560 null_treatment: None,
2561 within_group: vec![],
2562 }))
2563 }
2564
2565 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2567 let next_token = self.next_token();
2568 match &next_token.token {
2569 Token::Word(w) => match w.keyword {
2570 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2571 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2572 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2573 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2574 },
2575 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2576 }
2577 }
2578
2579 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2581 let units = self.parse_window_frame_units()?;
2582 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2583 let start_bound = self.parse_window_frame_bound()?;
2584 self.expect_keyword_is(Keyword::AND)?;
2585 let end_bound = Some(self.parse_window_frame_bound()?);
2586 (start_bound, end_bound)
2587 } else {
2588 (self.parse_window_frame_bound()?, None)
2589 };
2590 Ok(WindowFrame {
2591 units,
2592 start_bound,
2593 end_bound,
2594 })
2595 }
2596
2597 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2599 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2600 Ok(WindowFrameBound::CurrentRow)
2601 } else {
2602 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2603 None
2604 } else {
2605 Some(Box::new(match &self.peek_token_ref().token {
2606 Token::SingleQuotedString(_) => self.parse_interval()?,
2607 _ => self.parse_expr()?,
2608 }))
2609 };
2610 if self.parse_keyword(Keyword::PRECEDING) {
2611 Ok(WindowFrameBound::Preceding(rows))
2612 } else if self.parse_keyword(Keyword::FOLLOWING) {
2613 Ok(WindowFrameBound::Following(rows))
2614 } else {
2615 self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
2616 }
2617 }
2618 }
2619
2620 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2622 if self.dialect.supports_group_by_expr() {
2623 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2624 self.expect_token(&Token::LParen)?;
2625 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2626 self.expect_token(&Token::RParen)?;
2627 Ok(Expr::GroupingSets(result))
2628 } else if self.parse_keyword(Keyword::CUBE) {
2629 self.expect_token(&Token::LParen)?;
2630 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2631 self.expect_token(&Token::RParen)?;
2632 Ok(Expr::Cube(result))
2633 } else if self.parse_keyword(Keyword::ROLLUP) {
2634 self.expect_token(&Token::LParen)?;
2635 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2636 self.expect_token(&Token::RParen)?;
2637 Ok(Expr::Rollup(result))
2638 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2639 Ok(Expr::Tuple(vec![]))
2643 } else {
2644 self.parse_expr()
2645 }
2646 } else {
2647 self.parse_expr()
2649 }
2650 }
2651
2652 fn parse_tuple(
2656 &mut self,
2657 lift_singleton: bool,
2658 allow_empty: bool,
2659 ) -> Result<Vec<Expr>, ParserError> {
2660 if lift_singleton {
2661 if self.consume_token(&Token::LParen) {
2662 let result = if allow_empty && self.consume_token(&Token::RParen) {
2663 vec![]
2664 } else {
2665 let result = self.parse_comma_separated(Parser::parse_expr)?;
2666 self.expect_token(&Token::RParen)?;
2667 result
2668 };
2669 Ok(result)
2670 } else {
2671 Ok(vec![self.parse_expr()?])
2672 }
2673 } else {
2674 self.expect_token(&Token::LParen)?;
2675 let result = if allow_empty && self.consume_token(&Token::RParen) {
2676 vec![]
2677 } else {
2678 let result = self.parse_comma_separated(Parser::parse_expr)?;
2679 self.expect_token(&Token::RParen)?;
2680 result
2681 };
2682 Ok(result)
2683 }
2684 }
2685
    /// Parses a `CASE` expression, either the "simple" form with an operand
    /// (`CASE x WHEN 1 THEN ...`) or the "searched" form without one
    /// (`CASE WHEN x = 1 THEN ...`).
    ///
    /// NOTE(review): assumes the caller has just consumed the `CASE` keyword,
    /// so the current token is attached as `case_token` — confirm at call sites.
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        let case_token = AttachedToken(self.get_current_token().clone());
        let mut operand = None;
        // If `WHEN` does not come immediately, a comparison operand precedes it.
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword_is(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        // One `WHEN <cond> THEN <result>` arm per iteration; the leading WHEN
        // of each subsequent arm is consumed by the loop-exit check below.
        loop {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::THEN)?;
            let result = self.parse_expr()?;
            conditions.push(CaseWhen { condition, result });
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        // Optional `ELSE <expr>` arm.
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
        Ok(Expr::Case {
            case_token,
            end_token,
            operand,
            conditions,
            else_result,
        })
    }
2718
2719 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2721 if self.parse_keyword(Keyword::FORMAT) {
2722 let value = self.parse_value()?;
2723 match self.parse_optional_time_zone()? {
2724 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2725 None => Ok(Some(CastFormat::Value(value))),
2726 }
2727 } else {
2728 Ok(None)
2729 }
2730 }
2731
2732 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2734 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2735 self.parse_value().map(Some)
2736 } else {
2737 Ok(None)
2738 }
2739 }
2740
2741 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2743 self.expect_token(&Token::LParen)?;
2744 let data_type = self.parse_data_type()?;
2745 self.expect_token(&Token::Comma)?;
2746 let expr = self.parse_expr()?;
2747 let styles = if self.consume_token(&Token::Comma) {
2748 self.parse_comma_separated(Parser::parse_expr)?
2749 } else {
2750 Default::default()
2751 };
2752 self.expect_token(&Token::RParen)?;
2753 Ok(Expr::Convert {
2754 is_try,
2755 expr: Box::new(expr),
2756 data_type: Some(data_type),
2757 charset: None,
2758 target_before_value: true,
2759 styles,
2760 })
2761 }
2762
    /// Parses a `CONVERT` expression in one of three forms:
    /// * type-first `CONVERT(type, expr [, style ...])` when the dialect puts
    ///   the target type before the value (delegated to `parse_mssql_convert`),
    /// * `CONVERT(expr USING charset)`,
    /// * `CONVERT(expr, type [CHARACTER SET charset])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `CONVERT(expr USING charset)` form: no target data type.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        // `CONVERT(expr, type [CHARACTER SET charset])` form.
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2802
2803 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2805 self.expect_token(&Token::LParen)?;
2806 let expr = self.parse_expr()?;
2807 self.expect_keyword_is(Keyword::AS)?;
2808 let data_type = self.parse_data_type()?;
2809 let array = self.parse_keyword(Keyword::ARRAY);
2810 let format = self.parse_optional_cast_format()?;
2811 self.expect_token(&Token::RParen)?;
2812 Ok(Expr::Cast {
2813 kind,
2814 expr: Box::new(expr),
2815 data_type,
2816 array,
2817 format,
2818 })
2819 }
2820
2821 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2823 self.expect_token(&Token::LParen)?;
2824 let exists_node = Expr::Exists {
2825 negated,
2826 subquery: self.parse_query()?,
2827 };
2828 self.expect_token(&Token::RParen)?;
2829 Ok(exists_node)
2830 }
2831
2832 pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2834 self.expect_token(&Token::LParen)?;
2835 let field = self.parse_date_time_field()?;
2836
2837 let syntax = if self.parse_keyword(Keyword::FROM) {
2838 ExtractSyntax::From
2839 } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
2840 {
2841 ExtractSyntax::Comma
2842 } else {
2843 return Err(ParserError::ParserError(
2844 "Expected 'FROM' or ','".to_string(),
2845 ));
2846 };
2847
2848 let expr = self.parse_expr()?;
2849 self.expect_token(&Token::RParen)?;
2850 Ok(Expr::Extract {
2851 field,
2852 expr: Box::new(expr),
2853 syntax,
2854 })
2855 }
2856
2857 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2859 self.expect_token(&Token::LParen)?;
2860 let expr = self.parse_expr()?;
2861 let field = if self.parse_keyword(Keyword::TO) {
2863 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2865 } else if self.consume_token(&Token::Comma) {
2866 let v = self.parse_value()?;
2868 if matches!(v.value, Value::Number(_, _)) {
2869 CeilFloorKind::Scale(v)
2870 } else {
2871 return Err(ParserError::ParserError(
2872 "Scale field can only be of number type".to_string(),
2873 ));
2874 }
2875 } else {
2876 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2877 };
2878 self.expect_token(&Token::RParen)?;
2879 if is_ceil {
2880 Ok(Expr::Ceil {
2881 expr: Box::new(expr),
2882 field,
2883 })
2884 } else {
2885 Ok(Expr::Floor {
2886 expr: Box::new(expr),
2887 field,
2888 })
2889 }
2890 }
2891
    /// Parses `POSITION(expr IN expr)`. If the parenthesized `IN` form does
    /// not match, falls back to parsing an ordinary function call named
    /// `ident` (so `position(x, y)` still works as a regular function).
    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
        // Limit the first operand's precedence so that the `IN` keyword
        // terminates it instead of being parsed as a binary operator.
        let between_prec = self.dialect.prec_value(Precedence::Between);
        let position_expr = self.maybe_parse(|p| {
            p.expect_token(&Token::LParen)?;

            let expr = p.parse_subexpr(between_prec)?;
            p.expect_keyword_is(Keyword::IN)?;
            let from = p.parse_expr()?;
            p.expect_token(&Token::RParen)?;
            Ok(Expr::Position {
                expr: Box::new(expr),
                r#in: Box::new(from),
            })
        })?;
        match position_expr {
            Some(expr) => Ok(expr),
            // Not the `IN` form: maybe_parse rewound, re-parse as a function.
            None => self.parse_function(ObjectName::from(vec![ident])),
        }
    }
2916
    /// Parses `SUBSTRING(expr [FROM start] [FOR len])` and the comma form
    /// `SUBSTR(expr, start, len)`; `shorthand` records whether the `SUBSTR`
    /// spelling was used so the AST can round-trip it.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            _ => {
                // Defensive: expect_one_of_keywords should only return one of
                // the keywords listed above.
                self.prev_token();
                return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // `special` marks the comma-separated argument style.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        // Length argument: `FOR len` or `, len`.
        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2949
2950 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2954 self.expect_token(&Token::LParen)?;
2956 let expr = self.parse_expr()?;
2957 self.expect_keyword_is(Keyword::PLACING)?;
2958 let what_expr = self.parse_expr()?;
2959 self.expect_keyword_is(Keyword::FROM)?;
2960 let from_expr = self.parse_expr()?;
2961 let mut for_expr = None;
2962 if self.parse_keyword(Keyword::FOR) {
2963 for_expr = Some(self.parse_expr()?);
2964 }
2965 self.expect_token(&Token::RParen)?;
2966
2967 Ok(Expr::Overlay {
2968 expr: Box::new(expr),
2969 overlay_what: Box::new(what_expr),
2970 overlay_from: Box::new(from_expr),
2971 overlay_for: for_expr.map(Box::new),
2972 })
2973 }
2974
    /// Parses the body of a `TRIM` call. Supported shapes:
    /// * `TRIM([BOTH | LEADING | TRAILING] what FROM expr)`
    /// * `TRIM([BOTH | LEADING | TRAILING] expr)`
    /// * `TRIM(expr, chars [, ...])` on dialects with comma-separated trim
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional BOTH/LEADING/TRAILING qualifier before the first expression.
        if let Token::Word(word) = &self.peek_token_ref().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // The first expression was actually the characters to trim; the
            // expression after FROM is the string being trimmed.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
        {
            // Comma form: remaining arguments are the characters to trim.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            // Plain `TRIM([qualifier] expr)`.
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
3019
3020 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3024 let next_token = self.next_token();
3025 match &next_token.token {
3026 Token::Word(w) => match w.keyword {
3027 Keyword::BOTH => Ok(TrimWhereField::Both),
3028 Keyword::LEADING => Ok(TrimWhereField::Leading),
3029 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3030 _ => self.expected("trim_where field", next_token)?,
3031 },
3032 _ => self.expected("trim_where field", next_token),
3033 }
3034 }
3035
3036 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3039 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3040 self.expect_token(&Token::RBracket)?;
3041 Ok(Expr::Array(Array { elem: exprs, named }))
3042 }
3043
    /// Parses the optional `ON OVERFLOW` clause of `LISTAGG`:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE ['filler'] {WITH | WITHOUT} COUNT`.
    /// Returns `Ok(None)` when the clause is absent.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler is optional: if the next token is WITH or
                // WITHOUT, no filler string was supplied.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH / WITHOUT must follow, then COUNT.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3082
    /// Parses a date/time field keyword (YEAR, MONTH, ..., TIMEZONE_REGION)
    /// as used by EXTRACT, CEIL/FLOOR and INTERVAL qualifiers. Dialects may
    /// additionally allow arbitrary custom identifiers or single-quoted
    /// strings as custom fields.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery/Generic allow `WEEK(<weekday>)`, e.g. WEEK(MONDAY).
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Unknown word: accept it as a custom field if the dialect
                // allows, rewinding so parse_identifier sees the word again.
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            // Some dialects allow a quoted custom field, e.g. EXTRACT('era' ...).
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3161
3162 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3166 match &self.peek_token_ref().token {
3167 Token::Word(w) => match w.keyword {
3168 Keyword::EXISTS => {
3169 let negated = true;
3170 let _ = self.parse_keyword(Keyword::EXISTS);
3171 self.parse_exists_expr(negated)
3172 }
3173 _ => Ok(Expr::UnaryOp {
3174 op: UnaryOperator::Not,
3175 expr: Box::new(
3176 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3177 ),
3178 }),
3179 },
3180 _ => Ok(Expr::UnaryOp {
3181 op: UnaryOperator::Not,
3182 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3183 }),
3184 }
3185 }
3186
3187 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3197 let token = self.expect_token(&Token::LBrace)?;
3198
3199 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3200 self.expect_token(&Token::RBrace)?;
3201 return Ok(fn_expr);
3202 }
3203
3204 if self.dialect.supports_dictionary_syntax() {
3205 self.prev_token(); return self.parse_dictionary();
3207 }
3208
3209 self.expected("an expression", token)
3210 }
3211
    /// Parses `MATCH (col [, ...]) AGAINST (<value> [<search modifier>])`,
    /// the MySQL-style full-text search predicate. The leading `MATCH`
    /// keyword is expected to have been consumed by the caller, since this
    /// starts directly at the column list.
    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;

        self.expect_keyword_is(Keyword::AGAINST)?;

        self.expect_token(&Token::LParen)?;

        // The text to search for.
        let match_value = self.parse_value()?;

        let in_natural_language_mode_keywords = &[
            Keyword::IN,
            Keyword::NATURAL,
            Keyword::LANGUAGE,
            Keyword::MODE,
        ];

        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];

        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];

        // Optional search modifier; `IN NATURAL LANGUAGE MODE` may itself be
        // followed by `WITH QUERY EXPANSION`.
        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
            if self.parse_keywords(with_query_expansion_keywords) {
                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
            } else {
                Some(SearchModifier::InNaturalLanguageMode)
            }
        } else if self.parse_keywords(in_boolean_mode_keywords) {
            Some(SearchModifier::InBooleanMode)
        } else if self.parse_keywords(with_query_expansion_keywords) {
            Some(SearchModifier::WithQueryExpansion)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;

        Ok(Expr::MatchAgainst {
            columns,
            match_value,
            opt_search_modifier,
        })
    }
3260
    /// Parses an `INTERVAL` literal body (the `INTERVAL` keyword has already
    /// been consumed), e.g. `INTERVAL '1' DAY`, `INTERVAL '1-2' YEAR TO MONTH`
    /// or `INTERVAL '10' SECOND (2, 3)`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a qualifier parse a full expression for the
        // value; others parse only a prefix expression — presumably so a
        // following unit keyword is not folded into the value expression.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // The interval qualifier (unit), e.g. `DAY` in `INTERVAL '1' DAY`.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        // A leading SECOND unit may carry `(leading_precision, fractional)`;
        // any other unit takes an optional `(precision)` and an optional
        // `TO <unit> [(fractional)]` range.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    // Fractional seconds precision only applies to `TO SECOND`.
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3343
3344 pub fn next_token_is_temporal_unit(&mut self) -> bool {
3347 if let Token::Word(word) = &self.peek_token_ref().token {
3348 matches!(
3349 word.keyword,
3350 Keyword::YEAR
3351 | Keyword::YEARS
3352 | Keyword::MONTH
3353 | Keyword::MONTHS
3354 | Keyword::WEEK
3355 | Keyword::WEEKS
3356 | Keyword::DAY
3357 | Keyword::DAYS
3358 | Keyword::HOUR
3359 | Keyword::HOURS
3360 | Keyword::MINUTE
3361 | Keyword::MINUTES
3362 | Keyword::SECOND
3363 | Keyword::SECONDS
3364 | Keyword::CENTURY
3365 | Keyword::DECADE
3366 | Keyword::DOW
3367 | Keyword::DOY
3368 | Keyword::EPOCH
3369 | Keyword::ISODOW
3370 | Keyword::ISOYEAR
3371 | Keyword::JULIAN
3372 | Keyword::MICROSECOND
3373 | Keyword::MICROSECONDS
3374 | Keyword::MILLENIUM
3375 | Keyword::MILLENNIUM
3376 | Keyword::MILLISECOND
3377 | Keyword::MILLISECONDS
3378 | Keyword::NANOSECOND
3379 | Keyword::NANOSECONDS
3380 | Keyword::QUARTER
3381 | Keyword::TIMEZONE
3382 | Keyword::TIMEZONE_HOUR
3383 | Keyword::TIMEZONE_MINUTE
3384 )
3385 } else {
3386 false
3387 }
3388 }
3389
    /// Parses a struct literal: typed `STRUCT<a INT, b STRING>(1, 'x')` or
    /// untyped `STRUCT(1 AS a, 'x' AS b)`.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // Step back so parse_struct_type_def can consume the STRUCT keyword.
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        // A leftover `>` (from a `>>` token) with no outer bracket to absorb
        // it means the literal's angle brackets were unbalanced.
        if trailing_bracket.0 {
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token_ref().span.start
            );
        }

        // The value list; `AS`-named values are only allowed when the struct
        // had no explicit type list (see parse_struct_field_expr).
        self.expect_token(&Token::LParen)?;
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3417
    /// Parses one value of a struct literal, optionally named via
    /// `expr AS name`. `typed_syntax` is true when the struct had an explicit
    /// `<...>` type list, in which case `AS` names are rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // Rewind one token so the reported error location points at
                // the offending `AS` keyword.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token_ref().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3449
    /// Parses `STRUCT< field_def [, ...] >` using `elem_parser` for each
    /// field. Returns the fields plus a flag reporting whether a `>>` token
    /// provided the closing bracket for an enclosing angle-bracket pair as
    /// well. A bare `STRUCT` with no `<` yields an empty field list.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // No `<`: untyped STRUCT with no declared fields.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // Stop when the element itself consumed the closing bracket
            // (via a nested `>>`) or when no comma follows.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3492
3493 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3495 self.expect_keyword_is(Keyword::STRUCT)?;
3496 self.expect_token(&Token::LParen)?;
3497 let struct_body = self.parse_comma_separated(|parser| {
3498 let field_name = parser.parse_identifier()?;
3499 let field_type = parser.parse_data_type()?;
3500
3501 Ok(StructField {
3502 field_name: Some(field_name),
3503 field_type,
3504 options: None,
3505 })
3506 });
3507 self.expect_token(&Token::RParen)?;
3508 struct_body
3509 }
3510
    /// Parses a single struct field definition:
    /// `[name [:]] type [OPTIONS ...]`.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // A field is named when it starts with `word word` (name then type)
        // or `word :` (name, colon, type); otherwise it is an anonymous type.
        let is_named_field = matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
        );

        let field_name = if is_named_field {
            let name = self.parse_identifier()?;
            // The colon separator between name and type is optional.
            let _ = self.consume_token(&Token::Colon);
            Some(name)
        } else {
            None
        };

        // The data type parser reports whether it consumed a trailing `>`
        // belonging to this struct's angle brackets (from a `>>` token).
        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3552
3553 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3563 self.expect_keyword_is(Keyword::UNION)?;
3564
3565 self.expect_token(&Token::LParen)?;
3566
3567 let fields = self.parse_comma_separated(|p| {
3568 Ok(UnionField {
3569 field_name: p.parse_identifier()?,
3570 field_type: p.parse_data_type()?,
3571 })
3572 })?;
3573
3574 self.expect_token(&Token::RParen)?;
3575
3576 Ok(fields)
3577 }
3578
3579 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3590 self.expect_token(&Token::LBrace)?;
3591
3592 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3593
3594 self.expect_token(&Token::RBrace)?;
3595
3596 Ok(Expr::Dictionary(fields))
3597 }
3598
3599 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3610 let key = self.parse_identifier()?;
3611
3612 self.expect_token(&Token::Colon)?;
3613
3614 let expr = self.parse_expr()?;
3615
3616 Ok(DictionaryField {
3617 key,
3618 value: Box::new(expr),
3619 })
3620 }
3621
3622 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3632 self.expect_token(&Token::LBrace)?;
3633 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3634 self.expect_token(&Token::RBrace)?;
3635 Ok(Expr::Map(Map { entries: fields }))
3636 }
3637
3638 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3648 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3650
3651 self.expect_token(&Token::Colon)?;
3652
3653 let value = self.parse_expr()?;
3654
3655 Ok(MapEntry {
3656 key: Box::new(key),
3657 value: Box::new(value),
3658 })
3659 }
3660
3661 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3671 self.expect_keyword_is(Keyword::MAP)?;
3672 self.expect_token(&Token::LParen)?;
3673 let key_data_type = self.parse_data_type()?;
3674 self.expect_token(&Token::Comma)?;
3675 let value_data_type = self.parse_data_type()?;
3676 self.expect_token(&Token::RParen)?;
3677
3678 Ok((key_data_type, value_data_type))
3679 }
3680
3681 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3691 self.expect_keyword_is(Keyword::TUPLE)?;
3692 self.expect_token(&Token::LParen)?;
3693 let mut field_defs = vec![];
3694 loop {
3695 let (def, _) = self.parse_struct_field_def()?;
3696 field_defs.push(def);
3697 if !self.consume_token(&Token::Comma) {
3698 break;
3699 }
3700 }
3701 self.expect_token(&Token::RParen)?;
3702
3703 Ok(field_defs)
3704 }
3705
3706 fn expect_closing_angle_bracket(
3711 &mut self,
3712 trailing_bracket: MatchedTrailingBracket,
3713 ) -> Result<MatchedTrailingBracket, ParserError> {
3714 let trailing_bracket = if !trailing_bracket.0 {
3715 match &self.peek_token_ref().token {
3716 Token::Gt => {
3717 self.next_token();
3718 false.into()
3719 }
3720 Token::ShiftRight => {
3721 self.next_token();
3722 true.into()
3723 }
3724 _ => return self.expected_ref(">", self.peek_token_ref()),
3725 }
3726 } else {
3727 false.into()
3728 };
3729
3730 Ok(trailing_bracket)
3731 }
3732
3733 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3735 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3737 return infix;
3738 }
3739
3740 let dialect = self.dialect;
3741
3742 self.advance_token();
3743 let tok = self.get_current_token();
3744 debug!("infix: {tok:?}");
3745 let tok_index = self.get_current_index();
3746 let span = tok.span;
3747 let regular_binary_operator = match &tok.token {
3748 Token::Spaceship => Some(BinaryOperator::Spaceship),
3749 Token::DoubleEq => Some(BinaryOperator::Eq),
3750 Token::Assignment => Some(BinaryOperator::Assignment),
3751 Token::Eq => Some(BinaryOperator::Eq),
3752 Token::Neq => Some(BinaryOperator::NotEq),
3753 Token::Gt => Some(BinaryOperator::Gt),
3754 Token::GtEq => Some(BinaryOperator::GtEq),
3755 Token::Lt => Some(BinaryOperator::Lt),
3756 Token::LtEq => Some(BinaryOperator::LtEq),
3757 Token::Plus => Some(BinaryOperator::Plus),
3758 Token::Minus => Some(BinaryOperator::Minus),
3759 Token::Mul => Some(BinaryOperator::Multiply),
3760 Token::Mod => Some(BinaryOperator::Modulo),
3761 Token::StringConcat => Some(BinaryOperator::StringConcat),
3762 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3763 Token::Caret => {
3764 if dialect_is!(dialect is PostgreSqlDialect) {
3767 Some(BinaryOperator::PGExp)
3768 } else {
3769 Some(BinaryOperator::BitwiseXor)
3770 }
3771 }
3772 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3773 Token::Div => Some(BinaryOperator::Divide),
3774 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3775 Some(BinaryOperator::DuckIntegerDivide)
3776 }
3777 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3778 Some(BinaryOperator::PGBitwiseShiftLeft)
3779 }
3780 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3781 Some(BinaryOperator::PGBitwiseShiftRight)
3782 }
3783 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3784 Some(BinaryOperator::PGBitwiseXor)
3785 }
3786 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3787 Some(BinaryOperator::PGOverlap)
3788 }
3789 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3790 Some(BinaryOperator::PGOverlap)
3791 }
3792 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3793 Some(BinaryOperator::And)
3794 }
3795 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3796 Some(BinaryOperator::PGStartsWith)
3797 }
3798 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3799 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3800 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3801 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3802 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3803 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3804 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3805 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3806 Token::Arrow => Some(BinaryOperator::Arrow),
3807 Token::LongArrow => Some(BinaryOperator::LongArrow),
3808 Token::HashArrow => Some(BinaryOperator::HashArrow),
3809 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3810 Token::AtArrow => Some(BinaryOperator::AtArrow),
3811 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3812 Token::HashMinus => Some(BinaryOperator::HashMinus),
3813 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3814 Token::AtAt => Some(BinaryOperator::AtAt),
3815 Token::Question => Some(BinaryOperator::Question),
3816 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3817 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3818 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3819 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3820 Some(BinaryOperator::DoubleHash)
3821 }
3822
3823 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3824 Some(BinaryOperator::AndLt)
3825 }
3826 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3827 Some(BinaryOperator::AndGt)
3828 }
3829 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3830 Some(BinaryOperator::QuestionDash)
3831 }
3832 Token::AmpersandLeftAngleBracketVerticalBar
3833 if self.dialect.supports_geometric_types() =>
3834 {
3835 Some(BinaryOperator::AndLtPipe)
3836 }
3837 Token::VerticalBarAmpersandRightAngleBracket
3838 if self.dialect.supports_geometric_types() =>
3839 {
3840 Some(BinaryOperator::PipeAndGt)
3841 }
3842 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3843 Some(BinaryOperator::LtDashGt)
3844 }
3845 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3846 Some(BinaryOperator::LtCaret)
3847 }
3848 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3849 Some(BinaryOperator::GtCaret)
3850 }
3851 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3852 Some(BinaryOperator::QuestionHash)
3853 }
3854 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3855 Some(BinaryOperator::QuestionDoublePipe)
3856 }
3857 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3858 Some(BinaryOperator::QuestionDashPipe)
3859 }
3860 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3861 Some(BinaryOperator::TildeEq)
3862 }
3863 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3864 Some(BinaryOperator::LtLtPipe)
3865 }
3866 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3867 Some(BinaryOperator::PipeGtGt)
3868 }
3869 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3870
3871 Token::Word(w) => match w.keyword {
3872 Keyword::AND => Some(BinaryOperator::And),
3873 Keyword::OR => Some(BinaryOperator::Or),
3874 Keyword::XOR => Some(BinaryOperator::Xor),
3875 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3876 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3877 self.expect_token(&Token::LParen)?;
3878 let mut idents = vec![];
3883 loop {
3884 self.advance_token();
3885 idents.push(self.get_current_token().to_string());
3886 if !self.consume_token(&Token::Period) {
3887 break;
3888 }
3889 }
3890 self.expect_token(&Token::RParen)?;
3891 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3892 }
3893 _ => None,
3894 },
3895 _ => None,
3896 };
3897
3898 let tok = self.token_at(tok_index);
3899 if let Some(op) = regular_binary_operator {
3900 if let Some(keyword) =
3901 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3902 {
3903 self.expect_token(&Token::LParen)?;
3904 let right = if self.peek_sub_query() {
3905 self.prev_token(); self.parse_subexpr(precedence)?
3909 } else {
3910 let right = self.parse_subexpr(precedence)?;
3912 self.expect_token(&Token::RParen)?;
3913 right
3914 };
3915
3916 if !matches!(
3917 op,
3918 BinaryOperator::Gt
3919 | BinaryOperator::Lt
3920 | BinaryOperator::GtEq
3921 | BinaryOperator::LtEq
3922 | BinaryOperator::Eq
3923 | BinaryOperator::NotEq
3924 | BinaryOperator::PGRegexMatch
3925 | BinaryOperator::PGRegexIMatch
3926 | BinaryOperator::PGRegexNotMatch
3927 | BinaryOperator::PGRegexNotIMatch
3928 | BinaryOperator::PGLikeMatch
3929 | BinaryOperator::PGILikeMatch
3930 | BinaryOperator::PGNotLikeMatch
3931 | BinaryOperator::PGNotILikeMatch
3932 ) {
3933 return parser_err!(
3934 format!(
3935 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3936 ),
3937 span.start
3938 );
3939 };
3940
3941 Ok(match keyword {
3942 Keyword::ALL => Expr::AllOp {
3943 left: Box::new(expr),
3944 compare_op: op,
3945 right: Box::new(right),
3946 },
3947 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3948 left: Box::new(expr),
3949 compare_op: op,
3950 right: Box::new(right),
3951 is_some: keyword == Keyword::SOME,
3952 },
3953 unexpected_keyword => return Err(ParserError::ParserError(
3954 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3955 )),
3956 })
3957 } else {
3958 Ok(Expr::BinaryOp {
3959 left: Box::new(expr),
3960 op,
3961 right: Box::new(self.parse_subexpr(precedence)?),
3962 })
3963 }
3964 } else if let Token::Word(w) = &tok.token {
3965 match w.keyword {
3966 Keyword::IS => {
3967 if self.parse_keyword(Keyword::NULL) {
3968 Ok(Expr::IsNull(Box::new(expr)))
3969 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3970 Ok(Expr::IsNotNull(Box::new(expr)))
3971 } else if self.parse_keywords(&[Keyword::TRUE]) {
3972 Ok(Expr::IsTrue(Box::new(expr)))
3973 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3974 Ok(Expr::IsNotTrue(Box::new(expr)))
3975 } else if self.parse_keywords(&[Keyword::FALSE]) {
3976 Ok(Expr::IsFalse(Box::new(expr)))
3977 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3978 Ok(Expr::IsNotFalse(Box::new(expr)))
3979 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3980 Ok(Expr::IsUnknown(Box::new(expr)))
3981 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3982 Ok(Expr::IsNotUnknown(Box::new(expr)))
3983 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3984 let expr2 = self.parse_expr()?;
3985 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3986 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3987 {
3988 let expr2 = self.parse_expr()?;
3989 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3990 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3991 Ok(is_normalized)
3992 } else {
3993 self.expected_ref(
3994 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3995 self.peek_token_ref(),
3996 )
3997 }
3998 }
3999 Keyword::AT => {
4000 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4001 Ok(Expr::AtTimeZone {
4002 timestamp: Box::new(expr),
4003 time_zone: Box::new(self.parse_subexpr(precedence)?),
4004 })
4005 }
4006 Keyword::NOT
4007 | Keyword::IN
4008 | Keyword::BETWEEN
4009 | Keyword::LIKE
4010 | Keyword::ILIKE
4011 | Keyword::SIMILAR
4012 | Keyword::REGEXP
4013 | Keyword::RLIKE => {
4014 self.prev_token();
4015 let negated = self.parse_keyword(Keyword::NOT);
4016 let regexp = self.parse_keyword(Keyword::REGEXP);
4017 let rlike = self.parse_keyword(Keyword::RLIKE);
4018 let null = if !self.in_column_definition_state() {
4019 self.parse_keyword(Keyword::NULL)
4020 } else {
4021 false
4022 };
4023 if regexp || rlike {
4024 Ok(Expr::RLike {
4025 negated,
4026 expr: Box::new(expr),
4027 pattern: Box::new(
4028 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4029 ),
4030 regexp,
4031 })
4032 } else if negated && null {
4033 Ok(Expr::IsNotNull(Box::new(expr)))
4034 } else if self.parse_keyword(Keyword::IN) {
4035 self.parse_in(expr, negated)
4036 } else if self.parse_keyword(Keyword::BETWEEN) {
4037 self.parse_between(expr, negated)
4038 } else if self.parse_keyword(Keyword::LIKE) {
4039 Ok(Expr::Like {
4040 negated,
4041 any: self.parse_keyword(Keyword::ANY),
4042 expr: Box::new(expr),
4043 pattern: Box::new(
4044 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4045 ),
4046 escape_char: self.parse_escape_char()?,
4047 })
4048 } else if self.parse_keyword(Keyword::ILIKE) {
4049 Ok(Expr::ILike {
4050 negated,
4051 any: self.parse_keyword(Keyword::ANY),
4052 expr: Box::new(expr),
4053 pattern: Box::new(
4054 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4055 ),
4056 escape_char: self.parse_escape_char()?,
4057 })
4058 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4059 Ok(Expr::SimilarTo {
4060 negated,
4061 expr: Box::new(expr),
4062 pattern: Box::new(
4063 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4064 ),
4065 escape_char: self.parse_escape_char()?,
4066 })
4067 } else {
4068 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4069 }
4070 }
4071 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4072 Ok(Expr::IsNotNull(Box::new(expr)))
4073 }
4074 Keyword::MEMBER => {
4075 if self.parse_keyword(Keyword::OF) {
4076 self.expect_token(&Token::LParen)?;
4077 let array = self.parse_expr()?;
4078 self.expect_token(&Token::RParen)?;
4079 Ok(Expr::MemberOf(MemberOf {
4080 value: Box::new(expr),
4081 array: Box::new(array),
4082 }))
4083 } else {
4084 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4085 }
4086 }
4087 _ => parser_err!(
4089 format!("No infix parser for token {:?}", tok.token),
4090 tok.span.start
4091 ),
4092 }
4093 } else if Token::DoubleColon == *tok {
4094 Ok(Expr::Cast {
4095 kind: CastKind::DoubleColon,
4096 expr: Box::new(expr),
4097 data_type: self.parse_data_type()?,
4098 array: false,
4099 format: None,
4100 })
4101 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4102 Ok(Expr::UnaryOp {
4103 op: UnaryOperator::PGPostfixFactorial,
4104 expr: Box::new(expr),
4105 })
4106 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4107 || (Token::Colon == *tok)
4108 {
4109 self.prev_token();
4110 self.parse_json_access(expr)
4111 } else {
4112 parser_err!(
4114 format!("No infix parser for token {:?}", tok.token),
4115 tok.span.start
4116 )
4117 }
4118 }
4119
4120 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4122 if self.parse_keyword(Keyword::ESCAPE) {
4123 Ok(Some(self.parse_value()?))
4124 } else {
4125 Ok(None)
4126 }
4127 }
4128
    /// Parse the interior of a `[...]` subscript (opening bracket already
    /// consumed): either a plain index `[i]` or a slice
    /// `[lower? : upper? (: stride?)?]`, consuming the closing bracket.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound is omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[expr]` is a plain index; `[:]`-style with nothing further is a
        // fully open slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If a lower bound was parsed, the separating `:` has not been
        // consumed yet (the leading-colon branch already ate it).
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lower:]` — slice with no upper bound.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `[lower:upper]` — slice without a stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // Third component: `[lower:upper:stride?]`.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride was parsed the closing bracket is still pending.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4203
4204 pub fn parse_multi_dim_subscript(
4206 &mut self,
4207 chain: &mut Vec<AccessExpr>,
4208 ) -> Result<(), ParserError> {
4209 while self.consume_token(&Token::LBracket) {
4210 self.parse_subscript(chain)?;
4211 }
4212 Ok(())
4213 }
4214
4215 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4219 let subscript = self.parse_subscript_inner()?;
4220 chain.push(AccessExpr::Subscript(subscript));
4221 Ok(())
4222 }
4223
    /// Parse one object-key element of a JSON path: a bare, double-quoted or
    /// backtick-quoted word, or a double-quoted string literal.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            Token::Word(Word {
                value,
                // Only unquoted, `"`-quoted or backtick-quoted words qualify
                // as object keys here.
                quote_style: quote_style @ (Some('"') | Some('`') | None),
                // Reserved words are acceptable as object keys, so the
                // keyword classification is deliberately ignored.
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
4248
4249 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4250 let path = self.parse_json_path()?;
4251 Ok(Expr::JsonAccess {
4252 value: Box::new(expr),
4253 path,
4254 })
4255 }
4256
    /// Parse a JSON path such as `.a.b[1]`, `:a` or `:[...]`, consuming path
    /// elements until a token that cannot continue the path is reached.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // Leading `:[...]` element (colon followed by a bracketed key).
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // A colon may only introduce the very first element.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // Dots separate all subsequent object keys.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                _ => {
                    // Not part of the path: put the token back and stop.
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this after seeing a path-introducing token, so
        // at least one element must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4289
    /// Parse the right-hand side of `[NOT] IN` after the `IN` keyword has
    /// been consumed: `IN UNNEST(<expr>)`, `IN (<subquery>)`, or
    /// `IN (<expr>, ...)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // `IN UNNEST(array_expr)` form.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Speculatively try a subquery first; `maybe_parse` rolls the cursor
        // back on failure so the contents can be re-parsed as an expression
        // list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects allow `IN ()` with no elements.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4324
4325 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4327 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4330 self.expect_keyword_is(Keyword::AND)?;
4331 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4332 Ok(Expr::Between {
4333 expr: Box::new(expr),
4334 negated,
4335 low: Box::new(low),
4336 high: Box::new(high),
4337 })
4338 }
4339
4340 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4342 Ok(Expr::Cast {
4343 kind: CastKind::DoubleColon,
4344 expr: Box::new(expr),
4345 data_type: self.parse_data_type()?,
4346 array: false,
4347 format: None,
4348 })
4349 }
4350
    /// Return the precedence of the next operator token, delegating to the
    /// dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4355
    /// Return the token at absolute position `index` in the token stream, or
    /// the shared EOF token when the index is past the end.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4361
    /// Return a clone of the next non-whitespace token without advancing the
    /// parser.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4369
    /// Return a reference to the next non-whitespace token without advancing
    /// the parser.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4375
4376 pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4399 self.peek_tokens_with_location()
4400 .map(|with_loc| with_loc.token)
4401 }
4402
    /// Peek at the next `N` non-whitespace tokens, cloned together with their
    /// spans, without advancing the parser; positions past the end of the
    /// stream yield EOF tokens with an empty span.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
        let mut index = self.index;
        // `index` persists across the N closure invocations, so each array
        // slot resumes scanning where the previous one stopped.
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Whitespace tokens are skipped, not counted.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.cloned().unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            });
        })
    }
4425
    /// Peek at references to the next `N` non-whitespace tokens without
    /// advancing the parser; positions past the end yield the shared EOF
    /// token.
    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
        let mut index = self.index;
        // `index` persists across the N closure invocations, so each array
        // slot resumes scanning where the previous one stopped.
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Whitespace tokens are skipped, not counted.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.unwrap_or(&EOF_TOKEN);
        })
    }
4445
    /// Return a clone of the nth (0-based) upcoming non-whitespace token
    /// without advancing the parser.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4450
    /// Return a reference to the nth (0-based) upcoming non-whitespace token
    /// without advancing the parser; out-of-range positions yield the shared
    /// EOF token.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts toward `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        // Either the nth real token or EOF past the end.
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4470
    /// Return a clone of the immediately following token, including
    /// whitespace, without advancing the parser.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4476
4477 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4479 self.tokens
4480 .get(self.index + n)
4481 .cloned()
4482 .unwrap_or(TokenWithSpan {
4483 token: Token::EOF,
4484 span: Span::empty(),
4485 })
4486 }
4487
    /// Return a reference to the nth following token, counting whitespace, or
    /// the shared EOF token when out of range.
    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
    }
4492
4493 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4497 let index = self.index;
4498 let matched = self.parse_keywords(expected);
4499 self.index = index;
4500 matched
4501 }
4502
    /// Consume the next non-whitespace token and return a clone of it (EOF
    /// once the stream is exhausted).
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4511
    /// Return the absolute token index of the most recently consumed token
    /// (0 when nothing has been consumed yet).
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4519
4520 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4522 self.index += 1;
4523 self.tokens.get(self.index - 1)
4524 }
4525
    /// Move the cursor forward to just past the next non-whitespace token
    /// (or past the end of the stream), without returning it.
    pub fn advance_token(&mut self) {
        loop {
            self.index += 1;
            match self.tokens.get(self.index - 1) {
                // Keep skipping over whitespace tokens.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                // A real token or the end of the stream stops the scan.
                _ => break,
            }
        }
    }
4541
    /// Return the most recently consumed token (EOF before anything has been
    /// consumed or once the stream is exhausted).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4548
    /// Return the token at the position just before the current one.
    /// NOTE(review): this is a raw positional lookup, so the result may be a
    /// whitespace token — confirm callers expect that.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4555
    /// Return the token immediately after the current one without consuming
    /// it (raw positional lookup; may be a whitespace token).
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4562
    /// Step the cursor back to the previous non-whitespace token so it will
    /// be returned again by the next `next_token`/`advance_token` call.
    ///
    /// Panics (via `assert!`) if the cursor is already at the start of the
    /// stream.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            // Skip backwards over whitespace tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
4583
4584 pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4586 parser_err!(
4587 format!("Expected: {expected}, found: {found}"),
4588 found.span.start
4589 )
4590 }
4591
    /// Build an "Expected: ..., found: ..." parse error anchored at the
    /// location of `found`.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4599
4600 pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4602 let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4603 parser_err!(
4604 format!("Expected: {expected}, found: {found}"),
4605 found.span.start
4606 )
4607 }
4608
4609 #[must_use]
4612 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4613 if self.peek_keyword(expected) {
4614 self.advance_token();
4615 true
4616 } else {
4617 false
4618 }
4619 }
4620
    /// Return true if the next non-whitespace token is the word form of
    /// keyword `expected`, without consuming it.
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4628
    /// If keyword `expected` is immediately followed by the exact token
    /// sequence `tokens`, consume the whole sequence and return true;
    /// otherwise consume nothing and return false.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4639
    /// Return true if keyword `expected` is immediately followed by the exact
    /// token sequence `tokens`, without consuming anything.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4647
    /// Shared implementation for `parse_keyword_with_tokens` and
    /// `peek_keyword_with_tokens`: check for keyword `expected` followed by
    /// `tokens`, consuming the whole sequence only when `consume` is true.
    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
        match &self.peek_token_ref().token {
            Token::Word(w) if expected == w.keyword => {
                // Verify every follow-up token before consuming anything, so
                // a partial match has no side effects.
                for (idx, token) in tokens.iter().enumerate() {
                    if self.peek_nth_token_ref(idx + 1).token != *token {
                        return false;
                    }
                }

                if consume {
                    // Consume the keyword itself plus each matched token.
                    for _ in 0..(tokens.len() + 1) {
                        self.advance_token();
                    }
                }

                true
            }
            _ => false,
        }
    }
4668
    /// Consume the exact sequence of keywords, returning true on success; on
    /// any mismatch nothing is consumed and false is returned.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4676
    /// Consume the exact sequence of keywords and return the token index of
    /// the first one; on any mismatch the cursor is restored and `None` is
    /// returned. An empty `keywords` slice also yields `None`.
    #[must_use]
    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
        let start_index = self.index;
        let mut first_keyword_index = None;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // Unwind partial progress so a failed multi-keyword
                // lookahead leaves no side effects.
                self.index = start_index;
                return None;
            }
            if first_keyword_index.is_none() {
                // `parse_keyword` advanced past the keyword, so the keyword
                // itself sits one position back.
                first_keyword_index = Some(self.index.saturating_sub(1));
            }
        }
        first_keyword_index
    }
4694
4695 #[must_use]
4698 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4699 for keyword in keywords {
4700 if self.peek_keyword(*keyword) {
4701 return Some(*keyword);
4702 }
4703 }
4704 None
4705 }
4706
4707 #[must_use]
4711 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4712 match &self.peek_token_ref().token {
4713 Token::Word(w) => {
4714 keywords
4715 .iter()
4716 .find(|keyword| **keyword == w.keyword)
4717 .map(|keyword| {
4718 self.advance_token();
4719 *keyword
4720 })
4721 }
4722 _ => None,
4723 }
4724 }
4725
4726 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4729 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4730 Ok(keyword)
4731 } else {
4732 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4733 self.expected_ref(
4734 &format!("one of {}", keywords.join(" or ")),
4735 self.peek_token_ref(),
4736 )
4737 }
4738 }
4739
4740 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4745 if self.parse_keyword(expected) {
4746 Ok(self.get_current_token().clone())
4747 } else {
4748 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4749 }
4750 }
4751
4752 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4758 if self.parse_keyword(expected) {
4759 Ok(())
4760 } else {
4761 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4762 }
4763 }
4764
4765 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4768 for &kw in expected {
4769 self.expect_keyword_is(kw)?;
4770 }
4771 Ok(())
4772 }
4773
4774 #[must_use]
4778 pub fn consume_token(&mut self, expected: &Token) -> bool {
4779 if self.peek_token_ref() == expected {
4780 self.advance_token();
4781 true
4782 } else {
4783 false
4784 }
4785 }
4786
4787 #[must_use]
4791 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4792 let index = self.index;
4793 for token in tokens {
4794 if !self.consume_token(token) {
4795 self.index = index;
4796 return false;
4797 }
4798 }
4799 true
4800 }
4801
4802 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4804 if self.peek_token_ref() == expected {
4805 Ok(self.next_token())
4806 } else {
4807 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4808 }
4809 }
4810
4811 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4812 where
4813 <T as FromStr>::Err: Display,
4814 {
4815 s.parse::<T>().map_err(|e| {
4816 ParserError::ParserError(format!(
4817 "Could not parse '{s}' as {}: {e}{loc}",
4818 core::any::type_name::<T>()
4819 ))
4820 })
4821 }
4822
4823 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4825 let trailing_commas =
4831 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4832
4833 self.parse_comma_separated_with_trailing_commas(
4834 |p| p.parse_select_item(),
4835 trailing_commas,
4836 Self::is_reserved_for_column_alias,
4837 )
4838 }
4839
    /// Parse a comma-separated list of GRANT/REVOKE permissions. With the
    /// trailing-commas option, a comma followed by `ON`, a closing delimiter,
    /// or EOF ends the list instead of requiring another permission.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                match &self.peek_token_ref().token {
                    // e.g. `GRANT SELECT, ON t` — the comma was trailing.
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4863
    /// Parse the comma-separated list of table factors (each with its joins)
    /// in a FROM clause; trailing commas are allowed when the dialect
    /// supports them.
    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
        let trailing_commas = self.dialect.supports_from_trailing_commas();

        self.parse_comma_separated_with_trailing_commas(
            Parser::parse_table_and_joins,
            trailing_commas,
            // A keyword that cannot start a table factor terminates the list
            // when a trailing comma is present.
            |kw, parser| !self.dialect.is_table_factor(kw, parser),
        )
    }
4874
    /// Decide whether a comma-separated list has ended, consuming the comma
    /// when one is present. With `trailing_commas`, a comma followed by a
    /// reserved keyword (per `is_reserved_keyword`), a closing delimiter, or
    /// EOF also ends the list; the terminator itself is left unconsumed.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Inspect the token after the comma to classify it.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            // Only the comma may be consumed; put the inspected token back.
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4907
    /// Decide whether a comma-separated list has ended, using the parser's
    /// configured trailing-comma option and the default column-alias
    /// reserved-word check.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4916
    /// Parse one or more items produced by `f`, separated by commas, using
    /// the parser's configured trailing-comma behavior.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4928
    /// Parse one or more comma-separated items produced by `f`.
    /// `trailing_commas` allows a dangling comma before a terminator;
    /// `is_reserved_keyword` identifies keywords that end the list in that
    /// case.
    fn parse_comma_separated_with_trailing_commas<T, F, R>(
        &mut self,
        mut f: F,
        trailing_commas: bool,
        is_reserved_keyword: R,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        let mut values = vec![];
        loop {
            values.push(f(self)?);
            // The end check also consumes the separating comma when the list
            // continues.
            if self.is_parse_comma_separated_end_with_trailing_commas(
                trailing_commas,
                &is_reserved_keyword,
            ) {
                break;
            }
        }
        Ok(values)
    }
4955
4956 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4958 where
4959 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4960 {
4961 let mut values = vec![];
4962 loop {
4963 values.push(f(self)?);
4964 if !self.consume_token(&Token::Period) {
4965 break;
4966 }
4967 }
4968 Ok(values)
4969 }
4970
4971 pub fn parse_keyword_separated<T, F>(
4973 &mut self,
4974 keyword: Keyword,
4975 mut f: F,
4976 ) -> Result<Vec<T>, ParserError>
4977 where
4978 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4979 {
4980 let mut values = vec![];
4981 loop {
4982 values.push(f(self)?);
4983 if !self.parse_keyword(keyword) {
4984 break;
4985 }
4986 }
4987 Ok(values)
4988 }
4989
4990 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4992 where
4993 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4994 {
4995 self.expect_token(&Token::LParen)?;
4996 let res = f(self)?;
4997 self.expect_token(&Token::RParen)?;
4998 Ok(res)
4999 }
5000
    /// Parse zero or more comma-separated items produced by `f`; `end_token`
    /// (not consumed) marks an empty list, and with the trailing-commas
    /// option a lone comma before `end_token` is also treated as empty.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        // e.g. `(, )` with trailing commas enabled: consume the comma and
        // report an empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5022
    /// Parse a sequence of semicolon-terminated statements, stopping (without
    /// consuming the terminator) at EOF or at any unquoted keyword listed in
    /// `terminal_keywords`.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // Quoted identifiers never act as terminators, even if
                    // they spell a terminal keyword.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            // Every statement in the list must be explicitly terminated.
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5047
    /// Default list-terminating predicate: true when `kw` cannot serve as a
    /// column alias in the active dialect.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5054
5055 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
5059 where
5060 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5061 {
5062 match self.try_parse(f) {
5063 Ok(t) => Ok(Some(t)),
5064 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
5065 _ => Ok(None),
5066 }
5067 }
5068
5069 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
5071 where
5072 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5073 {
5074 let index = self.index;
5075 match f(self) {
5076 Ok(t) => Ok(t),
5077 Err(e) => {
5078 self.index = index;
5080 Err(e)
5081 }
5082 }
5083 }
5084
    /// Parse an optional `ALL`, `DISTINCT`, or `DISTINCT ON (<exprs>)`
    /// quantifier. Returns `Ok(None)` when neither keyword is present and
    /// errors when both appear together.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Captured before consuming anything so errors point at the keyword.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            // Unreachable in practice: only ALL/DISTINCT are requested above.
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        // Only DISTINCT may be followed by an ON (...) clause.
        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        // `DISTINCT ON (...)`; an empty column list is accepted.
        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            // Step back so the RParen is re-consumed by the shared path below.
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5123
    /// Parse a SQL `CREATE` statement, dispatching on the object type that
    /// follows (TABLE, VIEW, FUNCTION, INDEX, SCHEMA, ...). The `CREATE`
    /// keyword itself has already been consumed by the caller.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        // Common prefix modifiers; each is optional and consumed up front so
        // the per-object parsers below only see the object keyword onwards.
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Fold GLOBAL/LOCAL into a single tri-state flag: Some(true) = GLOBAL,
        // Some(false) = LOCAL, None = neither.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is only meaningful for DuckDB secrets.
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        // Dispatch chain: branch order matters because several branches
        // consume their keyword(s) as a side effect of the test.
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            self.parse_create_user(or_replace).map(Into::into)
        } else if or_replace {
            // OR REPLACE is only valid with the object types tested above.
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR FAMILY / OPERATOR CLASS / plain OPERATOR.
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5220
5221 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5222 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5223 let name = self.parse_identifier()?;
5224 let options = self
5225 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5226 .options;
5227 let with_tags = self.parse_keyword(Keyword::WITH);
5228 let tags = if self.parse_keyword(Keyword::TAG) {
5229 self.parse_key_value_options(true, &[])?.options
5230 } else {
5231 vec![]
5232 };
5233 Ok(CreateUser {
5234 or_replace,
5235 if_not_exists,
5236 name,
5237 options: KeyValueOptions {
5238 options,
5239 delimiter: KeyValueOptionsDelimiter::Space,
5240 },
5241 with_tags,
5242 tags: KeyValueOptions {
5243 options: tags,
5244 delimiter: KeyValueOptionsDelimiter::Comma,
5245 },
5246 })
5247 }
5248
    /// Parse a DuckDB `CREATE SECRET` statement. `or_replace`, `temporary`,
    /// and `persistent` reflect modifiers already consumed by `parse_create`.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        // Both the secret name and the `IN <storage>` clause are optional and
        // may appear before the parenthesized option list.
        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token_ref().token != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN <storage>`.
            if storage_specifier.is_none()
                && self.peek_token_ref().token != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        // The option list must start with `TYPE <ident>`.
        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        // Remaining comma-separated `key value` pairs, if any.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
5307
5308 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5310 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5311 if self.parse_keyword(Keyword::TABLE) {
5312 let table_name = self.parse_object_name(false)?;
5313 if self.peek_token_ref().token != Token::EOF {
5314 if let Token::Word(word) = &self.peek_token_ref().token {
5315 if word.keyword == Keyword::OPTIONS {
5316 options = self.parse_options(Keyword::OPTIONS)?
5317 }
5318 };
5319
5320 if self.peek_token_ref().token != Token::EOF {
5321 let (a, q) = self.parse_as_query()?;
5322 has_as = a;
5323 query = Some(q);
5324 }
5325
5326 Ok(Statement::Cache {
5327 table_flag,
5328 table_name,
5329 has_as,
5330 options,
5331 query,
5332 })
5333 } else {
5334 Ok(Statement::Cache {
5335 table_flag,
5336 table_name,
5337 has_as,
5338 options,
5339 query,
5340 })
5341 }
5342 } else {
5343 table_flag = Some(self.parse_object_name(false)?);
5344 if self.parse_keyword(Keyword::TABLE) {
5345 let table_name = self.parse_object_name(false)?;
5346 if self.peek_token_ref().token != Token::EOF {
5347 if let Token::Word(word) = &self.peek_token_ref().token {
5348 if word.keyword == Keyword::OPTIONS {
5349 options = self.parse_options(Keyword::OPTIONS)?
5350 }
5351 };
5352
5353 if self.peek_token_ref().token != Token::EOF {
5354 let (a, q) = self.parse_as_query()?;
5355 has_as = a;
5356 query = Some(q);
5357 }
5358
5359 Ok(Statement::Cache {
5360 table_flag,
5361 table_name,
5362 has_as,
5363 options,
5364 query,
5365 })
5366 } else {
5367 Ok(Statement::Cache {
5368 table_flag,
5369 table_name,
5370 has_as,
5371 options,
5372 query,
5373 })
5374 }
5375 } else {
5376 if self.peek_token_ref().token == Token::EOF {
5377 self.prev_token();
5378 }
5379 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5380 }
5381 }
5382 }
5383
5384 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5386 match &self.peek_token_ref().token {
5387 Token::Word(word) => match word.keyword {
5388 Keyword::AS => {
5389 self.next_token();
5390 Ok((true, self.parse_query()?))
5391 }
5392 _ => Ok((false, self.parse_query()?)),
5393 },
5394 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5395 }
5396 }
5397
5398 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5400 self.expect_keyword_is(Keyword::TABLE)?;
5401 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5402 let table_name = self.parse_object_name(false)?;
5403 Ok(Statement::UNCache {
5404 table_name,
5405 if_exists,
5406 })
5407 }
5408
5409 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5411 self.expect_keyword_is(Keyword::TABLE)?;
5412 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5413 let table_name = self.parse_object_name(false)?;
5414 self.expect_keyword_is(Keyword::USING)?;
5415 let module_name = self.parse_identifier()?;
5416 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5421 Ok(Statement::CreateVirtualTable {
5422 name: table_name,
5423 if_not_exists,
5424 module_name,
5425 module_args,
5426 })
5427 }
5428
5429 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5431 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5432
5433 let schema_name = self.parse_schema_name()?;
5434
5435 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5436 Some(self.parse_expr()?)
5437 } else {
5438 None
5439 };
5440
5441 let with = if self.peek_keyword(Keyword::WITH) {
5442 Some(self.parse_options(Keyword::WITH)?)
5443 } else {
5444 None
5445 };
5446
5447 let options = if self.peek_keyword(Keyword::OPTIONS) {
5448 Some(self.parse_options(Keyword::OPTIONS)?)
5449 } else {
5450 None
5451 };
5452
5453 let clone = if self.parse_keyword(Keyword::CLONE) {
5454 Some(self.parse_object_name(false)?)
5455 } else {
5456 None
5457 };
5458
5459 Ok(Statement::CreateSchema {
5460 schema_name,
5461 if_not_exists,
5462 with,
5463 options,
5464 default_collate_spec,
5465 clone,
5466 })
5467 }
5468
5469 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5470 if self.parse_keyword(Keyword::AUTHORIZATION) {
5471 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5472 } else {
5473 let name = self.parse_object_name(false)?;
5474
5475 if self.parse_keyword(Keyword::AUTHORIZATION) {
5476 Ok(SchemaName::NamedAuthorization(
5477 name,
5478 self.parse_identifier()?,
5479 ))
5480 } else {
5481 Ok(SchemaName::Simple(name))
5482 }
5483 }
5484 }
5485
5486 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5488 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5489 let db_name = self.parse_object_name(false)?;
5490 let mut location = None;
5491 let mut managed_location = None;
5492 loop {
5493 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5494 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5495 Some(Keyword::MANAGEDLOCATION) => {
5496 managed_location = Some(self.parse_literal_string()?)
5497 }
5498 _ => break,
5499 }
5500 }
5501 let clone = if self.parse_keyword(Keyword::CLONE) {
5502 Some(self.parse_object_name(false)?)
5503 } else {
5504 None
5505 };
5506
5507 let mut default_charset = None;
5515 let mut default_collation = None;
5516 loop {
5517 let has_default = self.parse_keyword(Keyword::DEFAULT);
5518 if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
5519 || self.parse_keyword(Keyword::CHARSET)
5520 {
5521 let _ = self.consume_token(&Token::Eq);
5522 default_charset = Some(self.parse_identifier()?.value);
5523 } else if self.parse_keyword(Keyword::COLLATE) {
5524 let _ = self.consume_token(&Token::Eq);
5525 default_collation = Some(self.parse_identifier()?.value);
5526 } else if has_default {
5527 self.prev_token();
5529 break;
5530 } else {
5531 break;
5532 }
5533 }
5534
5535 Ok(Statement::CreateDatabase {
5536 db_name,
5537 if_not_exists: ine,
5538 location,
5539 managed_location,
5540 or_replace: false,
5541 transient: false,
5542 clone,
5543 data_retention_time_in_days: None,
5544 max_data_extension_time_in_days: None,
5545 external_volume: None,
5546 catalog: None,
5547 replace_invalid_characters: None,
5548 default_ddl_collation: None,
5549 storage_serialization_policy: None,
5550 comment: None,
5551 default_charset,
5552 default_collation,
5553 catalog_sync: None,
5554 catalog_sync_namespace_mode: None,
5555 catalog_sync_namespace_flatten_delimiter: None,
5556 with_tags: None,
5557 with_contacts: None,
5558 })
5559 }
5560
5561 pub fn parse_optional_create_function_using(
5563 &mut self,
5564 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5565 if !self.parse_keyword(Keyword::USING) {
5566 return Ok(None);
5567 };
5568 let keyword =
5569 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5570
5571 let uri = self.parse_literal_string()?;
5572
5573 match keyword {
5574 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5575 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5576 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5577 _ => self.expected(
5578 "JAR, FILE or ARCHIVE, got {:?}",
5579 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5580 ),
5581 }
5582 }
5583
    /// Parse `CREATE FUNCTION`, dispatching to a dialect-specific
    /// implementation; the `FUNCTION` keyword has already been consumed.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB treats CREATE FUNCTION as a macro definition.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
                .map(Into::into)
        } else {
            // Unsupported dialect: rewind to the FUNCTION keyword so the
            // error points at it.
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5610
    /// Parse a PostgreSQL-style `CREATE FUNCTION`: argument list, optional
    /// `RETURNS` clause, and an unordered sequence of function attributes
    /// (LANGUAGE, volatility, null-call behavior, PARALLEL, SECURITY, SET
    /// parameters, and the AS/RETURN body).
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Attributes may appear in any order but (except SET) at most once
        // each; they are accumulated in this scratch struct as parsed.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Reject duplicate occurrences of a single-use attribute.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // SET may repeat: each occurrence configures one parameter,
                // either `SET name FROM CURRENT` or `SET name { = | TO } value`.
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // RETURN and AS are alternative body forms; both land in
                // function_body, so duplicates of either are rejected here.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No more attributes.
                break;
            }
        }

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5765
5766 fn parse_hive_create_function(
5770 &mut self,
5771 or_replace: bool,
5772 temporary: bool,
5773 ) -> Result<CreateFunction, ParserError> {
5774 let name = self.parse_object_name(false)?;
5775 self.expect_keyword_is(Keyword::AS)?;
5776
5777 let body = self.parse_create_function_body_string()?;
5778 let using = self.parse_optional_create_function_using()?;
5779
5780 Ok(CreateFunction {
5781 or_alter: false,
5782 or_replace,
5783 temporary,
5784 name,
5785 function_body: Some(body),
5786 using,
5787 if_not_exists: false,
5788 args: None,
5789 return_type: None,
5790 behavior: None,
5791 called_on_null: None,
5792 parallel: None,
5793 security: None,
5794 set_params: vec![],
5795 language: None,
5796 determinism_specifier: None,
5797 options: None,
5798 remote_connection: None,
5799 })
5800 }
5801
    /// Parse a BigQuery-style `CREATE FUNCTION`: `IF NOT EXISTS`, parameter
    /// list, `RETURNS`, determinism, `LANGUAGE`, `REMOTE WITH CONNECTION`,
    /// `OPTIONS`, and an `AS` body whose position relative to `OPTIONS` is
    /// recorded in the AST.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS may appear either before or after the AS body; try the
        // "before" position first and remember whichever one matched.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions have no local body at all.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS before AS: any OPTIONS must follow the body,
                // i.e. the AS came before the options.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5881
    /// Parse a T-SQL `CREATE [OR ALTER] FUNCTION`: parameter list, mandatory
    /// `RETURNS` clause (scalar type or named TABLE definition), and a
    /// `BEGIN...END` or `RETURN` body.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Table-valued form: `RETURNS @name TABLE (<columns>)`. Parsed
        // speculatively and rolled back if what follows is not
        // `<ident> TABLE (...)`.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            p.expect_keyword_is(Keyword::TABLE)?;
            // Step back so parse_data_type sees (and re-consumes) TABLE.
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        // Fall back to a plain scalar return type.
        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // AS before the body is optional in T-SQL.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            // `RETURN (subquery)` or `RETURN SELECT ...` for inline
            // table-valued functions.
            if self.peek_token_ref().token == Token::LParen {
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5972
5973 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
5974 if self.parse_keyword(Keyword::SETOF) {
5975 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
5976 } else {
5977 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
5978 }
5979 }
5980
5981 fn parse_create_function_name_and_params(
5982 &mut self,
5983 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5984 let name = self.parse_object_name(false)?;
5985 let parse_function_param =
5986 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5987 let name = parser.parse_identifier()?;
5988 let data_type = parser.parse_data_type()?;
5989 let default_expr = if parser.consume_token(&Token::Eq) {
5990 Some(parser.parse_expr()?)
5991 } else {
5992 None
5993 };
5994
5995 Ok(OperateFunctionArg {
5996 mode: None,
5997 name: Some(name),
5998 data_type,
5999 default_expr,
6000 })
6001 };
6002 self.expect_token(&Token::LParen)?;
6003 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6004 self.expect_token(&Token::RParen)?;
6005 Ok((name, args))
6006 }
6007
    /// Parse one argument in a PostgreSQL-style `CREATE FUNCTION` parameter
    /// list: `[IN|OUT|INOUT|VARIADIC] [name] type [DEFAULT expr | = expr]`.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        // The argument name is optional and cannot be told apart from a type
        // up front, so parse a data type first; if ANOTHER data type follows,
        // the first parse actually consumed the argument name.
        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Token index recorded right after the first data-type parse; used
        // below to recover the token reinterpreted as the argument name
        // (exact token_at/get_current_index semantics defined elsewhere —
        // NOTE(review): confirm which token this indexes).
        let data_type_idx = self.get_current_index();

        // DEFAULT introduces the default value, never a type, so refuse it
        // here to make the speculative second-type parse fail cleanly.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // A reinterpreted name must be a bare word.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Optional default: either the DEFAULT keyword or `=`.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6068
    /// Parse one argument of an aggregate signature: `[IN] [name] type`.
    /// OUT/INOUT/VARIADIC modes and DEFAULT/`=` values are explicitly
    /// rejected for aggregates.
    fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else {
            // Only IN is legal in an aggregate signature; fail early on the
            // other modes so the error points at the offending keyword.
            if self
                .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
                .is_some()
            {
                return self.expected_ref(
                    "IN or argument type in aggregate signature",
                    self.peek_token_ref(),
                );
            }
            None
        };

        // Same name-vs-type disambiguation as parse_function_arg: parse a
        // type first, and if a second type follows, the first parse actually
        // consumed the argument name.
        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        let data_type_idx = self.get_current_index();

        // Tokens that may legally follow an aggregate argument but can never
        // start a type; refusing them makes the speculative parse fail cleanly.
        fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT)
                || parser.peek_keyword(Keyword::ORDER)
                || parser.peek_token_ref().token == Token::Comma
                || parser.peek_token_ref().token == Token::RParen
            {
                parser_err!(
                    "The current token cannot start an aggregate argument type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
            let token = self.token_at(data_type_idx);
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Aggregate arguments cannot carry default values.
        if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
            return self.expected_ref(
                "',' or ')' or ORDER BY after aggregate argument type",
                self.peek_token_ref(),
            );
        }

        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr: None,
        })
    }
6131
6132 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6138 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6139 {
6140 self.prev_token();
6141 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6142 }
6143 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6144 let trigger_name = self.parse_object_name(false)?;
6145 let table_name = if self.parse_keyword(Keyword::ON) {
6146 Some(self.parse_object_name(false)?)
6147 } else {
6148 None
6149 };
6150 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6151 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6152 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6153 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6154 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6155 )),
6156 None => None,
6157 };
6158 Ok(DropTrigger {
6159 if_exists,
6160 trigger_name,
6161 table_name,
6162 option,
6163 })
6164 }
6165
6166 pub fn parse_create_trigger(
6168 &mut self,
6169 temporary: bool,
6170 or_alter: bool,
6171 or_replace: bool,
6172 is_constraint: bool,
6173 ) -> Result<CreateTrigger, ParserError> {
6174 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6175 {
6176 self.prev_token();
6177 return self.expected_ref("an object type after CREATE", self.peek_token_ref());
6178 }
6179
6180 let name = self.parse_object_name(false)?;
6181 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
6182
6183 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
6184 self.expect_keyword_is(Keyword::ON)?;
6185 let table_name = self.parse_object_name(false)?;
6186
6187 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
6188 self.parse_object_name(true).ok()
6189 } else {
6190 None
6191 };
6192
6193 let characteristics = self.parse_constraint_characteristics()?;
6194
6195 let mut referencing = vec![];
6196 if self.parse_keyword(Keyword::REFERENCING) {
6197 while let Some(refer) = self.parse_trigger_referencing()? {
6198 referencing.push(refer);
6199 }
6200 }
6201
6202 let trigger_object = if self.parse_keyword(Keyword::FOR) {
6203 let include_each = self.parse_keyword(Keyword::EACH);
6204 let trigger_object =
6205 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
6206 Keyword::ROW => TriggerObject::Row,
6207 Keyword::STATEMENT => TriggerObject::Statement,
6208 unexpected_keyword => return Err(ParserError::ParserError(
6209 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
6210 )),
6211 };
6212
6213 Some(if include_each {
6214 TriggerObjectKind::ForEach(trigger_object)
6215 } else {
6216 TriggerObjectKind::For(trigger_object)
6217 })
6218 } else {
6219 let _ = self.parse_keyword(Keyword::FOR);
6220
6221 None
6222 };
6223
6224 let condition = self
6225 .parse_keyword(Keyword::WHEN)
6226 .then(|| self.parse_expr())
6227 .transpose()?;
6228
6229 let mut exec_body = None;
6230 let mut statements = None;
6231 if self.parse_keyword(Keyword::EXECUTE) {
6232 exec_body = Some(self.parse_trigger_exec_body()?);
6233 } else {
6234 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6235 }
6236
6237 Ok(CreateTrigger {
6238 or_alter,
6239 temporary,
6240 or_replace,
6241 is_constraint,
6242 name,
6243 period,
6244 period_before_table: true,
6245 events,
6246 table_name,
6247 referenced_table_name,
6248 referencing,
6249 trigger_object,
6250 condition,
6251 exec_body,
6252 statements_as: false,
6253 statements,
6254 characteristics,
6255 })
6256 }
6257
6258 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6260 Ok(
6261 match self.expect_one_of_keywords(&[
6262 Keyword::FOR,
6263 Keyword::BEFORE,
6264 Keyword::AFTER,
6265 Keyword::INSTEAD,
6266 ])? {
6267 Keyword::FOR => TriggerPeriod::For,
6268 Keyword::BEFORE => TriggerPeriod::Before,
6269 Keyword::AFTER => TriggerPeriod::After,
6270 Keyword::INSTEAD => self
6271 .expect_keyword_is(Keyword::OF)
6272 .map(|_| TriggerPeriod::InsteadOf)?,
6273 unexpected_keyword => return Err(ParserError::ParserError(
6274 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6275 )),
6276 },
6277 )
6278 }
6279
6280 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6282 Ok(
6283 match self.expect_one_of_keywords(&[
6284 Keyword::INSERT,
6285 Keyword::UPDATE,
6286 Keyword::DELETE,
6287 Keyword::TRUNCATE,
6288 ])? {
6289 Keyword::INSERT => TriggerEvent::Insert,
6290 Keyword::UPDATE => {
6291 if self.parse_keyword(Keyword::OF) {
6292 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6293 TriggerEvent::Update(cols)
6294 } else {
6295 TriggerEvent::Update(vec![])
6296 }
6297 }
6298 Keyword::DELETE => TriggerEvent::Delete,
6299 Keyword::TRUNCATE => TriggerEvent::Truncate,
6300 unexpected_keyword => return Err(ParserError::ParserError(
6301 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6302 )),
6303 },
6304 )
6305 }
6306
    /// Parses a single `REFERENCING` entry: `OLD|NEW TABLE [AS] name`.
    ///
    /// Returns `Ok(None)` when the next tokens do not form such an entry,
    /// which is how the caller detects the end of the REFERENCING list.
    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
        // NOTE: the `parse_keyword(Keyword::TABLE)` match guards have a side
        // effect — they consume TABLE when it is present. If OLD/NEW is
        // consumed but TABLE does not follow, the `_` arm returns None with
        // the OLD/NEW keyword already consumed.
        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::OldTable
            }
            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
                TriggerReferencingType::NewTable
            }
            _ => {
                return Ok(None);
            }
        };

        // The AS keyword is optional; record whether it was written so the
        // statement can round-trip through display.
        let is_as = self.parse_keyword(Keyword::AS);
        let transition_relation_name = self.parse_object_name(false)?;
        Ok(Some(TriggerReferencing {
            refer_type,
            is_as,
            transition_relation_name,
        }))
    }
6329
6330 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6332 Ok(TriggerExecBody {
6333 exec_type: match self
6334 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6335 {
6336 Keyword::FUNCTION => TriggerExecBodyType::Function,
6337 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6338 unexpected_keyword => return Err(ParserError::ParserError(
6339 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
6340 )),
6341 },
6342 func_desc: self.parse_function_desc()?,
6343 })
6344 }
6345
6346 pub fn parse_create_macro(
6348 &mut self,
6349 or_replace: bool,
6350 temporary: bool,
6351 ) -> Result<Statement, ParserError> {
6352 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6353 let name = self.parse_object_name(false)?;
6354 self.expect_token(&Token::LParen)?;
6355 let args = if self.consume_token(&Token::RParen) {
6356 self.prev_token();
6357 None
6358 } else {
6359 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6360 };
6361
6362 self.expect_token(&Token::RParen)?;
6363 self.expect_keyword_is(Keyword::AS)?;
6364
6365 Ok(Statement::CreateMacro {
6366 or_replace,
6367 temporary,
6368 name,
6369 args,
6370 definition: if self.parse_keyword(Keyword::TABLE) {
6371 MacroDefinition::Table(self.parse_query()?)
6372 } else {
6373 MacroDefinition::Expr(self.parse_expr()?)
6374 },
6375 })
6376 } else {
6377 self.prev_token();
6378 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6379 }
6380 }
6381
6382 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6383 let name = self.parse_identifier()?;
6384
6385 let default_expr =
6386 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6387 Some(self.parse_expr()?)
6388 } else {
6389 None
6390 };
6391 Ok(MacroArg { name, default_expr })
6392 }
6393
6394 pub fn parse_create_external_table(
6396 &mut self,
6397 or_replace: bool,
6398 ) -> Result<CreateTable, ParserError> {
6399 self.expect_keyword_is(Keyword::TABLE)?;
6400 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6401 let table_name = self.parse_object_name(false)?;
6402 let (columns, constraints) = self.parse_columns()?;
6403
6404 let hive_distribution = self.parse_hive_distribution()?;
6405 let hive_formats = self.parse_hive_formats()?;
6406
6407 let file_format = if let Some(ref hf) = hive_formats {
6408 if let Some(ref ff) = hf.storage {
6409 match ff {
6410 HiveIOFormat::FileFormat { format } => Some(*format),
6411 _ => None,
6412 }
6413 } else {
6414 None
6415 }
6416 } else {
6417 None
6418 };
6419 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6420 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6421 let table_options = if !table_properties.is_empty() {
6422 CreateTableOptions::TableProperties(table_properties)
6423 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6424 CreateTableOptions::Options(options)
6425 } else {
6426 CreateTableOptions::None
6427 };
6428 Ok(CreateTableBuilder::new(table_name)
6429 .columns(columns)
6430 .constraints(constraints)
6431 .hive_distribution(hive_distribution)
6432 .hive_formats(hive_formats)
6433 .table_options(table_options)
6434 .or_replace(or_replace)
6435 .if_not_exists(if_not_exists)
6436 .external(true)
6437 .file_format(file_format)
6438 .location(location)
6439 .build())
6440 }
6441
6442 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6446 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6447 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6448 let table_name = self.parse_object_name(true)?;
6449
6450 self.expect_keyword_is(Keyword::CLONE)?;
6451 let clone = Some(self.parse_object_name(true)?);
6452
6453 let version =
6454 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6455 {
6456 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6457 } else {
6458 None
6459 };
6460
6461 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6462 CreateTableOptions::Options(options)
6463 } else {
6464 CreateTableOptions::None
6465 };
6466
6467 Ok(CreateTableBuilder::new(table_name)
6468 .snapshot(true)
6469 .if_not_exists(if_not_exists)
6470 .clone_clause(clone)
6471 .version(version)
6472 .table_options(table_options)
6473 .build())
6474 }
6475
6476 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6478 let next_token = self.next_token();
6479 match &next_token.token {
6480 Token::Word(w) => match w.keyword {
6481 Keyword::AVRO => Ok(FileFormat::AVRO),
6482 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6483 Keyword::ORC => Ok(FileFormat::ORC),
6484 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6485 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6486 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6487 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6488 _ => self.expected("fileformat", next_token),
6489 },
6490 _ => self.expected("fileformat", next_token),
6491 }
6492 }
6493
6494 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6495 if self.consume_token(&Token::Eq) {
6496 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6497 } else {
6498 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6499 }
6500 }
6501
6502 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6504 let next_token = self.next_token();
6505 match &next_token.token {
6506 Token::Word(w) => match w.keyword {
6507 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6508 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6509 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6510 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6511 _ => self.expected("fileformat", next_token),
6512 },
6513 _ => self.expected("fileformat", next_token),
6514 }
6515 }
6516
    /// Parses a `CREATE ... VIEW` statement; the caller has consumed
    /// everything up to (but not including) `SECURE`/`MATERIALIZED`/`VIEW`.
    ///
    /// `create_view_params` carries `ALGORITHM`/`DEFINER`/`SQL SECURITY`
    /// modifiers parsed earlier by the caller (see `parse_create_view_params`).
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery permits hyphenated, unquoted project names in object names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // IF NOT EXISTS may appear before or after the view name; both
        // positions are accepted and the one used is recorded so the
        // statement can round-trip through display.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        // Optional `CLUSTER BY (col, ...)`.
        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style `OPTIONS (...)`; a non-empty list replaces any
        // options captured from WITH above.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse-style `TO <table>` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '<text>'`, only on dialects that opt in.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift-style `WITH NO SCHEMA BINDING` trails the query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
        })
    }
6611
    /// Parses MySQL-style `CREATE VIEW` modifiers that precede the `VIEW`
    /// keyword: `ALGORITHM = ...`, `DEFINER = ...`, and `SQL SECURITY ...`.
    ///
    /// Returns `Ok(None)` when none of the three clauses is present, so the
    /// AST only carries params for statements that actually wrote them.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Defensive: `expect_one_of_keywords` should only yield
                    // the listed keywords; rewind one token so the error
                    // message can show what was actually found.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Defensive, same rewind dance as the ALGORITHM arm.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only materialize the params struct if at least one clause appeared.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6669
    /// Parses a `CREATE ROLE` statement; the `ROLE` keyword has already been
    /// consumed.
    ///
    /// The accepted option keywords are dialect-gated: MsSql accepts only
    /// `AUTHORIZATION`, Postgres accepts the full option list, and all other
    /// dialects accept none. Each option may appear at most once; a repeat is
    /// reported as an error at the offending keyword's source location.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // An optional WITH may introduce the option list; its presence is not
        // recorded.
        let _ = self.parse_keyword(Keyword::WITH);
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Accumulators for each option; Option/empty-Vec doubles as the
        // "already seen" flag used for duplicate detection below.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the option keyword just consumed, for error
            // reporting; falls back to 0:0 if the token is unavailable.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            // Every arm yields Result<(), ParserError>; the trailing `?`
            // propagates duplicate-option errors.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                // Paired keywords map to a boolean: the positive form is true.
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                // `PASSWORD NULL` is distinct from a literal password value.
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                // `CONNECTION LIMIT <n>`.
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                // `VALID UNTIL <timestamp>`.
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                // `IN ROLE ...` and `IN GROUP ...` share the IN keyword.
                Keyword::IN => {
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Defensive: `parse_one_of_keywords` should only return
                // keywords from `optional_keywords`.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
6894
6895 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6897 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6898 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6899 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6900 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6901 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6902 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6903 )),
6904 None => {
6905 match self.parse_identifier() {
6906 Ok(ident) => Owner::Ident(ident),
6907 Err(e) => {
6908 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6909 }
6910 }
6911 }
6912 };
6913 Ok(owner)
6914 }
6915
6916 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
6918 let name = self.parse_object_name(false)?;
6919 self.expect_keyword_is(Keyword::AS)?;
6920 let data_type = self.parse_data_type()?;
6921 let collation = if self.parse_keyword(Keyword::COLLATE) {
6922 Some(self.parse_identifier()?)
6923 } else {
6924 None
6925 };
6926 let default = if self.parse_keyword(Keyword::DEFAULT) {
6927 Some(self.parse_expr()?)
6928 } else {
6929 None
6930 };
6931 let mut constraints = Vec::new();
6932 while let Some(constraint) = self.parse_optional_table_constraint()? {
6933 constraints.push(constraint);
6934 }
6935
6936 Ok(CreateDomain {
6937 name,
6938 data_type,
6939 collation,
6940 default,
6941 constraints,
6942 })
6943 }
6944
6945 pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
6955 let name = self.parse_identifier()?;
6956 self.expect_keyword_is(Keyword::ON)?;
6957 let table_name = self.parse_object_name(false)?;
6958
6959 let policy_type = if self.parse_keyword(Keyword::AS) {
6960 let keyword =
6961 self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6962 Some(match keyword {
6963 Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6964 Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6965 unexpected_keyword => return Err(ParserError::ParserError(
6966 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
6967 )),
6968 })
6969 } else {
6970 None
6971 };
6972
6973 let command = if self.parse_keyword(Keyword::FOR) {
6974 let keyword = self.expect_one_of_keywords(&[
6975 Keyword::ALL,
6976 Keyword::SELECT,
6977 Keyword::INSERT,
6978 Keyword::UPDATE,
6979 Keyword::DELETE,
6980 ])?;
6981 Some(match keyword {
6982 Keyword::ALL => CreatePolicyCommand::All,
6983 Keyword::SELECT => CreatePolicyCommand::Select,
6984 Keyword::INSERT => CreatePolicyCommand::Insert,
6985 Keyword::UPDATE => CreatePolicyCommand::Update,
6986 Keyword::DELETE => CreatePolicyCommand::Delete,
6987 unexpected_keyword => return Err(ParserError::ParserError(
6988 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
6989 )),
6990 })
6991 } else {
6992 None
6993 };
6994
6995 let to = if self.parse_keyword(Keyword::TO) {
6996 Some(self.parse_comma_separated(|p| p.parse_owner())?)
6997 } else {
6998 None
6999 };
7000
7001 let using = if self.parse_keyword(Keyword::USING) {
7002 self.expect_token(&Token::LParen)?;
7003 let expr = self.parse_expr()?;
7004 self.expect_token(&Token::RParen)?;
7005 Some(expr)
7006 } else {
7007 None
7008 };
7009
7010 let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
7011 self.expect_token(&Token::LParen)?;
7012 let expr = self.parse_expr()?;
7013 self.expect_token(&Token::RParen)?;
7014 Some(expr)
7015 } else {
7016 None
7017 };
7018
7019 Ok(CreatePolicy {
7020 name,
7021 table_name,
7022 policy_type,
7023 command,
7024 to,
7025 using,
7026 with_check,
7027 })
7028 }
7029
7030 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7040 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7041 let name = self.parse_identifier()?;
7042
7043 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7044 Some(self.parse_literal_string()?)
7045 } else {
7046 None
7047 };
7048
7049 let url = if self.parse_keyword(Keyword::URL) {
7050 Some(self.parse_literal_string()?)
7051 } else {
7052 None
7053 };
7054
7055 let comment = self.parse_optional_inline_comment()?;
7056
7057 let with_dcproperties =
7058 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7059 properties if !properties.is_empty() => Some(properties),
7060 _ => None,
7061 };
7062
7063 Ok(CreateConnector {
7064 name,
7065 if_not_exists,
7066 connector_type,
7067 url,
7068 comment,
7069 with_dcproperties,
7070 })
7071 }
7072
7073 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7079 let mut parts = vec![];
7080 loop {
7081 parts.push(ObjectNamePart::Identifier(Ident::new(
7082 self.next_token().to_string(),
7083 )));
7084 if !self.consume_token(&Token::Period) {
7085 break;
7086 }
7087 }
7088 Ok(ObjectName(parts))
7089 }
7090
    /// Parses `CREATE OPERATOR <name> ( <option>, ... )`; the `OPERATOR`
    /// keyword has already been consumed.
    ///
    /// Options are comma-separated `KEYWORD [= value]` pairs. Each option may
    /// appear at most once — the match guards below only fire when the option
    /// has not been seen yet, so a repeat falls through to the catch-all
    /// error arm. `FUNCTION` (or its alias `PROCEDURE`) is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Bare flags with no `= value` part.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE are alternatives for the same slot;
                // which keyword was used is remembered in `is_procedure`.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or
                // the `OPERATOR(name)` wrapper form.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                // RESTRICT/JOIN name estimator support functions.
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a guard above rejected a duplicate.
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION (or PROCEDURE) is the one mandatory parameter.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7212
7213 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7217 let name = self.parse_object_name(false)?;
7218 self.expect_keyword(Keyword::USING)?;
7219 let using = self.parse_identifier()?;
7220
7221 Ok(CreateOperatorFamily { name, using })
7222 }
7223
7224 pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
7228 let name = self.parse_object_name(false)?;
7229 let default = self.parse_keyword(Keyword::DEFAULT);
7230 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
7231 let for_type = self.parse_data_type()?;
7232 self.expect_keyword(Keyword::USING)?;
7233 let using = self.parse_identifier()?;
7234
7235 let family = if self.parse_keyword(Keyword::FAMILY) {
7236 Some(self.parse_object_name(false)?)
7237 } else {
7238 None
7239 };
7240
7241 self.expect_keyword(Keyword::AS)?;
7242
7243 let mut items = vec![];
7244 loop {
7245 if self.parse_keyword(Keyword::OPERATOR) {
7246 let strategy_number = self.parse_literal_uint()?;
7247 let operator_name = self.parse_operator_name()?;
7248
7249 let op_types = if self.consume_token(&Token::LParen) {
7251 let left = self.parse_data_type()?;
7252 self.expect_token(&Token::Comma)?;
7253 let right = self.parse_data_type()?;
7254 self.expect_token(&Token::RParen)?;
7255 Some(OperatorArgTypes { left, right })
7256 } else {
7257 None
7258 };
7259
7260 let purpose = if self.parse_keyword(Keyword::FOR) {
7262 if self.parse_keyword(Keyword::SEARCH) {
7263 Some(OperatorPurpose::ForSearch)
7264 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7265 let sort_family = self.parse_object_name(false)?;
7266 Some(OperatorPurpose::ForOrderBy { sort_family })
7267 } else {
7268 return self
7269 .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
7270 }
7271 } else {
7272 None
7273 };
7274
7275 items.push(OperatorClassItem::Operator {
7276 strategy_number,
7277 operator_name,
7278 op_types,
7279 purpose,
7280 });
7281 } else if self.parse_keyword(Keyword::FUNCTION) {
7282 let support_number = self.parse_literal_uint()?;
7283
7284 let op_types = if self.consume_token(&Token::LParen)
7286 && self.peek_token_ref().token != Token::RParen
7287 {
7288 let mut types = vec![];
7289 loop {
7290 types.push(self.parse_data_type()?);
7291 if !self.consume_token(&Token::Comma) {
7292 break;
7293 }
7294 }
7295 self.expect_token(&Token::RParen)?;
7296 Some(types)
7297 } else if self.consume_token(&Token::LParen) {
7298 self.expect_token(&Token::RParen)?;
7299 Some(vec![])
7300 } else {
7301 None
7302 };
7303
7304 let function_name = self.parse_object_name(false)?;
7305
7306 let argument_types = if self.consume_token(&Token::LParen) {
7308 let mut types = vec![];
7309 loop {
7310 if self.peek_token_ref().token == Token::RParen {
7311 break;
7312 }
7313 types.push(self.parse_data_type()?);
7314 if !self.consume_token(&Token::Comma) {
7315 break;
7316 }
7317 }
7318 self.expect_token(&Token::RParen)?;
7319 types
7320 } else {
7321 vec![]
7322 };
7323
7324 items.push(OperatorClassItem::Function {
7325 support_number,
7326 op_types,
7327 function_name,
7328 argument_types,
7329 });
7330 } else if self.parse_keyword(Keyword::STORAGE) {
7331 let storage_type = self.parse_data_type()?;
7332 items.push(OperatorClassItem::Storage { storage_type });
7333 } else {
7334 break;
7335 }
7336
7337 if !self.consume_token(&Token::Comma) {
7339 break;
7340 }
7341 }
7342
7343 Ok(CreateOperatorClass {
7344 name,
7345 default,
7346 for_type,
7347 using,
7348 family,
7349 items,
7350 })
7351 }
7352
    /// Parses a `DROP` statement.
    ///
    /// Consumes the object-type keyword after `DROP` and either dispatches to a
    /// dedicated sub-parser (functions, policies, triggers, operators, ...) or
    /// falls through to the generic `Statement::Drop` handling below.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // MySQL/DuckDB accept `DROP TEMPORARY ...`; DuckDB additionally
        // accepts `DROP PERSISTENT ...` (used for secrets below).
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::COLLATION) {
            ObjectType::Collation
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            // Object kinds with richer syntax return their own statement types.
            return self.parse_drop_function().map(Into::into);
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy().map(Into::into);
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain().map(Into::into);
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger().map(Into::into);
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // `DROP OPERATOR [FAMILY | CLASS] ...`
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected_ref(
                "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token_ref(),
            );
        };
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Remember where the drop-behavior keywords start for error reporting.
        let loc = self.peek_token_ref().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // Optional trailing `ON <object>` clause, recorded for object kinds
        // that use it (stored verbatim in the `table` field).
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
7452
7453 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7454 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7455 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7456 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7457 _ => None,
7458 }
7459 }
7460
7461 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7466 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7467 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7468 let drop_behavior = self.parse_optional_drop_behavior();
7469 Ok(DropFunction {
7470 if_exists,
7471 func_desc,
7472 drop_behavior,
7473 })
7474 }
7475
7476 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7482 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7483 let name = self.parse_identifier()?;
7484 self.expect_keyword_is(Keyword::ON)?;
7485 let table_name = self.parse_object_name(false)?;
7486 let drop_behavior = self.parse_optional_drop_behavior();
7487 Ok(DropPolicy {
7488 if_exists,
7489 name,
7490 table_name,
7491 drop_behavior,
7492 })
7493 }
7494 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7500 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7501 let name = self.parse_identifier()?;
7502 Ok(Statement::DropConnector { if_exists, name })
7503 }
7504
7505 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7509 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7510 let name = self.parse_object_name(false)?;
7511 let drop_behavior = self.parse_optional_drop_behavior();
7512 Ok(DropDomain {
7513 if_exists,
7514 name,
7515 drop_behavior,
7516 })
7517 }
7518
7519 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7524 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7525 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7526 let drop_behavior = self.parse_optional_drop_behavior();
7527 Ok(Statement::DropProcedure {
7528 if_exists,
7529 proc_desc,
7530 drop_behavior,
7531 })
7532 }
7533
7534 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7535 let name = self.parse_object_name(false)?;
7536
7537 let args = if self.consume_token(&Token::LParen) {
7538 if self.consume_token(&Token::RParen) {
7539 Some(vec![])
7540 } else {
7541 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7542 self.expect_token(&Token::RParen)?;
7543 Some(args)
7544 }
7545 } else {
7546 None
7547 };
7548
7549 Ok(FunctionDesc { name, args })
7550 }
7551
7552 fn parse_drop_secret(
7554 &mut self,
7555 temporary: bool,
7556 persistent: bool,
7557 ) -> Result<Statement, ParserError> {
7558 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7559 let name = self.parse_identifier()?;
7560 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7561 self.parse_identifier().ok()
7562 } else {
7563 None
7564 };
7565 let temp = match (temporary, persistent) {
7566 (true, false) => Some(true),
7567 (false, true) => Some(false),
7568 (false, false) => None,
7569 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
7570 };
7571
7572 Ok(Statement::DropSecret {
7573 if_exists,
7574 temporary: temp,
7575 name,
7576 storage_specifier,
7577 })
7578 }
7579
    /// Parses a `DECLARE` statement.
    ///
    /// Dialect-specific forms are delegated first (BigQuery, Snowflake,
    /// MS SQL); the fallthrough handles the cursor-declaration form:
    /// `DECLARE name [BINARY] [INSENSITIVE | ASENSITIVE]
    /// [SCROLL | NO SCROLL] CURSOR [WITH | WITHOUT HOLD] FOR <query>`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // BINARY is always recorded: Some(true) when present, Some(false) otherwise.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // parse_one_of_keywords can only return a keyword from its
                    // input list, so this arm is unreachable in practice.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7654
    /// Parses a BigQuery `DECLARE` statement:
    /// `DECLARE name [, ...] [<data_type>] [DEFAULT <expr>]`.
    /// When no data type is given, the `DEFAULT <expr>` clause is required.
    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
        let names = self.parse_comma_separated(Parser::parse_identifier)?;

        // A DEFAULT keyword directly after the names means no data type was given.
        let data_type = match &self.peek_token_ref().token {
            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
            _ => Some(self.parse_data_type()?),
        };

        let expr = if data_type.is_some() {
            // With a data type, the initializer is optional.
            if self.parse_keyword(Keyword::DEFAULT) {
                Some(self.parse_expr()?)
            } else {
                None
            }
        } else {
            // Without a data type, the initializer is mandatory.
            self.expect_keyword_is(Keyword::DEFAULT)?;
            Some(self.parse_expr()?)
        };

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names,
                data_type,
                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
                declare_type: None,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query: None,
            }],
        })
    }
7697
    /// Parses a Snowflake `DECLARE` block: one or more declarations separated
    /// by semicolons. Each declaration is one of:
    /// - `name CURSOR FOR <query | expr>`
    /// - `name RESULTSET [<initializer>]`
    /// - `name EXCEPTION [( ... )]`
    /// - `name [<data_type>] [<initializer>]`
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` carries a full query; anything
                    // else is parsed as an expression.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // Optional initializer; an immediate `;` means none.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // Optional parenthesized payload parsed as an expression.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: try an initializer first; otherwise a
                    // data type, itself optionally followed by an initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                // A non-keyword word after `;` starts another declaration;
                // a keyword means the DECLARE block ended, so put the
                // semicolon back for the caller to handle.
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
7813
7814 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7826 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7827
7828 Ok(Statement::Declare { stmts })
7829 }
7830
    /// Parses a single MS SQL declaration inside `DECLARE`:
    /// `@name [AS] <data_type> [<initializer>]` or
    /// `name CURSOR [FOR <query>]`.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            // Variable names must start with '@'; the exception is a cursor
            // declaration, whose name is a plain identifier.
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): TokenizerError for a parse-level failure looks
                // odd — presumably historical; confirm before changing, since
                // callers may match on the variant.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    // Optional `AS` before the data type.
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // `FOR <query>` (cursor body) and an initializer are mutually exclusive.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
7894
7895 pub fn parse_snowflake_variable_declaration_expression(
7903 &mut self,
7904 ) -> Result<Option<DeclareAssignment>, ParserError> {
7905 Ok(match &self.peek_token_ref().token {
7906 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7907 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7909 }
7910 Token::Assignment => {
7911 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
7913 self.parse_expr()?,
7914 )))
7915 }
7916 _ => None,
7917 })
7918 }
7919
7920 pub fn parse_mssql_variable_declaration_expression(
7927 &mut self,
7928 ) -> Result<Option<DeclareAssignment>, ParserError> {
7929 Ok(match &self.peek_token_ref().token {
7930 Token::Eq => {
7931 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
7933 self.parse_expr()?,
7934 )))
7935 }
7936 _ => None,
7937 })
7938 }
7939
    /// Parses a cursor `FETCH` statement:
    /// `FETCH [<direction>] { FROM | IN } <cursor> [INTO <name>]`.
    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
        let direction = if self.parse_keyword(Keyword::NEXT) {
            FetchDirection::Next
        } else if self.parse_keyword(Keyword::PRIOR) {
            FetchDirection::Prior
        } else if self.parse_keyword(Keyword::FIRST) {
            FetchDirection::First
        } else if self.parse_keyword(Keyword::LAST) {
            FetchDirection::Last
        } else if self.parse_keyword(Keyword::ABSOLUTE) {
            FetchDirection::Absolute {
                limit: self.parse_number_value()?,
            }
        } else if self.parse_keyword(Keyword::RELATIVE) {
            FetchDirection::Relative {
                limit: self.parse_number_value()?,
            }
        } else if self.parse_keyword(Keyword::FORWARD) {
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::ForwardAll
            } else {
                FetchDirection::Forward {
                    // A count is currently required after FORWARD
                    // (a bare `FORWARD` is rejected here).
                    limit: Some(self.parse_number_value()?),
                }
            }
        } else if self.parse_keyword(Keyword::BACKWARD) {
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::BackwardAll
            } else {
                FetchDirection::Backward {
                    // Same as FORWARD: the count is required.
                    limit: Some(self.parse_number_value()?),
                }
            }
        } else if self.parse_keyword(Keyword::ALL) {
            FetchDirection::All
        } else {
            // No direction keyword: treat a bare number as the row count.
            FetchDirection::Count {
                limit: self.parse_number_value()?,
            }
        };

        // FROM and IN are equivalent separators; one of them is required.
        let position = if self.peek_keyword(Keyword::FROM) {
            self.expect_keyword(Keyword::FROM)?;
            FetchPosition::From
        } else if self.peek_keyword(Keyword::IN) {
            self.expect_keyword(Keyword::IN)?;
            FetchPosition::In
        } else {
            return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
        };

        let name = self.parse_identifier()?;

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(Statement::Fetch {
            name,
            direction,
            position,
            into,
        })
    }
8009
8010 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8012 let object_type = if self.parse_keyword(Keyword::ALL) {
8013 DiscardObject::ALL
8014 } else if self.parse_keyword(Keyword::PLANS) {
8015 DiscardObject::PLANS
8016 } else if self.parse_keyword(Keyword::SEQUENCES) {
8017 DiscardObject::SEQUENCES
8018 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8019 DiscardObject::TEMP
8020 } else {
8021 return self.expected_ref(
8022 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8023 self.peek_token_ref(),
8024 );
8025 };
8026 Ok(Statement::Discard { object_type })
8027 }
8028
    /// Parses the remainder of a `CREATE [UNIQUE] INDEX` statement after the
    /// `INDEX` keyword. `unique` reports whether the caller consumed `UNIQUE`.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional: `CREATE INDEX ON tbl (...)` is accepted.
        // With IF NOT EXISTS the name is always expected.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // `USING <method>` may appear between the index name and `ON`.
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // `USING` may also appear after the table name; a later occurrence
        // takes precedence over one parsed before `ON`.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // `INCLUDE (col, ...)`: extra non-key columns stored in the index.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS [NOT] DISTINCT`: Some(true) = NULLS DISTINCT, Some(false) = NULLS NOT DISTINCT.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // `WITH (param, ...)` is only recognized for dialects that opt in.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Partial-index predicate: `WHERE <expr>`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing ALGORITHM/LOCK clauses are parsed as alter-table operations.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8120
8121 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8123 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8124 let name = self.parse_identifier()?;
8125
8126 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8127 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8128 Some(self.parse_identifier()?)
8129 } else {
8130 None
8131 };
8132
8133 let version = if self.parse_keyword(Keyword::VERSION) {
8134 Some(self.parse_identifier()?)
8135 } else {
8136 None
8137 };
8138
8139 let cascade = self.parse_keyword(Keyword::CASCADE);
8140
8141 (schema, version, cascade)
8142 } else {
8143 (None, None, false)
8144 };
8145
8146 Ok(CreateExtension {
8147 name,
8148 if_not_exists,
8149 schema,
8150 version,
8151 cascade,
8152 })
8153 }
8154
8155 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8157 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8158 let name = self.parse_object_name(false)?;
8159
8160 let definition = if self.parse_keyword(Keyword::FROM) {
8161 CreateCollationDefinition::From(self.parse_object_name(false)?)
8162 } else if self.consume_token(&Token::LParen) {
8163 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8164 self.expect_token(&Token::RParen)?;
8165 CreateCollationDefinition::Options(options)
8166 } else {
8167 return self.expected_ref(
8168 "FROM or parenthesized option list after CREATE COLLATION name",
8169 self.peek_token_ref(),
8170 );
8171 };
8172
8173 Ok(CreateCollation {
8174 if_not_exists,
8175 name,
8176 definition,
8177 })
8178 }
8179
8180 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8182 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8183 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8184 let cascade_or_restrict =
8185 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8186 Ok(Statement::DropExtension(DropExtension {
8187 names,
8188 if_exists,
8189 cascade_or_restrict: cascade_or_restrict
8190 .map(|k| match k {
8191 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8192 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8193 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8194 })
8195 .transpose()?,
8196 }))
8197 }
8198
8199 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8202 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8203 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8204 let drop_behavior = self.parse_optional_drop_behavior();
8205 Ok(Statement::DropOperator(DropOperator {
8206 if_exists,
8207 operators,
8208 drop_behavior,
8209 }))
8210 }
8211
8212 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8215 let name = self.parse_operator_name()?;
8216 self.expect_token(&Token::LParen)?;
8217
8218 let left_type = if self.parse_keyword(Keyword::NONE) {
8220 None
8221 } else {
8222 Some(self.parse_data_type()?)
8223 };
8224
8225 self.expect_token(&Token::Comma)?;
8226
8227 let right_type = self.parse_data_type()?;
8229
8230 self.expect_token(&Token::RParen)?;
8231
8232 Ok(DropOperatorSignature {
8233 name,
8234 left_type,
8235 right_type,
8236 })
8237 }
8238
8239 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8243 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8244 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8245 self.expect_keyword(Keyword::USING)?;
8246 let using = self.parse_identifier()?;
8247 let drop_behavior = self.parse_optional_drop_behavior();
8248 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8249 if_exists,
8250 names,
8251 using,
8252 drop_behavior,
8253 }))
8254 }
8255
8256 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8260 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8261 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8262 self.expect_keyword(Keyword::USING)?;
8263 let using = self.parse_identifier()?;
8264 let drop_behavior = self.parse_optional_drop_behavior();
8265 Ok(Statement::DropOperatorClass(DropOperatorClass {
8266 if_exists,
8267 names,
8268 using,
8269 drop_behavior,
8270 }))
8271 }
8272
8273 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8277 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8278 self.expect_token(&Token::LParen)?;
8279 let columns =
8280 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8281 self.expect_token(&Token::RParen)?;
8282 Ok(HiveDistributionStyle::PARTITIONED { columns })
8283 } else {
8284 Ok(HiveDistributionStyle::NONE)
8285 }
8286 }
8287
8288 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8292 let token = self.next_token();
8293 match &token.token {
8294 Token::Word(w) => match w.keyword {
8295 Keyword::AUTO => Ok(DistStyle::Auto),
8296 Keyword::EVEN => Ok(DistStyle::Even),
8297 Keyword::KEY => Ok(DistStyle::Key),
8298 Keyword::ALL => Ok(DistStyle::All),
8299 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8300 },
8301 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8302 }
8303 }
8304
    /// Parses Hive table storage clauses that may follow the column list:
    /// `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'` and
    /// `WITH SERDEPROPERTIES (...)`, in any order. Returns `None` when
    /// none of them are present.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // `STORED AS INPUTFORMAT <expr> OUTPUTFORMAT <expr>`
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // `STORED AS <file format>`
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Put `WITH` back so the options parser can match the full
                    // `WITH SERDEPROPERTIES` prefix itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // `WITH` belonged to some other clause; stop scanning.
                        break;
                    }
                }
                None => break,
                _ => break,
            }
        }

        Ok(hive_format)
    }
8360
    /// Parses a Hive `ROW FORMAT` clause (the `ROW` keyword is already
    /// consumed by the caller): either `FORMAT SERDE '<class>'` or
    /// `FORMAT DELIMITED` followed by any number of delimiter sub-clauses.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED (or bare FORMAT): collect delimiter specs until a
                // sub-clause fails to match; each spec records the delimiter
                // kind and its character.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            // `FIELDS TERMINATED BY c [ESCAPED BY c]`
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            // `COLLECTION ITEMS TERMINATED BY c`
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            // `MAP KEYS TERMINATED BY c`
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            // `LINES TERMINATED BY c`
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            // `NULL DEFINED AS c`
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8457
8458 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8459 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8460 Ok(Some(self.parse_identifier()?))
8461 } else {
8462 Ok(None)
8463 }
8464 }
8465
8466 pub fn parse_create_table(
8468 &mut self,
8469 or_replace: bool,
8470 temporary: bool,
8471 global: Option<bool>,
8472 transient: bool,
8473 ) -> Result<CreateTable, ParserError> {
8474 let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
8475 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8476 let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
8477
8478 let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
8488 Some(self.parse_object_name(allow_unquoted_hyphen)?)
8489 } else {
8490 None
8491 };
8492
8493 let on_cluster = self.parse_optional_on_cluster()?;
8495
8496 let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
8497
8498 let clone = if self.parse_keyword(Keyword::CLONE) {
8499 self.parse_object_name(allow_unquoted_hyphen).ok()
8500 } else {
8501 None
8502 };
8503
8504 let (columns, constraints) = self.parse_columns()?;
8506 let comment_after_column_def =
8507 if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
8508 let next_token = self.next_token();
8509 match next_token.token {
8510 Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
8511 _ => self.expected("comment", next_token)?,
8512 }
8513 } else {
8514 None
8515 };
8516
8517 let for_values = if partition_of.is_some() {
8519 if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
8520 Some(self.parse_partition_for_values()?)
8521 } else {
8522 return self.expected_ref(
8523 "FOR VALUES or DEFAULT after PARTITION OF",
8524 self.peek_token_ref(),
8525 );
8526 }
8527 } else {
8528 None
8529 };
8530
8531 let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
8533
8534 let hive_distribution = self.parse_hive_distribution()?;
8535 let clustered_by = self.parse_optional_clustered_by()?;
8536 let hive_formats = self.parse_hive_formats()?;
8537
8538 let create_table_config = self.parse_optional_create_table_config()?;
8539
8540 let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
8543 && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
8544 {
8545 Some(Box::new(self.parse_expr()?))
8546 } else {
8547 None
8548 };
8549
8550 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
8551 if self.consume_token(&Token::LParen) {
8552 let columns = if self.peek_token_ref().token != Token::RParen {
8553 self.parse_comma_separated(|p| p.parse_expr())?
8554 } else {
8555 vec![]
8556 };
8557 self.expect_token(&Token::RParen)?;
8558 Some(OneOrManyWithParens::Many(columns))
8559 } else {
8560 Some(OneOrManyWithParens::One(self.parse_expr()?))
8561 }
8562 } else {
8563 None
8564 };
8565
8566 let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
8567 Some(self.parse_create_table_on_commit()?)
8568 } else {
8569 None
8570 };
8571
8572 let strict = self.parse_keyword(Keyword::STRICT);
8573
8574 let backup = if self.parse_keyword(Keyword::BACKUP) {
8576 let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
8577 Some(keyword == Keyword::YES)
8578 } else {
8579 None
8580 };
8581
8582 let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
8584 Some(self.parse_dist_style()?)
8585 } else {
8586 None
8587 };
8588 let distkey = if self.parse_keyword(Keyword::DISTKEY) {
8589 self.expect_token(&Token::LParen)?;
8590 let expr = self.parse_expr()?;
8591 self.expect_token(&Token::RParen)?;
8592 Some(expr)
8593 } else {
8594 None
8595 };
8596 let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
8597 self.expect_token(&Token::LParen)?;
8598 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
8599 self.expect_token(&Token::RParen)?;
8600 Some(columns)
8601 } else {
8602 None
8603 };
8604
8605 let query = if self.parse_keyword(Keyword::AS) {
8607 Some(self.parse_query()?)
8608 } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
8609 {
8610 self.prev_token();
8612 Some(self.parse_query()?)
8613 } else {
8614 None
8615 };
8616
8617 Ok(CreateTableBuilder::new(table_name)
8618 .temporary(temporary)
8619 .columns(columns)
8620 .constraints(constraints)
8621 .or_replace(or_replace)
8622 .if_not_exists(if_not_exists)
8623 .transient(transient)
8624 .hive_distribution(hive_distribution)
8625 .hive_formats(hive_formats)
8626 .global(global)
8627 .query(query)
8628 .without_rowid(without_rowid)
8629 .like(like)
8630 .clone_clause(clone)
8631 .comment_after_column_def(comment_after_column_def)
8632 .order_by(order_by)
8633 .on_commit(on_commit)
8634 .on_cluster(on_cluster)
8635 .clustered_by(clustered_by)
8636 .partition_by(create_table_config.partition_by)
8637 .cluster_by(create_table_config.cluster_by)
8638 .inherits(create_table_config.inherits)
8639 .partition_of(partition_of)
8640 .for_values(for_values)
8641 .table_options(create_table_config.table_options)
8642 .primary_key(primary_key)
8643 .strict(strict)
8644 .backup(backup)
8645 .diststyle(diststyle)
8646 .distkey(distkey)
8647 .sortkey(sortkey)
8648 .build())
8649 }
8650
8651 fn maybe_parse_create_table_like(
8652 &mut self,
8653 allow_unquoted_hyphen: bool,
8654 ) -> Result<Option<CreateTableLikeKind>, ParserError> {
8655 let like = if self.dialect.supports_create_table_like_parenthesized()
8656 && self.consume_token(&Token::LParen)
8657 {
8658 if self.parse_keyword(Keyword::LIKE) {
8659 let name = self.parse_object_name(allow_unquoted_hyphen)?;
8660 let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
8661 Some(CreateTableLikeDefaults::Including)
8662 } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
8663 Some(CreateTableLikeDefaults::Excluding)
8664 } else {
8665 None
8666 };
8667 self.expect_token(&Token::RParen)?;
8668 Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
8669 name,
8670 defaults,
8671 }))
8672 } else {
8673 self.prev_token();
8675 None
8676 }
8677 } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
8678 let name = self.parse_object_name(allow_unquoted_hyphen)?;
8679 Some(CreateTableLikeKind::Plain(CreateTableLike {
8680 name,
8681 defaults: None,
8682 }))
8683 } else {
8684 None
8685 };
8686 Ok(like)
8687 }
8688
8689 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8690 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8691 Ok(OnCommit::DeleteRows)
8692 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8693 Ok(OnCommit::PreserveRows)
8694 } else if self.parse_keywords(&[Keyword::DROP]) {
8695 Ok(OnCommit::Drop)
8696 } else {
8697 parser_err!(
8698 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8699 self.peek_token_ref()
8700 )
8701 }
8702 }
8703
    /// Parses the partition bound of a `CREATE TABLE .. PARTITION OF ..`
    /// statement: either `DEFAULT` or one of
    /// `FOR VALUES IN (..)`, `FOR VALUES FROM (..) TO (..)`,
    /// `FOR VALUES WITH (MODULUS n, REMAINDER m)`.
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        // `DEFAULT` stands alone; no FOR VALUES follows.
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            self.expect_token(&Token::LParen)?;
            // Reject an empty list: `FOR VALUES IN ()` is not valid.
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            self.expect_token(&Token::LParen)?;
            // Both bound lists must be non-empty.
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            if self.peek_token_ref().token == Token::RParen {
                return self.expected_ref("at least one value", self.peek_token_ref());
            }
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            // Hash partitioning: WITH (MODULUS n, REMAINDER m)
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
        }
    }
8755
8756 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8758 if self.parse_keyword(Keyword::MINVALUE) {
8759 Ok(PartitionBoundValue::MinValue)
8760 } else if self.parse_keyword(Keyword::MAXVALUE) {
8761 Ok(PartitionBoundValue::MaxValue)
8762 } else {
8763 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8764 }
8765 }
8766
    /// Parses the trailing configuration of a `CREATE TABLE`:
    /// `INHERITS (..)`, `WITH (..)` / `TBLPROPERTIES (..)` / `OPTIONS (..)` /
    /// plain key-value options, `PARTITION BY ..`, and `CLUSTER BY ..`.
    ///
    /// Only one "table options" representation is kept: each later match
    /// overwrites `table_options` (e.g. TBLPROPERTIES after WITH).
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // PostgreSQL: INHERITS (parent_table [, ...])
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // WITH (key = value, ...)
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive: TBLPROPERTIES (key = value, ...)
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }

        // BigQuery / PostgreSQL: PARTITION BY <expr>
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // BigQuery: CLUSTER BY <exprs> and OPTIONS (..)
        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // Peek for OPTIONS without consuming; parse_options re-reads it.
            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain `key [=] value` options (MySQL style) only when
        // nothing else matched; Hive is excluded from this fallback.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
8831
    /// Parses a single "plain" (mostly MySQL-style) table option such as
    /// `ENGINE = InnoDB`, `COMMENT 'x'`, `TABLESPACE ts STORAGE DISK`,
    /// `UNION = (t1, t2)`, or a generic `<KEY> [=] <value>` pair.
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start a recognized
    /// option (in that case nothing is consumed).
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // ClickHouse-style `START TRANSACTION` pseudo-option.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // COMMENT [=] '<text>' — whether `=` was present is kept in the AST.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // ENGINE [=] <name> [(ident, ...)]
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    // Optional engine parameters, e.g. ENGINE = MergeTree(...)
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // TABLESPACE [=] <name> [STORAGE [=] DISK | MEMORY]
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                // The tablespace name may be a bare word or a quoted string.
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // UNION = (tbl, ...) — MySQL MERGE storage engine option.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Remaining options are generic `<KEY> [=] <value>` pairs. Multi-word
        // keys are tried before their single-word prefixes (e.g.
        // `DEFAULT CHARSET` before `CHARSET`) so the longest form wins.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // No recognized key: this is not a plain option.
            return Ok(None);
        };

        // The `=` between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // Prefer a literal value; otherwise fall back to a bare identifier.
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9024
9025 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9027 let mut options = Vec::new();
9028
9029 while let Some(option) = self.parse_plain_option()? {
9030 options.push(option);
9031 let _ = self.consume_token(&Token::Comma);
9034 }
9035
9036 Ok(options)
9037 }
9038
9039 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9041 let comment = if self.parse_keyword(Keyword::COMMENT) {
9042 let has_eq = self.consume_token(&Token::Eq);
9043 let comment = self.parse_comment_value()?;
9044 Some(if has_eq {
9045 CommentDef::WithEq(comment)
9046 } else {
9047 CommentDef::WithoutEq(comment)
9048 })
9049 } else {
9050 None
9051 };
9052 Ok(comment)
9053 }
9054
9055 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9057 let next_token = self.next_token();
9058 let value = match next_token.token {
9059 Token::SingleQuotedString(str) => str,
9060 Token::DollarQuotedString(str) => str.value,
9061 _ => self.expected("string literal", next_token)?,
9062 };
9063 Ok(value)
9064 }
9065
    /// Parses an optional parenthesized procedure parameter list.
    ///
    /// Returns `Some(vec![])` when there is no `(` at all or the list is
    /// empty (`()`); otherwise parses comma-separated parameters until `)`.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // Only a word can start a parameter; anything else falls through
            // to the separator checks below.
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // `)` ends the list (a trailing comma is tolerated here).
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9091
    /// Parses the parenthesized column/constraint list of a `CREATE TABLE`.
    ///
    /// Returns empty vectors when there is no `(` at all or the list is empty
    /// (`()`). A trailing comma before `)` is accepted only when the dialect
    /// or the parser options allow trailing commas.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Try a table-level constraint first; otherwise a word must start
            // a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Stop at `)`; the `,)` combination requires trailing-comma
            // support, otherwise we loop and fail on the next item.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9132
9133 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9135 let mode = if self.parse_keyword(Keyword::IN) {
9136 Some(ArgMode::In)
9137 } else if self.parse_keyword(Keyword::OUT) {
9138 Some(ArgMode::Out)
9139 } else if self.parse_keyword(Keyword::INOUT) {
9140 Some(ArgMode::InOut)
9141 } else {
9142 None
9143 };
9144 let name = self.parse_identifier()?;
9145 let data_type = self.parse_data_type()?;
9146 let default = if self.consume_token(&Token::Eq) {
9147 Some(self.parse_expr()?)
9148 } else {
9149 None
9150 };
9151
9152 Ok(ProcedureParam {
9153 name,
9154 data_type,
9155 mode,
9156 default,
9157 })
9158 }
9159
    /// Parses a column definition (`<name> <type> [<options>...]`).
    /// Thin wrapper over `parse_column_def_inner` with the data type
    /// treated as required (`optional_data_type = false`).
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9164
9165 fn parse_column_def_inner(
9166 &mut self,
9167 optional_data_type: bool,
9168 ) -> Result<ColumnDef, ParserError> {
9169 let col_name = self.parse_identifier()?;
9170 let data_type = if self.is_column_type_sqlite_unspecified() {
9171 DataType::Unspecified
9172 } else if optional_data_type {
9173 self.maybe_parse(|parser| parser.parse_data_type())?
9174 .unwrap_or(DataType::Unspecified)
9175 } else {
9176 self.parse_data_type()?
9177 };
9178 let mut options = vec![];
9179 loop {
9180 if self.parse_keyword(Keyword::CONSTRAINT) {
9181 let name = Some(self.parse_identifier()?);
9182 if let Some(option) = self.parse_optional_column_option()? {
9183 options.push(ColumnOptionDef { name, option });
9184 } else {
9185 return self.expected_ref(
9186 "constraint details after CONSTRAINT <name>",
9187 self.peek_token_ref(),
9188 );
9189 }
9190 } else if let Some(option) = self.parse_optional_column_option()? {
9191 options.push(ColumnOptionDef { name: None, option });
9192 } else {
9193 break;
9194 };
9195 }
9196 Ok(ColumnDef {
9197 name: col_name,
9198 data_type,
9199 options,
9200 })
9201 }
9202
9203 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9204 if dialect_of!(self is SQLiteDialect) {
9205 match &self.peek_token_ref().token {
9206 Token::Word(word) => matches!(
9207 word.keyword,
9208 Keyword::CONSTRAINT
9209 | Keyword::PRIMARY
9210 | Keyword::NOT
9211 | Keyword::UNIQUE
9212 | Keyword::CHECK
9213 | Keyword::DEFAULT
9214 | Keyword::COLLATE
9215 | Keyword::REFERENCES
9216 | Keyword::GENERATED
9217 | Keyword::AS
9218 ),
9219 _ => true, }
9221 } else {
9222 false
9223 }
9224 }
9225
9226 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9228 if let Some(option) = self.dialect.parse_column_option(self)? {
9229 return option;
9230 }
9231
9232 self.with_state(
9233 ColumnDefinition,
9234 |parser| -> Result<Option<ColumnOption>, ParserError> {
9235 parser.parse_optional_column_option_inner()
9236 },
9237 )
9238 }
9239
    /// Parses one column option/constraint in its generic, dialect-agnostic
    /// form: NULL/NOT NULL, DEFAULT, PRIMARY KEY, UNIQUE, REFERENCES, CHECK,
    /// GENERATED, plus a number of dialect-gated extras. Returns `Ok(None)`
    /// when the upcoming tokens do not start a recognized option.
    ///
    /// NOTE(review): several branches call `parse_keyword(..)` *before* the
    /// dialect check (e.g. AUTO_INCREMENT, ASC/DESC, AS, SRID, IDENTITY), so
    /// the keyword token is consumed even when the dialect test then fails —
    /// confirm this is intentional before reordering the conditions.
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            // ClickHouse: MATERIALIZED <expr>
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            // ClickHouse: ALIAS <expr>
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // ClickHouse: EPHEMERAL [<expr>] — the expression is optional, a
            // following `,` or `)` means it was omitted.
            if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            // MySQL-style `UNIQUE KEY` keeps the KEY word for display.
            let index_type_display =
                if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
                    KeyOrIndexDisplay::Key
                } else {
                    KeyOrIndexDisplay::None
                };
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
            // Bare `KEY` (MySQL) is treated as a primary key option.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            // MATCH / ON DELETE / ON UPDATE may appear in any order, but each
            // at most once.
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None,
                    index_name: None,
                    columns: vec![],
                    foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The check expression is parsed in the Normal state so column-
            // definition-specific restrictions do not apply inside it.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;

            let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                Some(true)
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                Some(false)
            } else {
                None
            };

            Ok(Some(
                CheckConstraint {
                    name: None,
                    expr: Box::new(expr),
                    enforced,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // MySQL: AUTO_INCREMENT, kept as an opaque dialect token.
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            // SQLite: AUTOINCREMENT
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // MySQL: ON UPDATE <expr> (e.g. CURRENT_TIMESTAMP)
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // BigQuery: OPTIONS(..) — push OPTIONS back so parse_options can
            // consume it itself.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            // Generated column without the GENERATED keyword: AS (<expr>)
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // MS SQL: IDENTITY[(seed, increment)]
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            // SQLite: ON CONFLICT <resolution>
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
9475
9476 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9477 let name = self.parse_object_name(false)?;
9478 self.expect_token(&Token::Eq)?;
9479 let value = self.parse_literal_string()?;
9480
9481 Ok(Tag::new(name, value))
9482 }
9483
    /// Parses the tail of a `GENERATED ...` column option (the GENERATED
    /// keyword has already been consumed):
    /// `ALWAYS AS IDENTITY [(seq opts)]`, `BY DEFAULT AS IDENTITY [(..)]`,
    /// or `ALWAYS AS (<expr>) [STORED | VIRTUAL]`.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            let mut sequence_options = vec![];
            // `expect_token(..).is_ok()` acts as "consume `(` if present":
            // the sequence options are optional.
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // Parse the generation expression in the Normal parser state
                // so column-definition-specific restrictions do not apply.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // PostgreSQL requires STORED on generated expressions.
                    self.expected_ref("STORED", self.peek_token_ref())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // `ALWAYS AS` without `(` is not a generated column here.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
9550
9551 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9552 self.expect_token(&Token::LParen)?;
9554 let expr = self.parse_expr()?;
9555 self.expect_token(&Token::RParen)?;
9556
9557 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9558 (
9559 GeneratedAs::ExpStored,
9560 Some(GeneratedExpressionMode::Stored),
9561 )
9562 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9563 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9564 } else {
9565 (GeneratedAs::Always, None)
9566 };
9567
9568 Ok(Some(ColumnOption::Generated {
9569 generated_as: gen_as,
9570 sequence_options: None,
9571 generation_expr: Some(expr),
9572 generation_expr_mode: expr_mode,
9573 generated_keyword: false,
9574 }))
9575 }
9576
9577 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9579 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9580 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9581 {
9582 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9583
9584 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9585 self.expect_token(&Token::LParen)?;
9586 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9587 self.expect_token(&Token::RParen)?;
9588 Some(sorted_by_columns)
9589 } else {
9590 None
9591 };
9592
9593 self.expect_keyword_is(Keyword::INTO)?;
9594 let num_buckets = self.parse_number_value()?.value;
9595 self.expect_keyword_is(Keyword::BUCKETS)?;
9596 Some(ClusteredBy {
9597 columns,
9598 sorted_by,
9599 num_buckets,
9600 })
9601 } else {
9602 None
9603 };
9604 Ok(clustered_by)
9605 }
9606
9607 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9611 if self.parse_keyword(Keyword::RESTRICT) {
9612 Ok(ReferentialAction::Restrict)
9613 } else if self.parse_keyword(Keyword::CASCADE) {
9614 Ok(ReferentialAction::Cascade)
9615 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9616 Ok(ReferentialAction::SetNull)
9617 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9618 Ok(ReferentialAction::NoAction)
9619 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9620 Ok(ReferentialAction::SetDefault)
9621 } else {
9622 self.expected_ref(
9623 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9624 self.peek_token_ref(),
9625 )
9626 }
9627 }
9628
9629 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9631 if self.parse_keyword(Keyword::FULL) {
9632 Ok(ConstraintReferenceMatchKind::Full)
9633 } else if self.parse_keyword(Keyword::PARTIAL) {
9634 Ok(ConstraintReferenceMatchKind::Partial)
9635 } else if self.parse_keyword(Keyword::SIMPLE) {
9636 Ok(ConstraintReferenceMatchKind::Simple)
9637 } else {
9638 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9639 }
9640 }
9641
9642 fn parse_constraint_using_index(
9645 &mut self,
9646 name: Option<Ident>,
9647 ) -> Result<ConstraintUsingIndex, ParserError> {
9648 let index_name = self.parse_identifier()?;
9649 let characteristics = self.parse_constraint_characteristics()?;
9650 Ok(ConstraintUsingIndex {
9651 name,
9652 index_name,
9653 characteristics,
9654 })
9655 }
9656
    /// Parses optional constraint characteristics:
    /// `[NOT] DEFERRABLE`, `INITIALLY {DEFERRED | IMMEDIATE}` and
    /// `[NOT] ENFORCED`, in any order. Returns `None` when none are present.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        // Each characteristic may appear at most once; the `is_none()` guards
        // keep a duplicate from being consumed, so the loop falls through to
        // `break` instead. `NOT DEFERRABLE` is tried before `DEFERRABLE`
        // since it is the longer keyword sequence.
        loop {
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                // `INITIALLY` must be followed by exactly one of these.
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        // Distinguish "no characteristics written" from an all-default value.
        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
9694
    /// Parses an optional table-level constraint:
    /// `UNIQUE`, `PRIMARY KEY`, `FOREIGN KEY`, `CHECK`, MySQL `INDEX`/`KEY`
    /// and `FULLTEXT`/`SPATIAL`, or PostgreSQL `EXCLUDE` — each optionally
    /// preceded by `CONSTRAINT [name]`. Returns `Ok(None)` (with no tokens
    /// consumed) when the next token does not start a constraint.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT [name]` prefix. Some dialects allow the bare
        // `CONSTRAINT` keyword with no name when a constraint keyword
        // follows immediately.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // Dispatch on the constraint-introducing keyword.
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX index_name` reuses an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // MySQL-style `UNIQUE {KEY | INDEX}` display keyword; other
                // dialects reject it.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                // PostgreSQL `NULLS [NOT] DISTINCT`.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX index_name` reuses an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // `MATCH`, `ON DELETE` and `ON UPDATE` may appear in any
                // order, each at most once (guarded by `is_none()`).
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                // `CHECK ( expr ) [[NOT] ENFORCED]`.
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            // MySQL-style `{INDEX | KEY} [name] [USING type] (cols)`; only
            // valid without a `CONSTRAINT` prefix.
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // A following `USING` is the index type, not the index name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            // MySQL-style `{FULLTEXT | SPATIAL} [KEY | INDEX] [name] (cols)`.
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // These forms never take a `CONSTRAINT name` prefix.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            // PostgreSQL `EXCLUDE [USING method] (elem WITH op, ...)
            // [INCLUDE (...)] [WHERE (...)]`.
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements =
                    self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // `CONSTRAINT name` was consumed but no constraint
                    // keyword followed: hard error.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint at all; put the token back and let the
                    // caller try something else.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
9971
9972 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
9973 let expr = self.parse_expr()?;
9974 self.expect_keyword_is(Keyword::WITH)?;
9975 let operator_token = self.next_token();
9976 let operator = operator_token.token.to_string();
9977 Ok(ExclusionElement { expr, operator })
9978 }
9979
9980 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9981 Ok(if self.parse_keyword(Keyword::NULLS) {
9982 let not = self.parse_keyword(Keyword::NOT);
9983 self.expect_keyword_is(Keyword::DISTINCT)?;
9984 if not {
9985 NullsDistinctOption::NotDistinct
9986 } else {
9987 NullsDistinctOption::Distinct
9988 }
9989 } else {
9990 NullsDistinctOption::None
9991 })
9992 }
9993
9994 pub fn maybe_parse_options(
9996 &mut self,
9997 keyword: Keyword,
9998 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9999 if let Token::Word(word) = &self.peek_token_ref().token {
10000 if word.keyword == keyword {
10001 return Ok(Some(self.parse_options(keyword)?));
10002 }
10003 };
10004 Ok(None)
10005 }
10006
10007 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10009 if self.parse_keyword(keyword) {
10010 self.expect_token(&Token::LParen)?;
10011 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10012 self.expect_token(&Token::RParen)?;
10013 Ok(options)
10014 } else {
10015 Ok(vec![])
10016 }
10017 }
10018
10019 pub fn parse_options_with_keywords(
10021 &mut self,
10022 keywords: &[Keyword],
10023 ) -> Result<Vec<SqlOption>, ParserError> {
10024 if self.parse_keywords(keywords) {
10025 self.expect_token(&Token::LParen)?;
10026 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10027 self.expect_token(&Token::RParen)?;
10028 Ok(options)
10029 } else {
10030 Ok(vec![])
10031 }
10032 }
10033
10034 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10036 Ok(if self.parse_keyword(Keyword::BTREE) {
10037 IndexType::BTree
10038 } else if self.parse_keyword(Keyword::HASH) {
10039 IndexType::Hash
10040 } else if self.parse_keyword(Keyword::GIN) {
10041 IndexType::GIN
10042 } else if self.parse_keyword(Keyword::GIST) {
10043 IndexType::GiST
10044 } else if self.parse_keyword(Keyword::SPGIST) {
10045 IndexType::SPGiST
10046 } else if self.parse_keyword(Keyword::BRIN) {
10047 IndexType::BRIN
10048 } else if self.parse_keyword(Keyword::BLOOM) {
10049 IndexType::Bloom
10050 } else {
10051 IndexType::Custom(self.parse_identifier()?)
10052 })
10053 }
10054
10055 pub fn parse_optional_using_then_index_type(
10062 &mut self,
10063 ) -> Result<Option<IndexType>, ParserError> {
10064 if self.parse_keyword(Keyword::USING) {
10065 Ok(Some(self.parse_index_type()?))
10066 } else {
10067 Ok(None)
10068 }
10069 }
10070
10071 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10075 self.maybe_parse(|parser| parser.parse_identifier())
10076 }
10077
10078 #[must_use]
10079 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10081 if self.parse_keyword(Keyword::KEY) {
10082 KeyOrIndexDisplay::Key
10083 } else if self.parse_keyword(Keyword::INDEX) {
10084 KeyOrIndexDisplay::Index
10085 } else {
10086 KeyOrIndexDisplay::None
10087 }
10088 }
10089
10090 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10092 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10093 Ok(Some(IndexOption::Using(index_type)))
10094 } else if self.parse_keyword(Keyword::COMMENT) {
10095 let s = self.parse_literal_string()?;
10096 Ok(Some(IndexOption::Comment(s)))
10097 } else {
10098 Ok(None)
10099 }
10100 }
10101
10102 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10104 let mut options = Vec::new();
10105
10106 loop {
10107 match self.parse_optional_index_option()? {
10108 Some(index_option) => options.push(index_option),
10109 None => return Ok(options),
10110 }
10111 }
10112 }
10113
10114 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
10116 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
10117
10118 match &self.peek_token_ref().token {
10119 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
10120 Ok(SqlOption::Ident(self.parse_identifier()?))
10121 }
10122 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
10123 self.parse_option_partition()
10124 }
10125 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
10126 self.parse_option_clustered()
10127 }
10128 _ => {
10129 let name = self.parse_identifier()?;
10130 self.expect_token(&Token::Eq)?;
10131 let value = self.parse_expr()?;
10132
10133 Ok(SqlOption::KeyValue { key: name, value })
10134 }
10135 }
10136 }
10137
10138 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
10140 if self.parse_keywords(&[
10141 Keyword::CLUSTERED,
10142 Keyword::COLUMNSTORE,
10143 Keyword::INDEX,
10144 Keyword::ORDER,
10145 ]) {
10146 Ok(SqlOption::Clustered(
10147 TableOptionsClustered::ColumnstoreIndexOrder(
10148 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
10149 ),
10150 ))
10151 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
10152 Ok(SqlOption::Clustered(
10153 TableOptionsClustered::ColumnstoreIndex,
10154 ))
10155 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
10156 self.expect_token(&Token::LParen)?;
10157
10158 let columns = self.parse_comma_separated(|p| {
10159 let name = p.parse_identifier()?;
10160 let asc = p.parse_asc_desc();
10161
10162 Ok(ClusteredIndex { name, asc })
10163 })?;
10164
10165 self.expect_token(&Token::RParen)?;
10166
10167 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
10168 } else {
10169 Err(ParserError::ParserError(
10170 "invalid CLUSTERED sequence".to_string(),
10171 ))
10172 }
10173 }
10174
10175 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
10177 self.expect_keyword_is(Keyword::PARTITION)?;
10178 self.expect_token(&Token::LParen)?;
10179 let column_name = self.parse_identifier()?;
10180
10181 self.expect_keyword_is(Keyword::RANGE)?;
10182 let range_direction = if self.parse_keyword(Keyword::LEFT) {
10183 Some(PartitionRangeDirection::Left)
10184 } else if self.parse_keyword(Keyword::RIGHT) {
10185 Some(PartitionRangeDirection::Right)
10186 } else {
10187 None
10188 };
10189
10190 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
10191 self.expect_token(&Token::LParen)?;
10192
10193 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
10194
10195 self.expect_token(&Token::RParen)?;
10196 self.expect_token(&Token::RParen)?;
10197
10198 Ok(SqlOption::Partition {
10199 column_name,
10200 range_direction,
10201 for_values,
10202 })
10203 }
10204
10205 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10207 self.expect_token(&Token::LParen)?;
10208 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10209 self.expect_token(&Token::RParen)?;
10210 Ok(Partition::Partitions(partitions))
10211 }
10212
10213 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10215 self.expect_token(&Token::LParen)?;
10216 self.expect_keyword_is(Keyword::SELECT)?;
10217 let projection = self.parse_projection()?;
10218 let group_by = self.parse_optional_group_by()?;
10219 let order_by = self.parse_optional_order_by()?;
10220 self.expect_token(&Token::RParen)?;
10221 Ok(ProjectionSelect {
10222 projection,
10223 group_by,
10224 order_by,
10225 })
10226 }
10227 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10229 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10230 let name = self.parse_identifier()?;
10231 let query = self.parse_projection_select()?;
10232 Ok(AlterTableOperation::AddProjection {
10233 if_not_exists,
10234 name,
10235 select: query,
10236 })
10237 }
10238
10239 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10243 self.expect_keyword_is(Keyword::ALTER)?;
10244 self.expect_keyword_is(Keyword::SORTKEY)?;
10245 self.expect_token(&Token::LParen)?;
10246 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10247 self.expect_token(&Token::RParen)?;
10248 Ok(AlterTableOperation::AlterSortKey { columns })
10249 }
10250
10251 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10253 let operation = if self.parse_keyword(Keyword::ADD) {
10254 if let Some(constraint) = self.parse_optional_table_constraint()? {
10255 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10256 AlterTableOperation::AddConstraint {
10257 constraint,
10258 not_valid,
10259 }
10260 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10261 && self.parse_keyword(Keyword::PROJECTION)
10262 {
10263 return self.parse_alter_table_add_projection();
10264 } else {
10265 let if_not_exists =
10266 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10267 let mut new_partitions = vec![];
10268 loop {
10269 if self.parse_keyword(Keyword::PARTITION) {
10270 new_partitions.push(self.parse_partition()?);
10271 } else {
10272 break;
10273 }
10274 }
10275 if !new_partitions.is_empty() {
10276 AlterTableOperation::AddPartitions {
10277 if_not_exists,
10278 new_partitions,
10279 }
10280 } else {
10281 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10282
10283 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10284 {
10285 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10286 || if_not_exists
10287 } else {
10288 false
10289 };
10290
10291 let column_def = self.parse_column_def()?;
10292
10293 let column_position = self.parse_column_position()?;
10294
10295 AlterTableOperation::AddColumn {
10296 column_keyword,
10297 if_not_exists,
10298 column_def,
10299 column_position,
10300 }
10301 }
10302 }
10303 } else if self.parse_keyword(Keyword::RENAME) {
10304 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10305 let old_name = self.parse_identifier()?;
10306 self.expect_keyword_is(Keyword::TO)?;
10307 let new_name = self.parse_identifier()?;
10308 AlterTableOperation::RenameConstraint { old_name, new_name }
10309 } else if self.parse_keyword(Keyword::TO) {
10310 let table_name = self.parse_object_name(false)?;
10311 AlterTableOperation::RenameTable {
10312 table_name: RenameTableNameKind::To(table_name),
10313 }
10314 } else if self.parse_keyword(Keyword::AS) {
10315 let table_name = self.parse_object_name(false)?;
10316 AlterTableOperation::RenameTable {
10317 table_name: RenameTableNameKind::As(table_name),
10318 }
10319 } else {
10320 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10322 self.expect_keyword_is(Keyword::TO)?;
10323 let new_column_name = self.parse_identifier()?;
10324 AlterTableOperation::RenameColumn {
10325 old_column_name,
10326 new_column_name,
10327 }
10328 }
10329 } else if self.parse_keyword(Keyword::DISABLE) {
10330 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10331 AlterTableOperation::DisableRowLevelSecurity {}
10332 } else if self.parse_keyword(Keyword::RULE) {
10333 let name = self.parse_identifier()?;
10334 AlterTableOperation::DisableRule { name }
10335 } else if self.parse_keyword(Keyword::TRIGGER) {
10336 let name = self.parse_identifier()?;
10337 AlterTableOperation::DisableTrigger { name }
10338 } else {
10339 return self.expected_ref(
10340 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10341 self.peek_token_ref(),
10342 );
10343 }
10344 } else if self.parse_keyword(Keyword::ENABLE) {
10345 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10346 let name = self.parse_identifier()?;
10347 AlterTableOperation::EnableAlwaysRule { name }
10348 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10349 let name = self.parse_identifier()?;
10350 AlterTableOperation::EnableAlwaysTrigger { name }
10351 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10352 AlterTableOperation::EnableRowLevelSecurity {}
10353 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10354 let name = self.parse_identifier()?;
10355 AlterTableOperation::EnableReplicaRule { name }
10356 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10357 let name = self.parse_identifier()?;
10358 AlterTableOperation::EnableReplicaTrigger { name }
10359 } else if self.parse_keyword(Keyword::RULE) {
10360 let name = self.parse_identifier()?;
10361 AlterTableOperation::EnableRule { name }
10362 } else if self.parse_keyword(Keyword::TRIGGER) {
10363 let name = self.parse_identifier()?;
10364 AlterTableOperation::EnableTrigger { name }
10365 } else {
10366 return self.expected_ref(
10367 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10368 self.peek_token_ref(),
10369 );
10370 }
10371 } else if self.parse_keywords(&[
10372 Keyword::FORCE,
10373 Keyword::ROW,
10374 Keyword::LEVEL,
10375 Keyword::SECURITY,
10376 ]) {
10377 AlterTableOperation::ForceRowLevelSecurity
10378 } else if self.parse_keywords(&[
10379 Keyword::NO,
10380 Keyword::FORCE,
10381 Keyword::ROW,
10382 Keyword::LEVEL,
10383 Keyword::SECURITY,
10384 ]) {
10385 AlterTableOperation::NoForceRowLevelSecurity
10386 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10387 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10388 {
10389 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10390 let name = self.parse_identifier()?;
10391 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10392 Some(self.parse_identifier()?)
10393 } else {
10394 None
10395 };
10396 AlterTableOperation::ClearProjection {
10397 if_exists,
10398 name,
10399 partition,
10400 }
10401 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10402 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10403 {
10404 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10405 let name = self.parse_identifier()?;
10406 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10407 Some(self.parse_identifier()?)
10408 } else {
10409 None
10410 };
10411 AlterTableOperation::MaterializeProjection {
10412 if_exists,
10413 name,
10414 partition,
10415 }
10416 } else if self.parse_keyword(Keyword::DROP) {
10417 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10418 self.expect_token(&Token::LParen)?;
10419 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10420 self.expect_token(&Token::RParen)?;
10421 AlterTableOperation::DropPartitions {
10422 partitions,
10423 if_exists: true,
10424 }
10425 } else if self.parse_keyword(Keyword::PARTITION) {
10426 self.expect_token(&Token::LParen)?;
10427 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10428 self.expect_token(&Token::RParen)?;
10429 AlterTableOperation::DropPartitions {
10430 partitions,
10431 if_exists: false,
10432 }
10433 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10434 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10435 let name = self.parse_identifier()?;
10436 let drop_behavior = self.parse_optional_drop_behavior();
10437 AlterTableOperation::DropConstraint {
10438 if_exists,
10439 name,
10440 drop_behavior,
10441 }
10442 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10443 let drop_behavior = self.parse_optional_drop_behavior();
10444 AlterTableOperation::DropPrimaryKey { drop_behavior }
10445 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10446 let name = self.parse_identifier()?;
10447 let drop_behavior = self.parse_optional_drop_behavior();
10448 AlterTableOperation::DropForeignKey {
10449 name,
10450 drop_behavior,
10451 }
10452 } else if self.parse_keyword(Keyword::INDEX) {
10453 let name = self.parse_identifier()?;
10454 AlterTableOperation::DropIndex { name }
10455 } else if self.parse_keyword(Keyword::PROJECTION)
10456 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10457 {
10458 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10459 let name = self.parse_identifier()?;
10460 AlterTableOperation::DropProjection { if_exists, name }
10461 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10462 AlterTableOperation::DropClusteringKey
10463 } else {
10464 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10466 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10467 self.parse_comma_separated(Parser::parse_identifier)?
10468 } else {
10469 vec![self.parse_identifier()?]
10470 };
10471 let drop_behavior = self.parse_optional_drop_behavior();
10472 AlterTableOperation::DropColumn {
10473 has_column_keyword,
10474 column_names,
10475 if_exists,
10476 drop_behavior,
10477 }
10478 }
10479 } else if self.parse_keyword(Keyword::PARTITION) {
10480 self.expect_token(&Token::LParen)?;
10481 let before = self.parse_comma_separated(Parser::parse_expr)?;
10482 self.expect_token(&Token::RParen)?;
10483 self.expect_keyword_is(Keyword::RENAME)?;
10484 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10485 self.expect_token(&Token::LParen)?;
10486 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10487 self.expect_token(&Token::RParen)?;
10488 AlterTableOperation::RenamePartitions {
10489 old_partitions: before,
10490 new_partitions: renames,
10491 }
10492 } else if self.parse_keyword(Keyword::CHANGE) {
10493 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10495 let new_name = self.parse_identifier()?;
10496 let data_type = self.parse_data_type()?;
10497 let mut options = vec![];
10498 while let Some(option) = self.parse_optional_column_option()? {
10499 options.push(option);
10500 }
10501
10502 let column_position = self.parse_column_position()?;
10503
10504 AlterTableOperation::ChangeColumn {
10505 old_name,
10506 new_name,
10507 data_type,
10508 options,
10509 column_position,
10510 }
10511 } else if self.parse_keyword(Keyword::MODIFY) {
10512 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10514 let data_type = self.parse_data_type()?;
10515 let mut options = vec![];
10516 while let Some(option) = self.parse_optional_column_option()? {
10517 options.push(option);
10518 }
10519
10520 let column_position = self.parse_column_position()?;
10521
10522 AlterTableOperation::ModifyColumn {
10523 col_name,
10524 data_type,
10525 options,
10526 column_position,
10527 }
10528 } else if self.parse_keyword(Keyword::ALTER) {
10529 if self.peek_keyword(Keyword::SORTKEY) {
10530 self.prev_token();
10531 return self.parse_alter_sort_key();
10532 }
10533
10534 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10536 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10537
10538 let op: AlterColumnOperation = if self.parse_keywords(&[
10539 Keyword::SET,
10540 Keyword::NOT,
10541 Keyword::NULL,
10542 ]) {
10543 AlterColumnOperation::SetNotNull {}
10544 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10545 AlterColumnOperation::DropNotNull {}
10546 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10547 AlterColumnOperation::SetDefault {
10548 value: self.parse_expr()?,
10549 }
10550 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10551 AlterColumnOperation::DropDefault {}
10552 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10553 self.parse_set_data_type(true)?
10554 } else if self.parse_keyword(Keyword::TYPE) {
10555 self.parse_set_data_type(false)?
10556 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10557 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10558 Some(GeneratedAs::Always)
10559 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10560 Some(GeneratedAs::ByDefault)
10561 } else {
10562 None
10563 };
10564
10565 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10566
10567 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10568
10569 if self.peek_token_ref().token == Token::LParen {
10570 self.expect_token(&Token::LParen)?;
10571 sequence_options = Some(self.parse_create_sequence_options()?);
10572 self.expect_token(&Token::RParen)?;
10573 }
10574
10575 AlterColumnOperation::AddGenerated {
10576 generated_as,
10577 sequence_options,
10578 }
10579 } else {
10580 let message = if is_postgresql {
10581 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10582 } else {
10583 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10584 };
10585
10586 return self.expected_ref(message, self.peek_token_ref());
10587 };
10588 AlterTableOperation::AlterColumn { column_name, op }
10589 } else if self.parse_keyword(Keyword::SWAP) {
10590 self.expect_keyword_is(Keyword::WITH)?;
10591 let table_name = self.parse_object_name(false)?;
10592 AlterTableOperation::SwapWith { table_name }
10593 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10594 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10595 {
10596 let new_owner = self.parse_owner()?;
10597 AlterTableOperation::OwnerTo { new_owner }
10598 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10599 && self.parse_keyword(Keyword::ATTACH)
10600 {
10601 AlterTableOperation::AttachPartition {
10602 partition: self.parse_part_or_partition()?,
10603 }
10604 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10605 && self.parse_keyword(Keyword::DETACH)
10606 {
10607 AlterTableOperation::DetachPartition {
10608 partition: self.parse_part_or_partition()?,
10609 }
10610 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10611 && self.parse_keyword(Keyword::FREEZE)
10612 {
10613 let partition = self.parse_part_or_partition()?;
10614 let with_name = if self.parse_keyword(Keyword::WITH) {
10615 self.expect_keyword_is(Keyword::NAME)?;
10616 Some(self.parse_identifier()?)
10617 } else {
10618 None
10619 };
10620 AlterTableOperation::FreezePartition {
10621 partition,
10622 with_name,
10623 }
10624 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10625 && self.parse_keyword(Keyword::UNFREEZE)
10626 {
10627 let partition = self.parse_part_or_partition()?;
10628 let with_name = if self.parse_keyword(Keyword::WITH) {
10629 self.expect_keyword_is(Keyword::NAME)?;
10630 Some(self.parse_identifier()?)
10631 } else {
10632 None
10633 };
10634 AlterTableOperation::UnfreezePartition {
10635 partition,
10636 with_name,
10637 }
10638 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10639 self.expect_token(&Token::LParen)?;
10640 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
10641 self.expect_token(&Token::RParen)?;
10642 AlterTableOperation::ClusterBy { exprs }
10643 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
10644 AlterTableOperation::SuspendRecluster
10645 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
10646 AlterTableOperation::ResumeRecluster
10647 } else if self.parse_keyword(Keyword::LOCK) {
10648 let equals = self.consume_token(&Token::Eq);
10649 let lock = match self.parse_one_of_keywords(&[
10650 Keyword::DEFAULT,
10651 Keyword::EXCLUSIVE,
10652 Keyword::NONE,
10653 Keyword::SHARED,
10654 ]) {
10655 Some(Keyword::DEFAULT) => AlterTableLock::Default,
10656 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
10657 Some(Keyword::NONE) => AlterTableLock::None,
10658 Some(Keyword::SHARED) => AlterTableLock::Shared,
10659 _ => self.expected_ref(
10660 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
10661 self.peek_token_ref(),
10662 )?,
10663 };
10664 AlterTableOperation::Lock { equals, lock }
10665 } else if self.parse_keyword(Keyword::ALGORITHM) {
10666 let equals = self.consume_token(&Token::Eq);
10667 let algorithm = match self.parse_one_of_keywords(&[
10668 Keyword::DEFAULT,
10669 Keyword::INSTANT,
10670 Keyword::INPLACE,
10671 Keyword::COPY,
10672 ]) {
10673 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
10674 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
10675 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
10676 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
10677 _ => self.expected_ref(
10678 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
10679 self.peek_token_ref(),
10680 )?,
10681 };
10682 AlterTableOperation::Algorithm { equals, algorithm }
10683 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
10684 let equals = self.consume_token(&Token::Eq);
10685 let value = self.parse_number_value()?;
10686 AlterTableOperation::AutoIncrement { equals, value }
10687 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
10688 let identity = if self.parse_keyword(Keyword::NOTHING) {
10689 ReplicaIdentity::Nothing
10690 } else if self.parse_keyword(Keyword::FULL) {
10691 ReplicaIdentity::Full
10692 } else if self.parse_keyword(Keyword::DEFAULT) {
10693 ReplicaIdentity::Default
10694 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
10695 ReplicaIdentity::Index(self.parse_identifier()?)
10696 } else {
10697 return self.expected_ref(
10698 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
10699 self.peek_token_ref(),
10700 );
10701 };
10702
10703 AlterTableOperation::ReplicaIdentity { identity }
10704 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
10705 let name = self.parse_identifier()?;
10706 AlterTableOperation::ValidateConstraint { name }
10707 } else {
10708 let mut options =
10709 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
10710 if !options.is_empty() {
10711 AlterTableOperation::SetTblProperties {
10712 table_properties: options,
10713 }
10714 } else {
10715 options = self.parse_options(Keyword::SET)?;
10716 if !options.is_empty() {
10717 AlterTableOperation::SetOptionsParens { options }
10718 } else {
10719 return self.expected_ref(
10720 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
10721 self.peek_token_ref(),
10722 );
10723 }
10724 }
10725 };
10726 Ok(operation)
10727 }
10728
10729 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
10730 let data_type = self.parse_data_type()?;
10731 let using = if self.dialect.supports_alter_column_type_using()
10732 && self.parse_keyword(Keyword::USING)
10733 {
10734 Some(self.parse_expr()?)
10735 } else {
10736 None
10737 };
10738 Ok(AlterColumnOperation::SetDataType {
10739 data_type,
10740 using,
10741 had_set,
10742 })
10743 }
10744
10745 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
10746 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
10747 match keyword {
10748 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
10749 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
10750 unexpected_keyword => Err(ParserError::ParserError(
10752 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
10753 )),
10754 }
10755 }
10756
    /// Parses an `ALTER <object>` statement, dispatching on the object keyword
    /// that follows `ALTER` (which the caller has already consumed).
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // Rewind both SCHEMA and the preceding ALTER:
                // `parse_alter_schema` re-consumes `ALTER SCHEMA` itself via
                // `expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])`.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // `ALTER ICEBERG TABLE ...` — Snowflake-style Iceberg tables.
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::INDEX => {
                let index_name = self.parse_object_name(false)?;
                // Only `RENAME TO <name>` is supported for ALTER INDEX here.
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else {
                    return self.expected_ref("RENAME after ALTER INDEX", self.peek_token_ref());
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::OPERATOR => {
                // OPERATOR FAMILY / OPERATOR CLASS / plain OPERATOR each have
                // their own parser; try the longer forms first.
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            // Unreachable: `expect_one_of_keywords` only returns listed keywords.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
            )),
        }
    }
10828
    /// Parses the name-and-signature part of `ALTER AGGREGATE name ( ... )`.
    ///
    /// Returns a tuple of:
    /// - the `FunctionDesc` (name plus direct argument list),
    /// - `true` if the signature was the wildcard form `(*)`,
    /// - the optional `ORDER BY` argument list (ordered-set aggregates).
    fn parse_alter_aggregate_signature(
        &mut self,
    ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_token(&Token::LParen)?;

        // `(*)` — wildcard signature: no explicit args, no ORDER BY part.
        if self.consume_token(&Token::Mul) {
            self.expect_token(&Token::RParen)?;
            return Ok((
                FunctionDesc {
                    name,
                    args: Some(vec![]),
                },
                true,
                None,
            ));
        }

        // The direct argument list may be empty when the signature starts with
        // ORDER BY or closes immediately with `)`.
        let args =
            if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
                vec![]
            } else {
                self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
            };

        // Optional `ORDER BY <args>` inside the parentheses.
        let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;
        Ok((
            FunctionDesc {
                name,
                args: Some(args),
            },
            false,
            aggregate_order_by,
        ))
    }
10870
    /// Tries to parse a single `ALTER FUNCTION` action (volatility, security,
    /// leakproof-ness, cost/rows, SET/RESET of a config parameter, ...).
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start any known
    /// action, so the caller can detect the end of the action list.
    ///
    /// The order of the branches is load-bearing: multi-keyword forms are
    /// tried before their single-keyword prefixes (e.g. `EXTERNAL SECURITY`
    /// before `SECURITY`, `NOT LEAKPROOF` before `LEAKPROOF`,
    /// `RETURNS NULL ON NULL INPUT` before other forms).
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // `NOT` must be followed by LEAKPROOF here.
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // `EXTERNAL SECURITY { DEFINER | INVOKER }`.
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            // `SECURITY { DEFINER | INVOKER }` without the EXTERNAL prefix.
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // `SET name { FROM CURRENT | { = | TO } { DEFAULT | value, ... } }`.
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                // Either `=` or `TO` must separate the name from its value.
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // `RESET { ALL | config_name }`.
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // No recognized action at this position.
            None
        };

        Ok(action)
    }
10980
10981 fn parse_alter_function_actions(
10982 &mut self,
10983 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
10984 let mut actions = vec![];
10985 while let Some(action) = self.parse_alter_function_action()? {
10986 actions.push(action);
10987 }
10988 if actions.is_empty() {
10989 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
10990 }
10991 let restrict = self.parse_keyword(Keyword::RESTRICT);
10992 Ok((actions, restrict))
10993 }
10994
    /// Parses the body of `ALTER FUNCTION` or `ALTER AGGREGATE` (the
    /// FUNCTION/AGGREGATE keyword has already been consumed; `kind` records
    /// which one it was).
    ///
    /// The operation branches guarded by `matches!(kind, ...)` are only legal
    /// for functions; aggregates fall through to the final error branch when
    /// none of the shared operations (RENAME TO / OWNER TO / SET SCHEMA) match.
    pub fn parse_alter_function(
        &mut self,
        kind: AlterFunctionKind,
    ) -> Result<Statement, ParserError> {
        // Aggregates have a richer signature form (wildcard `(*)`, ORDER BY args).
        let (function, aggregate_star, aggregate_order_by) = match kind {
            AlterFunctionKind::Function => (self.parse_function_desc()?, false, None),
            AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
        };

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterFunctionOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterFunctionOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterFunctionOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function) && self.parse_keyword(Keyword::NO) {
            // `NO DEPENDS ON EXTENSION <name>` (functions only).
            if !self.parse_keyword(Keyword::DEPENDS) {
                return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
            }
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: true,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function)
            && self.parse_keyword(Keyword::DEPENDS)
        {
            // `DEPENDS ON EXTENSION <name>` (functions only).
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: false,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function) {
            // Fallback for functions: one or more action clauses.
            let (actions, restrict) = self.parse_alter_function_actions()?;
            AlterFunctionOperation::Actions { actions, restrict }
        } else {
            return self.expected_ref(
                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterFunction(AlterFunction {
            kind,
            function,
            aggregate_order_by,
            aggregate_star,
            operation,
        }))
    }
11049
11050 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11052 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11053 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11055 let on_cluster = self.parse_optional_on_cluster()?;
11056 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11057
11058 let mut location = None;
11059 if self.parse_keyword(Keyword::LOCATION) {
11060 location = Some(HiveSetLocation {
11061 has_set: false,
11062 location: self.parse_identifier()?,
11063 });
11064 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11065 location = Some(HiveSetLocation {
11066 has_set: true,
11067 location: self.parse_identifier()?,
11068 });
11069 }
11070
11071 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11072 self.peek_token_ref().clone()
11073 } else {
11074 self.get_current_token().clone()
11075 };
11076
11077 Ok(AlterTable {
11078 name: table_name,
11079 if_exists,
11080 only,
11081 operations,
11082 location,
11083 on_cluster,
11084 table_type: if iceberg {
11085 Some(AlterTableType::Iceberg)
11086 } else {
11087 None
11088 },
11089 end_token: AttachedToken(end_token),
11090 }
11091 .into())
11092 }
11093
11094 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
11096 let name = self.parse_object_name(false)?;
11097 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11098
11099 let with_options = self.parse_options(Keyword::WITH)?;
11100
11101 self.expect_keyword_is(Keyword::AS)?;
11102 let query = self.parse_query()?;
11103
11104 Ok(Statement::AlterView {
11105 name,
11106 columns,
11107 query,
11108 with_options,
11109 })
11110 }
11111
11112 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
11114 let name = self.parse_object_name(false)?;
11115
11116 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11117 let new_name = self.parse_identifier()?;
11118 Ok(Statement::AlterType(AlterType {
11119 name,
11120 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
11121 }))
11122 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
11123 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
11124 let new_enum_value = self.parse_identifier()?;
11125 let position = if self.parse_keyword(Keyword::BEFORE) {
11126 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
11127 } else if self.parse_keyword(Keyword::AFTER) {
11128 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
11129 } else {
11130 None
11131 };
11132
11133 Ok(Statement::AlterType(AlterType {
11134 name,
11135 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
11136 if_not_exists,
11137 value: new_enum_value,
11138 position,
11139 }),
11140 }))
11141 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
11142 let existing_enum_value = self.parse_identifier()?;
11143 self.expect_keyword(Keyword::TO)?;
11144 let new_enum_value = self.parse_identifier()?;
11145
11146 Ok(Statement::AlterType(AlterType {
11147 name,
11148 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
11149 from: existing_enum_value,
11150 to: new_enum_value,
11151 }),
11152 }))
11153 } else {
11154 self.expected_ref(
11155 "{RENAME TO | { RENAME | ADD } VALUE}",
11156 self.peek_token_ref(),
11157 )
11158 }
11159 }
11160
11161 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11165 let name = self.parse_object_name(false)?;
11166 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11167 AlterCollationOperation::RenameTo {
11168 new_name: self.parse_identifier()?,
11169 }
11170 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11171 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11172 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11173 AlterCollationOperation::SetSchema {
11174 schema_name: self.parse_object_name(false)?,
11175 }
11176 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11177 AlterCollationOperation::RefreshVersion
11178 } else {
11179 return self.expected_ref(
11180 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11181 self.peek_token_ref(),
11182 );
11183 };
11184
11185 Ok(AlterCollation { name, operation })
11186 }
11187
    /// Parses `ALTER OPERATOR name ( left_type, right_type ) <operation>`
    /// (the `OPERATOR` keyword has already been consumed).
    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
        let name = self.parse_operator_name()?;

        // The operand-type signature is mandatory and parenthesized.
        self.expect_token(&Token::LParen)?;

        // A left type of NONE yields `None` (no left operand).
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        // `SET SCHEMA` must be tried before the bare `SET ( ... )` form.
        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // NOTE(review): this inlines the same alternatives that other call
            // sites get via `self.parse_owner()` — consider unifying; confirm
            // `parse_owner` accepts exactly these forms first.
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            self.expect_token(&Token::LParen)?;

            // Comma-separated `option [= value]` pairs until the closing paren.
            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    Keyword::RESTRICT => {
                        // `RESTRICT = { procedure | NONE }`.
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        // `JOIN = { procedure | NONE }`.
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    Keyword::COMMUTATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    // HASHES and MERGES are bare flags with no `= value` part.
                    Keyword::HASHES => {
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // Unreachable: `expect_one_of_keywords` only returns listed keywords.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        })
    }
11297
11298 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11300 let strategy_number = self.parse_literal_uint()?;
11301 let operator_name = self.parse_operator_name()?;
11302
11303 self.expect_token(&Token::LParen)?;
11305 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11306 self.expect_token(&Token::RParen)?;
11307
11308 let purpose = if self.parse_keyword(Keyword::FOR) {
11310 if self.parse_keyword(Keyword::SEARCH) {
11311 Some(OperatorPurpose::ForSearch)
11312 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11313 let sort_family = self.parse_object_name(false)?;
11314 Some(OperatorPurpose::ForOrderBy { sort_family })
11315 } else {
11316 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11317 }
11318 } else {
11319 None
11320 };
11321
11322 Ok(OperatorFamilyItem::Operator {
11323 strategy_number,
11324 operator_name,
11325 op_types,
11326 purpose,
11327 })
11328 }
11329
11330 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11332 let support_number = self.parse_literal_uint()?;
11333
11334 let op_types =
11336 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
11337 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11338 self.expect_token(&Token::RParen)?;
11339 Some(types)
11340 } else if self.consume_token(&Token::LParen) {
11341 self.expect_token(&Token::RParen)?;
11342 Some(vec![])
11343 } else {
11344 None
11345 };
11346
11347 let function_name = self.parse_object_name(false)?;
11348
11349 let argument_types = if self.consume_token(&Token::LParen) {
11351 if self.peek_token_ref().token == Token::RParen {
11352 self.expect_token(&Token::RParen)?;
11353 vec![]
11354 } else {
11355 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11356 self.expect_token(&Token::RParen)?;
11357 types
11358 }
11359 } else {
11360 vec![]
11361 };
11362
11363 Ok(OperatorFamilyItem::Function {
11364 support_number,
11365 op_types,
11366 function_name,
11367 argument_types,
11368 })
11369 }
11370
11371 fn parse_operator_family_drop_operator(
11373 &mut self,
11374 ) -> Result<OperatorFamilyDropItem, ParserError> {
11375 let strategy_number = self.parse_literal_uint()?;
11376
11377 self.expect_token(&Token::LParen)?;
11379 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11380 self.expect_token(&Token::RParen)?;
11381
11382 Ok(OperatorFamilyDropItem::Operator {
11383 strategy_number,
11384 op_types,
11385 })
11386 }
11387
11388 fn parse_operator_family_drop_function(
11390 &mut self,
11391 ) -> Result<OperatorFamilyDropItem, ParserError> {
11392 let support_number = self.parse_literal_uint()?;
11393
11394 self.expect_token(&Token::LParen)?;
11396 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11397 self.expect_token(&Token::RParen)?;
11398
11399 Ok(OperatorFamilyDropItem::Function {
11400 support_number,
11401 op_types,
11402 })
11403 }
11404
11405 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11407 if self.parse_keyword(Keyword::OPERATOR) {
11408 self.parse_operator_family_add_operator()
11409 } else if self.parse_keyword(Keyword::FUNCTION) {
11410 self.parse_operator_family_add_function()
11411 } else {
11412 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11413 }
11414 }
11415
11416 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
11418 if self.parse_keyword(Keyword::OPERATOR) {
11419 self.parse_operator_family_drop_operator()
11420 } else if self.parse_keyword(Keyword::FUNCTION) {
11421 self.parse_operator_family_drop_function()
11422 } else {
11423 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11424 }
11425 }
11426
11427 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
11430 let name = self.parse_object_name(false)?;
11431 self.expect_keyword(Keyword::USING)?;
11432 let using = self.parse_identifier()?;
11433
11434 let operation = if self.parse_keyword(Keyword::ADD) {
11435 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
11436 AlterOperatorFamilyOperation::Add { items }
11437 } else if self.parse_keyword(Keyword::DROP) {
11438 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
11439 AlterOperatorFamilyOperation::Drop { items }
11440 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11441 let new_name = self.parse_object_name(false)?;
11442 AlterOperatorFamilyOperation::RenameTo { new_name }
11443 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11444 let owner = self.parse_owner()?;
11445 AlterOperatorFamilyOperation::OwnerTo(owner)
11446 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11447 let schema_name = self.parse_object_name(false)?;
11448 AlterOperatorFamilyOperation::SetSchema { schema_name }
11449 } else {
11450 return self.expected_ref(
11451 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
11452 self.peek_token_ref(),
11453 );
11454 };
11455
11456 Ok(AlterOperatorFamily {
11457 name,
11458 using,
11459 operation,
11460 })
11461 }
11462
11463 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
11467 let name = self.parse_object_name(false)?;
11468 self.expect_keyword(Keyword::USING)?;
11469 let using = self.parse_identifier()?;
11470
11471 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11472 let new_name = self.parse_object_name(false)?;
11473 AlterOperatorClassOperation::RenameTo { new_name }
11474 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11475 let owner = self.parse_owner()?;
11476 AlterOperatorClassOperation::OwnerTo(owner)
11477 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11478 let schema_name = self.parse_object_name(false)?;
11479 AlterOperatorClassOperation::SetSchema { schema_name }
11480 } else {
11481 return self.expected_ref(
11482 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
11483 self.peek_token_ref(),
11484 );
11485 };
11486
11487 Ok(AlterOperatorClass {
11488 name,
11489 using,
11490 operation,
11491 })
11492 }
11493
    /// Parses a complete `ALTER SCHEMA ...` statement. Unlike the sibling
    /// `parse_alter_*` methods, this expects to re-consume the leading
    /// `ALTER SCHEMA` keywords itself (the caller rewinds before calling).
    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
            // Rewind OPTIONS so `parse_options(Keyword::OPTIONS)` can consume
            // the keyword together with its parenthesized list.
            self.prev_token();
            let options = self.parse_options(Keyword::OPTIONS)?;
            AlterSchemaOperation::SetOptionsParens { options }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
            let collate = self.parse_expr()?;
            AlterSchemaOperation::SetDefaultCollate { collate }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            // The OPTIONS list after ADD REPLICA is optional.
            let options = if self.peek_keyword(Keyword::OPTIONS) {
                Some(self.parse_options(Keyword::OPTIONS)?)
            } else {
                None
            };
            AlterSchemaOperation::AddReplica { replica, options }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            AlterSchemaOperation::DropReplica { replica }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterSchemaOperation::Rename { name: new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterSchemaOperation::OwnerTo { owner }
        } else {
            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
        };
        Ok(Statement::AlterSchema(AlterSchema {
            name,
            if_exists,
            operations: vec![operation],
        }))
    }
11534
11535 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
11538 let object_name = self.parse_object_name(false)?;
11539 if self.peek_token_ref().token == Token::LParen {
11540 match self.parse_function(object_name)? {
11541 Expr::Function(f) => Ok(Statement::Call(f)),
11542 other => parser_err!(
11543 format!("Expected a simple procedure call but found: {other}"),
11544 self.peek_token_ref().span.start
11545 ),
11546 }
11547 } else {
11548 Ok(Statement::Call(Function {
11549 name: object_name,
11550 uses_odbc_syntax: false,
11551 parameters: FunctionArguments::None,
11552 args: FunctionArguments::None,
11553 over: None,
11554 filter: None,
11555 null_treatment: None,
11556 within_group: vec![],
11557 }))
11558 }
11559 }
11560
11561 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
11563 let source;
11564 if self.consume_token(&Token::LParen) {
11565 source = CopySource::Query(self.parse_query()?);
11566 self.expect_token(&Token::RParen)?;
11567 } else {
11568 let table_name = self.parse_object_name(false)?;
11569 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11570 source = CopySource::Table {
11571 table_name,
11572 columns,
11573 };
11574 }
11575 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
11576 Some(Keyword::FROM) => false,
11577 Some(Keyword::TO) => true,
11578 _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
11579 };
11580 if !to {
11581 if let CopySource::Query(_) = source {
11584 return Err(ParserError::ParserError(
11585 "COPY ... FROM does not support query as a source".to_string(),
11586 ));
11587 }
11588 }
11589 let target = if self.parse_keyword(Keyword::STDIN) {
11590 CopyTarget::Stdin
11591 } else if self.parse_keyword(Keyword::STDOUT) {
11592 CopyTarget::Stdout
11593 } else if self.parse_keyword(Keyword::PROGRAM) {
11594 CopyTarget::Program {
11595 command: self.parse_literal_string()?,
11596 }
11597 } else {
11598 CopyTarget::File {
11599 filename: self.parse_literal_string()?,
11600 }
11601 };
11602 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
11604 if self.consume_token(&Token::LParen) {
11605 options = self.parse_comma_separated(Parser::parse_copy_option)?;
11606 self.expect_token(&Token::RParen)?;
11607 }
11608 let mut legacy_options = vec![];
11609 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
11610 legacy_options.push(opt);
11611 }
11612 let values =
11613 if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
11614 self.expect_token(&Token::SemiColon)?;
11615 self.parse_tsv()
11616 } else {
11617 vec![]
11618 };
11619 Ok(Statement::Copy {
11620 source,
11621 to,
11622 target,
11623 options,
11624 legacy_options,
11625 values,
11626 })
11627 }
11628
11629 fn parse_open(&mut self) -> Result<Statement, ParserError> {
11631 self.expect_keyword(Keyword::OPEN)?;
11632 Ok(Statement::Open(OpenStatement {
11633 cursor_name: self.parse_identifier()?,
11634 }))
11635 }
11636
11637 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
11639 let cursor = if self.parse_keyword(Keyword::ALL) {
11640 CloseCursor::All
11641 } else {
11642 let name = self.parse_identifier()?;
11643
11644 CloseCursor::Specific { name }
11645 };
11646
11647 Ok(Statement::Close { cursor })
11648 }
11649
11650 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
11651 let ret = match self.parse_one_of_keywords(&[
11652 Keyword::FORMAT,
11653 Keyword::FREEZE,
11654 Keyword::DELIMITER,
11655 Keyword::NULL,
11656 Keyword::HEADER,
11657 Keyword::QUOTE,
11658 Keyword::ESCAPE,
11659 Keyword::FORCE_QUOTE,
11660 Keyword::FORCE_NOT_NULL,
11661 Keyword::FORCE_NULL,
11662 Keyword::ENCODING,
11663 ]) {
11664 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
11665 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
11666 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
11667 Some(Keyword::FALSE)
11668 )),
11669 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
11670 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
11671 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
11672 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
11673 Some(Keyword::FALSE)
11674 )),
11675 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
11676 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
11677 Some(Keyword::FORCE_QUOTE) => {
11678 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
11679 }
11680 Some(Keyword::FORCE_NOT_NULL) => {
11681 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
11682 }
11683 Some(Keyword::FORCE_NULL) => {
11684 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
11685 }
11686 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
11687 _ => self.expected_ref("option", self.peek_token_ref())?,
11688 };
11689 Ok(ret)
11690 }
11691
11692 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
11693 if self.parse_keyword(Keyword::FORMAT) {
11695 let _ = self.parse_keyword(Keyword::AS);
11696 }
11697
11698 let ret = match self.parse_one_of_keywords(&[
11699 Keyword::ACCEPTANYDATE,
11700 Keyword::ACCEPTINVCHARS,
11701 Keyword::ADDQUOTES,
11702 Keyword::ALLOWOVERWRITE,
11703 Keyword::BINARY,
11704 Keyword::BLANKSASNULL,
11705 Keyword::BZIP2,
11706 Keyword::CLEANPATH,
11707 Keyword::COMPUPDATE,
11708 Keyword::CREDENTIALS,
11709 Keyword::CSV,
11710 Keyword::DATEFORMAT,
11711 Keyword::DELIMITER,
11712 Keyword::EMPTYASNULL,
11713 Keyword::ENCRYPTED,
11714 Keyword::ESCAPE,
11715 Keyword::EXTENSION,
11716 Keyword::FIXEDWIDTH,
11717 Keyword::GZIP,
11718 Keyword::HEADER,
11719 Keyword::IAM_ROLE,
11720 Keyword::IGNOREHEADER,
11721 Keyword::JSON,
11722 Keyword::MANIFEST,
11723 Keyword::MAXFILESIZE,
11724 Keyword::NULL,
11725 Keyword::PARALLEL,
11726 Keyword::PARQUET,
11727 Keyword::PARTITION,
11728 Keyword::REGION,
11729 Keyword::REMOVEQUOTES,
11730 Keyword::ROWGROUPSIZE,
11731 Keyword::STATUPDATE,
11732 Keyword::TIMEFORMAT,
11733 Keyword::TRUNCATECOLUMNS,
11734 Keyword::ZSTD,
11735 ]) {
11736 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
11737 Some(Keyword::ACCEPTINVCHARS) => {
11738 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
11740 Some(self.parse_literal_string()?)
11741 } else {
11742 None
11743 };
11744 CopyLegacyOption::AcceptInvChars(ch)
11745 }
11746 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
11747 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
11748 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
11749 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
11750 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
11751 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
11752 Some(Keyword::COMPUPDATE) => {
11753 let preset = self.parse_keyword(Keyword::PRESET);
11754 let enabled = match self.parse_one_of_keywords(&[
11755 Keyword::TRUE,
11756 Keyword::FALSE,
11757 Keyword::ON,
11758 Keyword::OFF,
11759 ]) {
11760 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11761 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11762 _ => None,
11763 };
11764 CopyLegacyOption::CompUpdate { preset, enabled }
11765 }
11766 Some(Keyword::CREDENTIALS) => {
11767 CopyLegacyOption::Credentials(self.parse_literal_string()?)
11768 }
11769 Some(Keyword::CSV) => CopyLegacyOption::Csv({
11770 let mut opts = vec![];
11771 while let Some(opt) =
11772 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
11773 {
11774 opts.push(opt);
11775 }
11776 opts
11777 }),
11778 Some(Keyword::DATEFORMAT) => {
11779 let _ = self.parse_keyword(Keyword::AS);
11780 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
11781 Some(self.parse_literal_string()?)
11782 } else {
11783 None
11784 };
11785 CopyLegacyOption::DateFormat(fmt)
11786 }
11787 Some(Keyword::DELIMITER) => {
11788 let _ = self.parse_keyword(Keyword::AS);
11789 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
11790 }
11791 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
11792 Some(Keyword::ENCRYPTED) => {
11793 let auto = self.parse_keyword(Keyword::AUTO);
11794 CopyLegacyOption::Encrypted { auto }
11795 }
11796 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
11797 Some(Keyword::EXTENSION) => {
11798 let ext = self.parse_literal_string()?;
11799 CopyLegacyOption::Extension(ext)
11800 }
11801 Some(Keyword::FIXEDWIDTH) => {
11802 let spec = self.parse_literal_string()?;
11803 CopyLegacyOption::FixedWidth(spec)
11804 }
11805 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
11806 Some(Keyword::HEADER) => CopyLegacyOption::Header,
11807 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
11808 Some(Keyword::IGNOREHEADER) => {
11809 let _ = self.parse_keyword(Keyword::AS);
11810 let num_rows = self.parse_literal_uint()?;
11811 CopyLegacyOption::IgnoreHeader(num_rows)
11812 }
11813 Some(Keyword::JSON) => {
11814 let _ = self.parse_keyword(Keyword::AS);
11815 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
11816 Some(self.parse_literal_string()?)
11817 } else {
11818 None
11819 };
11820 CopyLegacyOption::Json(fmt)
11821 }
11822 Some(Keyword::MANIFEST) => {
11823 let verbose = self.parse_keyword(Keyword::VERBOSE);
11824 CopyLegacyOption::Manifest { verbose }
11825 }
11826 Some(Keyword::MAXFILESIZE) => {
11827 let _ = self.parse_keyword(Keyword::AS);
11828 let size = self.parse_number_value()?;
11829 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
11830 Some(Keyword::MB) => Some(FileSizeUnit::MB),
11831 Some(Keyword::GB) => Some(FileSizeUnit::GB),
11832 _ => None,
11833 };
11834 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
11835 }
11836 Some(Keyword::NULL) => {
11837 let _ = self.parse_keyword(Keyword::AS);
11838 CopyLegacyOption::Null(self.parse_literal_string()?)
11839 }
11840 Some(Keyword::PARALLEL) => {
11841 let enabled = match self.parse_one_of_keywords(&[
11842 Keyword::TRUE,
11843 Keyword::FALSE,
11844 Keyword::ON,
11845 Keyword::OFF,
11846 ]) {
11847 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11848 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11849 _ => None,
11850 };
11851 CopyLegacyOption::Parallel(enabled)
11852 }
11853 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
11854 Some(Keyword::PARTITION) => {
11855 self.expect_keyword(Keyword::BY)?;
11856 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
11857 let include = self.parse_keyword(Keyword::INCLUDE);
11858 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
11859 }
11860 Some(Keyword::REGION) => {
11861 let _ = self.parse_keyword(Keyword::AS);
11862 let region = self.parse_literal_string()?;
11863 CopyLegacyOption::Region(region)
11864 }
11865 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
11866 Some(Keyword::ROWGROUPSIZE) => {
11867 let _ = self.parse_keyword(Keyword::AS);
11868 let file_size = self.parse_file_size()?;
11869 CopyLegacyOption::RowGroupSize(file_size)
11870 }
11871 Some(Keyword::STATUPDATE) => {
11872 let enabled = match self.parse_one_of_keywords(&[
11873 Keyword::TRUE,
11874 Keyword::FALSE,
11875 Keyword::ON,
11876 Keyword::OFF,
11877 ]) {
11878 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11879 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11880 _ => None,
11881 };
11882 CopyLegacyOption::StatUpdate(enabled)
11883 }
11884 Some(Keyword::TIMEFORMAT) => {
11885 let _ = self.parse_keyword(Keyword::AS);
11886 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
11887 Some(self.parse_literal_string()?)
11888 } else {
11889 None
11890 };
11891 CopyLegacyOption::TimeFormat(fmt)
11892 }
11893 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
11894 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
11895 _ => self.expected_ref("option", self.peek_token_ref())?,
11896 };
11897 Ok(ret)
11898 }
11899
11900 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
11901 let size = self.parse_number_value()?;
11902 let unit = self.maybe_parse_file_size_unit();
11903 Ok(FileSize { size, unit })
11904 }
11905
11906 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
11907 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
11908 Some(Keyword::MB) => Some(FileSizeUnit::MB),
11909 Some(Keyword::GB) => Some(FileSizeUnit::GB),
11910 _ => None,
11911 }
11912 }
11913
11914 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
11915 if self.parse_keyword(Keyword::DEFAULT) {
11916 Ok(IamRoleKind::Default)
11917 } else {
11918 let arn = self.parse_literal_string()?;
11919 Ok(IamRoleKind::Arn(arn))
11920 }
11921 }
11922
11923 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
11924 let ret = match self.parse_one_of_keywords(&[
11925 Keyword::HEADER,
11926 Keyword::QUOTE,
11927 Keyword::ESCAPE,
11928 Keyword::FORCE,
11929 ]) {
11930 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
11931 Some(Keyword::QUOTE) => {
11932 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
11934 }
11935 Some(Keyword::ESCAPE) => {
11936 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
11938 }
11939 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
11940 CopyLegacyCsvOption::ForceNotNull(
11941 self.parse_comma_separated(|p| p.parse_identifier())?,
11942 )
11943 }
11944 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
11945 CopyLegacyCsvOption::ForceQuote(
11946 self.parse_comma_separated(|p| p.parse_identifier())?,
11947 )
11948 }
11949 _ => self.expected_ref("csv option", self.peek_token_ref())?,
11950 };
11951 Ok(ret)
11952 }
11953
11954 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
11955 let s = self.parse_literal_string()?;
11956 if s.len() != 1 {
11957 let loc = self
11958 .tokens
11959 .get(self.index - 1)
11960 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
11961 return parser_err!(format!("Expect a char, found {s:?}"), loc);
11962 }
11963 Ok(s.chars().next().unwrap())
11964 }
11965
    /// Parse inline tab-separated values, as supplied after
    /// `COPY ... FROM STDIN;`. Thin alias for [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
11971
11972 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
11974 let mut values = vec![];
11975 let mut content = String::new();
11976 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
11977 match t {
11978 Token::Whitespace(Whitespace::Tab) => {
11979 values.push(Some(core::mem::take(&mut content)));
11980 }
11981 Token::Whitespace(Whitespace::Newline) => {
11982 values.push(Some(core::mem::take(&mut content)));
11983 }
11984 Token::Backslash => {
11985 if self.consume_token(&Token::Period) {
11986 return values;
11987 }
11988 if let Token::Word(w) = self.next_token().token {
11989 if w.value == "N" {
11990 values.push(None);
11991 }
11992 }
11993 }
11994 _ => {
11995 content.push_str(&t.to_string());
11996 }
11997 }
11998 }
11999 values
12000 }
12001
    /// Parse a literal value — boolean, NULL, number, any of the quoted
    /// string/byte/raw-string forms, or a placeholder — returning it together
    /// with its source span.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Attach the consumed token's span to the parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are values only in dialects with boolean literals.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword was tokenized as a Word; map it back to
                // the matching quoted-string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Plain quoted strings may absorb adjacent literals, depending on
            // the dialect's concatenation rules.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the identifier (or integer) must
            // follow immediately — whitespace is NOT skipped here.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // Span covers the sigil through the end of the identifier.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12119
    /// If the dialect supports string-literal concatenation, append any
    /// directly following string literals to `str` and consume their tokens.
    ///
    /// Two dialect modes: plain adjacency (`'a' 'b'`), and a variant that
    /// requires at least one newline between the literals.
    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
        if self.dialect.supports_string_literal_concatenation() {
            // Greedily absorb every immediately following string literal.
            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
                self.peek_token_ref().token
            {
                str.push_str(s);
                self.advance_token();
            }
        } else if self
            .dialect
            .supports_string_literal_concatenation_with_newline()
        {
            // Only concatenate when a newline separated the literals; other
            // whitespace is consumed but does not enable concatenation.
            let mut after_newline = false;
            loop {
                match self.peek_token_no_skip().token {
                    Token::Whitespace(Whitespace::Newline) => {
                        after_newline = true;
                        self.next_token_no_skip();
                    }
                    // NOTE(review): whitespace is consumed here even when no
                    // literal follows — appears intentional since whitespace
                    // tokens are otherwise skipped by the regular token API.
                    Token::Whitespace(_) => {
                        self.next_token_no_skip();
                    }
                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
                        if after_newline =>
                    {
                        str.push_str(s.clone().as_str());
                        self.next_token_no_skip();
                        // Require a fresh newline before any further literal.
                        after_newline = false;
                    }
                    _ => break,
                }
            }
        }

        str
    }
12158
12159 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12161 let value_wrapper = self.parse_value()?;
12162 match &value_wrapper.value {
12163 Value::Number(_, _) => Ok(value_wrapper),
12164 Value::Placeholder(_) => Ok(value_wrapper),
12165 _ => {
12166 self.prev_token();
12167 self.expected_ref("literal number", self.peek_token_ref())
12168 }
12169 }
12170 }
12171
12172 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12175 let next_token = self.next_token();
12176 match next_token.token {
12177 Token::Plus => Ok(Expr::UnaryOp {
12178 op: UnaryOperator::Plus,
12179 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12180 }),
12181 Token::Minus => Ok(Expr::UnaryOp {
12182 op: UnaryOperator::Minus,
12183 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12184 }),
12185 _ => {
12186 self.prev_token();
12187 Ok(Expr::Value(self.parse_number_value()?))
12188 }
12189 }
12190 }
12191
12192 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12193 let next_token = self.next_token();
12194 let span = next_token.span;
12195 match next_token.token {
12196 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12197 Value::SingleQuotedString(s.to_string()).with_span(span),
12198 )),
12199 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12200 Value::DoubleQuotedString(s.to_string()).with_span(span),
12201 )),
12202 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12203 Value::HexStringLiteral(s.to_string()).with_span(span),
12204 )),
12205 unexpected => self.expected(
12206 "a string value",
12207 TokenWithSpan {
12208 token: unexpected,
12209 span,
12210 },
12211 ),
12212 }
12213 }
12214
12215 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12217 let next_token = self.next_token();
12218 match next_token.token {
12219 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12220 _ => self.expected("literal int", next_token),
12221 }
12222 }
12223
    /// Parse the string body of `CREATE FUNCTION ... AS 'body' [, 'link_symbol']`.
    ///
    /// On PostgreSQL (and the generic dialect) the body may be a
    /// dollar-quoted string; otherwise a plain string literal is required.
    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
        // Parses one string-valued expression of the AS clause.
        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
            let peek_token = parser.peek_token();
            let span = peek_token.span;
            match peek_token.token {
                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
                {
                    parser.next_token();
                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
                }
                _ => Ok(Expr::Value(
                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
                )),
            }
        };

        Ok(CreateFunctionBody::AsBeforeOptions {
            body: parse_string_expr(self)?,
            // A second, comma-separated string is the optional link symbol
            // (used by C-language functions).
            link_symbol: if self.consume_token(&Token::Comma) {
                Some(parse_string_expr(self)?)
            } else {
                None
            },
        })
    }
12251
    /// Parse a string-like literal and return its textual content. Accepts a
    /// bare non-keyword word, single/double-quoted strings, Unicode string
    /// literals, and (on PostgreSQL-like dialects) `E'...'` escaped strings.
    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            // A bare word is accepted as long as it is not a keyword.
            Token::Word(Word {
                value,
                keyword: Keyword::NoKeyword,
                ..
            }) => Ok(value),
            Token::SingleQuotedString(s) => Ok(s),
            Token::DoubleQuotedString(s) => Ok(s),
            // `E'...'` is dialect-gated.
            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s)
            }
            Token::UnicodeStringLiteral(s) => Ok(s),
            _ => self.expected("literal string", next_token),
        }
    }
12270
12271 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12273 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12274 Some(Keyword::TRUE) => Ok(true),
12275 Some(Keyword::FALSE) => Ok(false),
12276 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12277 }
12278 }
12279
12280 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
12282 let neg = self.parse_keyword(Keyword::NOT);
12283 let normalized_form = self.maybe_parse(|parser| {
12284 match parser.parse_one_of_keywords(&[
12285 Keyword::NFC,
12286 Keyword::NFD,
12287 Keyword::NFKC,
12288 Keyword::NFKD,
12289 ]) {
12290 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
12291 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
12292 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
12293 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
12294 _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
12295 }
12296 })?;
12297 if self.parse_keyword(Keyword::NORMALIZED) {
12298 return Ok(Expr::IsNormalized {
12299 expr: Box::new(expr),
12300 form: normalized_form,
12301 negated: neg,
12302 });
12303 }
12304 self.expected_ref("unicode normalization form", self.peek_token_ref())
12305 }
12306
12307 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12309 self.expect_token(&Token::LParen)?;
12310 let values = self.parse_comma_separated(|parser| {
12311 let name = parser.parse_literal_string()?;
12312 let e = if parser.consume_token(&Token::Eq) {
12313 let value = parser.parse_number()?;
12314 EnumMember::NamedValue(name, value)
12315 } else {
12316 EnumMember::Name(name)
12317 };
12318 Ok(e)
12319 })?;
12320 self.expect_token(&Token::RParen)?;
12321
12322 Ok(values)
12323 }
12324
    /// Parse a SQL data type, rejecting a stray trailing `>` left unmatched
    /// after nested generic syntax (e.g. `ARRAY<INT>>`).
    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
        // The helper reports whether an unconsumed `>` remained.
        if trailing_bracket.0 {
            return parser_err!(
                format!("unmatched > after parsing data type {ty}"),
                self.peek_token_ref()
            );
        }

        Ok(ty)
    }
12337
12338 fn parse_data_type_helper(
12339 &mut self,
12340 ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
12341 let dialect = self.dialect;
12342 self.advance_token();
12343 let next_token = self.get_current_token();
12344 let next_token_index = self.get_current_index();
12345
12346 let mut trailing_bracket: MatchedTrailingBracket = false.into();
12347 let mut data = match &next_token.token {
12348 Token::Word(w) => match w.keyword {
12349 Keyword::BOOLEAN => Ok(DataType::Boolean),
12350 Keyword::BOOL => Ok(DataType::Bool),
12351 Keyword::FLOAT => {
12352 let precision = self.parse_exact_number_optional_precision_scale()?;
12353
12354 if self.parse_keyword(Keyword::UNSIGNED) {
12355 Ok(DataType::FloatUnsigned(precision))
12356 } else {
12357 Ok(DataType::Float(precision))
12358 }
12359 }
12360 Keyword::REAL => {
12361 if self.parse_keyword(Keyword::UNSIGNED) {
12362 Ok(DataType::RealUnsigned)
12363 } else {
12364 Ok(DataType::Real)
12365 }
12366 }
12367 Keyword::FLOAT4 => Ok(DataType::Float4),
12368 Keyword::FLOAT32 => Ok(DataType::Float32),
12369 Keyword::FLOAT64 => Ok(DataType::Float64),
12370 Keyword::FLOAT8 => Ok(DataType::Float8),
12371 Keyword::DOUBLE => {
12372 if self.parse_keyword(Keyword::PRECISION) {
12373 if self.parse_keyword(Keyword::UNSIGNED) {
12374 Ok(DataType::DoublePrecisionUnsigned)
12375 } else {
12376 Ok(DataType::DoublePrecision)
12377 }
12378 } else {
12379 let precision = self.parse_exact_number_optional_precision_scale()?;
12380
12381 if self.parse_keyword(Keyword::UNSIGNED) {
12382 Ok(DataType::DoubleUnsigned(precision))
12383 } else {
12384 Ok(DataType::Double(precision))
12385 }
12386 }
12387 }
12388 Keyword::TINYINT => {
12389 let optional_precision = self.parse_optional_precision();
12390 if self.parse_keyword(Keyword::UNSIGNED) {
12391 Ok(DataType::TinyIntUnsigned(optional_precision?))
12392 } else {
12393 if dialect.supports_data_type_signed_suffix() {
12394 let _ = self.parse_keyword(Keyword::SIGNED);
12395 }
12396 Ok(DataType::TinyInt(optional_precision?))
12397 }
12398 }
12399 Keyword::INT2 => {
12400 let optional_precision = self.parse_optional_precision();
12401 if self.parse_keyword(Keyword::UNSIGNED) {
12402 Ok(DataType::Int2Unsigned(optional_precision?))
12403 } else {
12404 Ok(DataType::Int2(optional_precision?))
12405 }
12406 }
12407 Keyword::SMALLINT => {
12408 let optional_precision = self.parse_optional_precision();
12409 if self.parse_keyword(Keyword::UNSIGNED) {
12410 Ok(DataType::SmallIntUnsigned(optional_precision?))
12411 } else {
12412 if dialect.supports_data_type_signed_suffix() {
12413 let _ = self.parse_keyword(Keyword::SIGNED);
12414 }
12415 Ok(DataType::SmallInt(optional_precision?))
12416 }
12417 }
12418 Keyword::MEDIUMINT => {
12419 let optional_precision = self.parse_optional_precision();
12420 if self.parse_keyword(Keyword::UNSIGNED) {
12421 Ok(DataType::MediumIntUnsigned(optional_precision?))
12422 } else {
12423 if dialect.supports_data_type_signed_suffix() {
12424 let _ = self.parse_keyword(Keyword::SIGNED);
12425 }
12426 Ok(DataType::MediumInt(optional_precision?))
12427 }
12428 }
12429 Keyword::INT => {
12430 let optional_precision = self.parse_optional_precision();
12431 if self.parse_keyword(Keyword::UNSIGNED) {
12432 Ok(DataType::IntUnsigned(optional_precision?))
12433 } else {
12434 if dialect.supports_data_type_signed_suffix() {
12435 let _ = self.parse_keyword(Keyword::SIGNED);
12436 }
12437 Ok(DataType::Int(optional_precision?))
12438 }
12439 }
12440 Keyword::INT4 => {
12441 let optional_precision = self.parse_optional_precision();
12442 if self.parse_keyword(Keyword::UNSIGNED) {
12443 Ok(DataType::Int4Unsigned(optional_precision?))
12444 } else {
12445 Ok(DataType::Int4(optional_precision?))
12446 }
12447 }
12448 Keyword::INT8 => {
12449 let optional_precision = self.parse_optional_precision();
12450 if self.parse_keyword(Keyword::UNSIGNED) {
12451 Ok(DataType::Int8Unsigned(optional_precision?))
12452 } else {
12453 Ok(DataType::Int8(optional_precision?))
12454 }
12455 }
12456 Keyword::INT16 => Ok(DataType::Int16),
12457 Keyword::INT32 => Ok(DataType::Int32),
12458 Keyword::INT64 => Ok(DataType::Int64),
12459 Keyword::INT128 => Ok(DataType::Int128),
12460 Keyword::INT256 => Ok(DataType::Int256),
12461 Keyword::INTEGER => {
12462 let optional_precision = self.parse_optional_precision();
12463 if self.parse_keyword(Keyword::UNSIGNED) {
12464 Ok(DataType::IntegerUnsigned(optional_precision?))
12465 } else {
12466 if dialect.supports_data_type_signed_suffix() {
12467 let _ = self.parse_keyword(Keyword::SIGNED);
12468 }
12469 Ok(DataType::Integer(optional_precision?))
12470 }
12471 }
12472 Keyword::BIGINT => {
12473 let optional_precision = self.parse_optional_precision();
12474 if self.parse_keyword(Keyword::UNSIGNED) {
12475 Ok(DataType::BigIntUnsigned(optional_precision?))
12476 } else {
12477 if dialect.supports_data_type_signed_suffix() {
12478 let _ = self.parse_keyword(Keyword::SIGNED);
12479 }
12480 Ok(DataType::BigInt(optional_precision?))
12481 }
12482 }
12483 Keyword::HUGEINT => Ok(DataType::HugeInt),
12484 Keyword::UBIGINT => Ok(DataType::UBigInt),
12485 Keyword::UHUGEINT => Ok(DataType::UHugeInt),
12486 Keyword::USMALLINT => Ok(DataType::USmallInt),
12487 Keyword::UTINYINT => Ok(DataType::UTinyInt),
12488 Keyword::UINT8 => Ok(DataType::UInt8),
12489 Keyword::UINT16 => Ok(DataType::UInt16),
12490 Keyword::UINT32 => Ok(DataType::UInt32),
12491 Keyword::UINT64 => Ok(DataType::UInt64),
12492 Keyword::UINT128 => Ok(DataType::UInt128),
12493 Keyword::UINT256 => Ok(DataType::UInt256),
12494 Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
12495 Keyword::NVARCHAR => {
12496 Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
12497 }
12498 Keyword::CHARACTER => {
12499 if self.parse_keyword(Keyword::VARYING) {
12500 Ok(DataType::CharacterVarying(
12501 self.parse_optional_character_length()?,
12502 ))
12503 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
12504 Ok(DataType::CharacterLargeObject(
12505 self.parse_optional_precision()?,
12506 ))
12507 } else {
12508 Ok(DataType::Character(self.parse_optional_character_length()?))
12509 }
12510 }
12511 Keyword::CHAR => {
12512 if self.parse_keyword(Keyword::VARYING) {
12513 Ok(DataType::CharVarying(
12514 self.parse_optional_character_length()?,
12515 ))
12516 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
12517 Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
12518 } else {
12519 Ok(DataType::Char(self.parse_optional_character_length()?))
12520 }
12521 }
12522 Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
12523 Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
12524 Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
12525 Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
12526 Keyword::TINYBLOB => Ok(DataType::TinyBlob),
12527 Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
12528 Keyword::LONGBLOB => Ok(DataType::LongBlob),
12529 Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
12530 Keyword::BIT => {
12531 if self.parse_keyword(Keyword::VARYING) {
12532 Ok(DataType::BitVarying(self.parse_optional_precision()?))
12533 } else {
12534 Ok(DataType::Bit(self.parse_optional_precision()?))
12535 }
12536 }
12537 Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
12538 Keyword::UUID => Ok(DataType::Uuid),
12539 Keyword::DATE => Ok(DataType::Date),
12540 Keyword::DATE32 => Ok(DataType::Date32),
12541 Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
12542 Keyword::DATETIME64 => {
12543 self.prev_token();
12544 let (precision, time_zone) = self.parse_datetime_64()?;
12545 Ok(DataType::Datetime64(precision, time_zone))
12546 }
12547 Keyword::TIMESTAMP => {
12548 let precision = self.parse_optional_precision()?;
12549 let tz = if self.parse_keyword(Keyword::WITH) {
12550 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
12551 TimezoneInfo::WithTimeZone
12552 } else if self.parse_keyword(Keyword::WITHOUT) {
12553 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
12554 TimezoneInfo::WithoutTimeZone
12555 } else {
12556 TimezoneInfo::None
12557 };
12558 Ok(DataType::Timestamp(precision, tz))
12559 }
12560 Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
12561 self.parse_optional_precision()?,
12562 TimezoneInfo::Tz,
12563 )),
12564 Keyword::TIMESTAMP_NTZ => {
12565 Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
12566 }
12567 Keyword::TIME => {
12568 let precision = self.parse_optional_precision()?;
12569 let tz = if self.parse_keyword(Keyword::WITH) {
12570 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
12571 TimezoneInfo::WithTimeZone
12572 } else if self.parse_keyword(Keyword::WITHOUT) {
12573 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
12574 TimezoneInfo::WithoutTimeZone
12575 } else {
12576 TimezoneInfo::None
12577 };
12578 Ok(DataType::Time(precision, tz))
12579 }
12580 Keyword::TIMETZ => Ok(DataType::Time(
12581 self.parse_optional_precision()?,
12582 TimezoneInfo::Tz,
12583 )),
12584 Keyword::INTERVAL => {
12585 if self.dialect.supports_interval_options() {
12586 let fields = self.maybe_parse_optional_interval_fields()?;
12587 let precision = self.parse_optional_precision()?;
12588 Ok(DataType::Interval { fields, precision })
12589 } else {
12590 Ok(DataType::Interval {
12591 fields: None,
12592 precision: None,
12593 })
12594 }
12595 }
12596 Keyword::JSON => Ok(DataType::JSON),
12597 Keyword::JSONB => Ok(DataType::JSONB),
12598 Keyword::REGCLASS => Ok(DataType::Regclass),
12599 Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
12600 Keyword::FIXEDSTRING => {
12601 self.expect_token(&Token::LParen)?;
12602 let character_length = self.parse_literal_uint()?;
12603 self.expect_token(&Token::RParen)?;
12604 Ok(DataType::FixedString(character_length))
12605 }
12606 Keyword::TEXT => Ok(DataType::Text),
12607 Keyword::TINYTEXT => Ok(DataType::TinyText),
12608 Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
12609 Keyword::LONGTEXT => Ok(DataType::LongText),
12610 Keyword::BYTEA => Ok(DataType::Bytea),
12611 Keyword::NUMERIC => Ok(DataType::Numeric(
12612 self.parse_exact_number_optional_precision_scale()?,
12613 )),
12614 Keyword::DECIMAL => {
12615 let precision = self.parse_exact_number_optional_precision_scale()?;
12616
12617 if self.parse_keyword(Keyword::UNSIGNED) {
12618 Ok(DataType::DecimalUnsigned(precision))
12619 } else {
12620 Ok(DataType::Decimal(precision))
12621 }
12622 }
12623 Keyword::DEC => {
12624 let precision = self.parse_exact_number_optional_precision_scale()?;
12625
12626 if self.parse_keyword(Keyword::UNSIGNED) {
12627 Ok(DataType::DecUnsigned(precision))
12628 } else {
12629 Ok(DataType::Dec(precision))
12630 }
12631 }
12632 Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
12633 self.parse_exact_number_optional_precision_scale()?,
12634 )),
12635 Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
12636 self.parse_exact_number_optional_precision_scale()?,
12637 )),
12638 Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
12639 Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
12640 Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
12641 Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
12642 Keyword::ARRAY => {
12643 if self.dialect.supports_array_typedef_without_element_type() {
12644 Ok(DataType::Array(ArrayElemTypeDef::None))
12645 } else if dialect_of!(self is ClickHouseDialect) {
12646 Ok(self.parse_sub_type(|internal_type| {
12647 DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
12648 })?)
12649 } else {
12650 self.expect_token(&Token::Lt)?;
12651 let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
12652 trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
12653 Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
12654 inside_type,
12655 ))))
12656 }
12657 }
12658 Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
12659 self.prev_token();
12660 let field_defs = self.parse_duckdb_struct_type_def()?;
12661 Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
12662 }
12663 Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
12664 {
12665 self.prev_token();
12666 let (field_defs, _trailing_bracket) =
12667 self.parse_struct_type_def(Self::parse_struct_field_def)?;
12668 trailing_bracket = _trailing_bracket;
12669 Ok(DataType::Struct(
12670 field_defs,
12671 StructBracketKind::AngleBrackets,
12672 ))
12673 }
12674 Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
12675 self.prev_token();
12676 let fields = self.parse_union_type_def()?;
12677 Ok(DataType::Union(fields))
12678 }
12679 Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12680 Ok(self.parse_sub_type(DataType::Nullable)?)
12681 }
12682 Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12683 Ok(self.parse_sub_type(DataType::LowCardinality)?)
12684 }
12685 Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12686 self.prev_token();
12687 let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
12688 Ok(DataType::Map(
12689 Box::new(key_data_type),
12690 Box::new(value_data_type),
12691 ))
12692 }
12693 Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12694 self.expect_token(&Token::LParen)?;
12695 let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
12696 self.expect_token(&Token::RParen)?;
12697 Ok(DataType::Nested(field_defs))
12698 }
12699 Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
12700 self.prev_token();
12701 let field_defs = self.parse_click_house_tuple_def()?;
12702 Ok(DataType::Tuple(field_defs))
12703 }
12704 Keyword::TRIGGER => Ok(DataType::Trigger),
12705 Keyword::SETOF => {
12706 let inner = self.parse_data_type()?;
12707 Ok(DataType::SetOf(Box::new(inner)))
12708 }
12709 Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
12710 let _ = self.parse_keyword(Keyword::TYPE);
12711 Ok(DataType::AnyType)
12712 }
12713 Keyword::TABLE => {
12714 if self.peek_token_ref().token == Token::LParen {
12717 let columns = self.parse_returns_table_columns()?;
12718 Ok(DataType::Table(Some(columns)))
12719 } else {
12720 Ok(DataType::Table(None))
12721 }
12722 }
12723 Keyword::SIGNED => {
12724 if self.parse_keyword(Keyword::INTEGER) {
12725 Ok(DataType::SignedInteger)
12726 } else {
12727 Ok(DataType::Signed)
12728 }
12729 }
12730 Keyword::UNSIGNED => {
12731 if self.parse_keyword(Keyword::INTEGER) {
12732 Ok(DataType::UnsignedInteger)
12733 } else {
12734 Ok(DataType::Unsigned)
12735 }
12736 }
12737 Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
12738 Ok(DataType::TsVector)
12739 }
12740 Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
12741 Ok(DataType::TsQuery)
12742 }
12743 _ => {
12744 self.prev_token();
12745 let type_name = self.parse_object_name(false)?;
12746 if let Some(modifiers) = self.parse_optional_type_modifiers()? {
12747 Ok(DataType::Custom(type_name, modifiers))
12748 } else {
12749 Ok(DataType::Custom(type_name, vec![]))
12750 }
12751 }
12752 },
12753 _ => self.expected_at("a data type name", next_token_index),
12754 }?;
12755
12756 if self.dialect.supports_array_typedef_with_brackets() {
12757 while self.consume_token(&Token::LBracket) {
12758 let size = self.maybe_parse(|p| p.parse_literal_uint())?;
12760 self.expect_token(&Token::RBracket)?;
12761 data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
12762 }
12763 }
12764 Ok((data, trailing_bracket))
12765 }
12766
    /// Parses a single column definition inside a `RETURNS TABLE (...)` clause.
    ///
    /// Currently identical to an ordinary column definition; kept as a separate
    /// entry point so the `RETURNS TABLE` grammar can diverge later without
    /// touching callers.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
12770
12771 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
12772 self.expect_token(&Token::LParen)?;
12773 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
12774 self.expect_token(&Token::RParen)?;
12775 Ok(columns)
12776 }
12777
12778 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
12780 self.expect_token(&Token::LParen)?;
12781 let mut values = Vec::new();
12782 loop {
12783 let next_token = self.next_token();
12784 match next_token.token {
12785 Token::SingleQuotedString(value) => values.push(value),
12786 _ => self.expected("a string", next_token)?,
12787 }
12788 let next_token = self.next_token();
12789 match next_token.token {
12790 Token::Comma => (),
12791 Token::RParen => break,
12792 _ => self.expected(", or }", next_token)?,
12793 }
12794 }
12795 Ok(values)
12796 }
12797
12798 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
12800 let ident = self.parse_identifier()?;
12801 self.expect_keyword_is(Keyword::AS)?;
12802 let alias = self.parse_identifier()?;
12803 Ok(IdentWithAlias { ident, alias })
12804 }
12805
12806 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
12808 let ident = self.parse_identifier()?;
12809 let _after_as = self.parse_keyword(Keyword::AS);
12810 let alias = self.parse_identifier()?;
12811 Ok(IdentWithAlias { ident, alias })
12812 }
12813
12814 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
12816 self.parse_comma_separated(|parser| {
12817 parser.expect_token(&Token::LParen)?;
12818 let query = parser.parse_query()?;
12819 parser.expect_token(&Token::RParen)?;
12820 Ok(*query)
12821 })
12822 }
12823
12824 fn parse_distinct_required_set_quantifier(
12826 &mut self,
12827 operator_name: &str,
12828 ) -> Result<SetQuantifier, ParserError> {
12829 let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
12830 match quantifier {
12831 SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
12832 _ => Err(ParserError::ParserError(format!(
12833 "{operator_name} pipe operator requires DISTINCT modifier",
12834 ))),
12835 }
12836 }
12837
12838 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
12840 if self.parse_keyword(Keyword::AS) {
12841 Ok(Some(self.parse_identifier()?))
12842 } else {
12843 self.maybe_parse(|parser| parser.parse_identifier())
12845 }
12846 }
12847
12848 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
12850 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
12851 parser.dialect.is_select_item_alias(explicit, kw, parser)
12852 }
12853 self.parse_optional_alias_inner(None, validator)
12854 }
12855
12856 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
12860 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
12861 parser.dialect.is_table_factor_alias(explicit, kw, parser)
12862 }
12863 let explicit = self.peek_keyword(Keyword::AS);
12864 match self.parse_optional_alias_inner(None, validator)? {
12865 Some(name) => {
12866 let columns = self.parse_table_alias_column_defs()?;
12867 Ok(Some(TableAlias {
12868 explicit,
12869 name,
12870 columns,
12871 }))
12872 }
12873 None => Ok(None),
12874 }
12875 }
12876
    /// Parses zero or more table index hints of the form
    /// `USE|IGNORE|FORCE INDEX|KEY [FOR JOIN|ORDER BY|GROUP BY] (idx, ...)`.
    ///
    /// Stops (returning the hints collected so far) as soon as the next
    /// token is not USE/IGNORE/FORCE. The parenthesized index-name list is
    /// mandatory for each hint but may be empty, i.e. `()`.
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                // Unreachable in practice: parse_one_of_keywords only returns
                // one of the three keywords listed above.
                _ => {
                    return self.expected_ref(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token_ref(),
                    )
                }
            };
            // INDEX or KEY must follow the hint type.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self
                        .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
                }
            };
            // Optional `FOR JOIN | ORDER BY | GROUP BY` scope; if FOR is
            // present, one of the three scopes is required.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected_ref(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token_ref(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // Mandatory parenthesized list of index names; an empty list
            // `()` is allowed.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token_ref().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
12935
12936 pub fn parse_optional_alias(
12940 &mut self,
12941 reserved_kwds: &[Keyword],
12942 ) -> Result<Option<Ident>, ParserError> {
12943 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
12944 false
12945 }
12946 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
12947 }
12948
12949 fn parse_optional_alias_inner<F>(
12956 &mut self,
12957 reserved_kwds: Option<&[Keyword]>,
12958 validator: F,
12959 ) -> Result<Option<Ident>, ParserError>
12960 where
12961 F: Fn(bool, &Keyword, &mut Parser) -> bool,
12962 {
12963 let after_as = self.parse_keyword(Keyword::AS);
12964
12965 let next_token = self.next_token();
12966 match next_token.token {
12967 Token::Word(w)
12970 if reserved_kwds.is_some()
12971 && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
12972 {
12973 Ok(Some(w.into_ident(next_token.span)))
12974 }
12975 Token::Word(w) if validator(after_as, &w.keyword, self) => {
12979 Ok(Some(w.into_ident(next_token.span)))
12980 }
12981 Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
12983 Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
12984 _ => {
12985 if after_as {
12986 return self.expected("an identifier after AS", next_token);
12987 }
12988 self.prev_token();
12989 Ok(None) }
12991 }
12992 }
12993
    /// Parses an optional `GROUP BY` clause.
    ///
    /// Returns `None` when `GROUP BY` is absent. `GROUP BY ALL` yields
    /// `GroupByExpr::All`; otherwise the comma-separated expression list
    /// yields `GroupByExpr::Expressions`. For dialects that support them,
    /// trailing `WITH ROLLUP|CUBE|TOTALS` modifiers and a
    /// `GROUPING SETS (...)` modifier are collected as well.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `expressions` is None for GROUP BY ALL, Some(exprs) otherwise.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Repeated `WITH <modifier>` suffixes are allowed.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords only returns
                        // one of the three keywords above.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            // Optional `GROUPING SETS ((a, b), c, ...)` — each element is
            // either a parenthesized tuple or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13050
13051 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
13053 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13054 let order_by =
13055 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
13056 let order_by_options = self.parse_order_by_options()?;
13057 OrderBy {
13058 kind: OrderByKind::All(order_by_options),
13059 interpolate: None,
13060 }
13061 } else {
13062 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13063 let interpolate = if self.dialect.supports_interpolate() {
13064 self.parse_interpolations()?
13065 } else {
13066 None
13067 };
13068 OrderBy {
13069 kind: OrderByKind::Expressions(exprs),
13070 interpolate,
13071 }
13072 };
13073 Ok(Some(order_by))
13074 } else {
13075 Ok(None)
13076 }
13077 }
13078
    /// Parses an optional LIMIT/OFFSET clause in any of the supported
    /// orderings: `OFFSET ... LIMIT ...`, `LIMIT ... OFFSET ...`,
    /// `LIMIT <offset>, <limit>` (MySQL-style, dialect-gated), and
    /// ClickHouse-style `LIMIT ... BY ...`.
    ///
    /// Returns `None` when neither keyword is present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may come first.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        // `limit` is Option<Option<Expr>>: outer None = no LIMIT keyword,
        // inner None = LIMIT with no expression (e.g. LIMIT ALL).
        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL-style `LIMIT <offset>, <limit>` — only valid when no
            // OFFSET was already seen and a limit expression was parsed.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                // Cannot fail here (expr.is_some() was checked above);
                // defensive error kept for the impossible case.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // ClickHouse `LIMIT n BY expr, ...` (dialect-gated).
            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT.
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit != Some(None)` filters out a bare LIMIT with no expression
        // so it alone does not produce a clause.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
13130
13131 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13134 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13135 let fn_name = self.parse_object_name(false)?;
13136 self.parse_function_call(fn_name)
13137 .map(TableObject::TableFunction)
13138 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13139 self.parse_parenthesized(|p| p.parse_query())
13140 .map(TableObject::TableQuery)
13141 } else {
13142 self.parse_object_name(false).map(TableObject::TableName)
13143 }
13144 }
13145
    /// Parses a possibly-qualified object name such as `schema.table`,
    /// without allowing wildcard (`*`) parts.
    ///
    /// `in_table_clause` indicates the name appears in a table/FROM context,
    /// which enables BigQuery's unquoted hyphenated identifiers.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13155
    /// Parses a period-separated object name into its parts.
    ///
    /// * `in_table_clause` — enables BigQuery's unquoted hyphenated
    ///   identifiers (e.g. `my-project.dataset.table`).
    /// * `allow_wildcards` — allows a `*` token as a name part (e.g. in
    ///   `GRANT ... ON schema.*`).
    ///
    /// Parts may also be dialect-defined identifier-generating function
    /// calls, and dialects with "double dot" notation accept `db..table`
    /// (the empty middle part is materialized as an empty identifier).
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names: each part may contain hyphens, and a
            // part ending in a period (from number tokenization) also
            // continues the name.
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    // A `*` part, e.g. `schema.*`.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    // NOTE(review): this arm appears unreachable — the outer
                    // `if` already diverts exactly this condition to the
                    // loop above. Confirm before removing.
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // `db..table`: insert an empty identifier for the
                    // omitted middle part (the period itself is consumed at
                    // the bottom of the loop).
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a function call as a name part.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery accepts a quoted multi-part name in a single set of
        // quotes, e.g. `"a.b.c"`: split any part containing `.` into
        // separate identifier parts, preserving quote style and span.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13252
    /// Collects all word tokens up to (but not consuming) the first
    /// terminator: EOF, `=`, `;`, or `|>`.
    ///
    /// Non-word, non-terminator tokens (e.g. periods, commas) are skipped
    /// rather than rejected.
    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];
        loop {
            // Peek first so the terminator token is left unconsumed.
            let token = self.peek_token_ref();
            match &token.token {
                Token::Word(w) => {
                    idents.push(w.to_ident(token.span));
                }
                Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
                    break
                }
                // Anything else is silently skipped.
                _ => {}
            }
            self.advance_token();
        }
        Ok(idents)
    }
13271
    /// Parses a strictly period-separated multipart identifier
    /// (`a.b.c`), consuming tokens until EOF.
    ///
    /// Unlike [`Self::parse_object_name`], every part must be a word token:
    /// empty input, a trailing period, or any unexpected token is an error.
    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];

        // The first part must be a word.
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
            Token::EOF => {
                return Err(ParserError::ParserError(
                    "Empty input when parsing identifier".to_string(),
                ))?
            }
            token => {
                return Err(ParserError::ParserError(format!(
                    "Unexpected token in identifier: {token}"
                )))?
            }
        };

        // Then alternate `.word` until EOF.
        loop {
            match self.next_token().token {
                Token::Period => {
                    let next_token = self.next_token();
                    match next_token.token {
                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
                        Token::EOF => {
                            return Err(ParserError::ParserError(
                                "Trailing period in identifier".to_string(),
                            ))?
                        }
                        token => {
                            return Err(ParserError::ParserError(format!(
                                "Unexpected token following period in identifier: {token}"
                            )))?
                        }
                    }
                }
                Token::EOF => break,
                token => {
                    return Err(ParserError::ParserError(format!(
                        "Unexpected token in identifier: {token}"
                    )))?;
                }
            }
        }

        Ok(idents)
    }
13361
13362 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
13364 let next_token = self.next_token();
13365 match next_token.token {
13366 Token::Word(w) => Ok(w.into_ident(next_token.span)),
13367 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
13368 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
13369 _ => self.expected("identifier", next_token),
13370 }
13371 }
13372
    /// Parses a BigQuery-style identifier that may contain unquoted hyphens,
    /// e.g. `my-project`.
    ///
    /// Returns the identifier and a flag indicating whether it ended with a
    /// period that the tokenizer fused into a number token (e.g. the `123.`
    /// in `my-project-123.dataset`), in which case the caller should treat
    /// the name as continuing past the period.
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted words may absorb hyphenated continuations.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        // Take the token immediately after `-` without
                        // skipping whitespace; EOF is substituted if absent.
                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // The tokenizer may have fused a trailing
                                // period into the number (e.g. `123.`): keep
                                // only the digits and signal "ended with
                                // period" to the caller.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A numeric tail must be followed by
                                // whitespace unless a period comes next.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // An identifier ending in digits must be separated from
                    // what follows by whitespace (or EOF).
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to ordinary identifier parsing.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
13450
13451 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
13453 if self.consume_token(&Token::LParen) {
13454 if self.peek_token_ref().token == Token::RParen {
13455 self.next_token();
13456 Ok(vec![])
13457 } else {
13458 let cols = self.parse_comma_separated_with_trailing_commas(
13459 Parser::parse_view_column,
13460 self.dialect.supports_column_definition_trailing_commas(),
13461 Self::is_reserved_for_column_alias,
13462 )?;
13463 self.expect_token(&Token::RParen)?;
13464 Ok(cols)
13465 }
13466 } else {
13467 Ok(vec![])
13468 }
13469 }
13470
13471 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
13473 let name = self.parse_identifier()?;
13474 let options = self.parse_view_column_options()?;
13475 let data_type = if dialect_of!(self is ClickHouseDialect) {
13476 Some(self.parse_data_type()?)
13477 } else {
13478 None
13479 };
13480 Ok(ViewColumnDef {
13481 name,
13482 data_type,
13483 options,
13484 })
13485 }
13486
13487 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
13488 let mut options = Vec::new();
13489 loop {
13490 let option = self.parse_optional_column_option()?;
13491 if let Some(option) = option {
13492 options.push(option);
13493 } else {
13494 break;
13495 }
13496 }
13497 if options.is_empty() {
13498 Ok(None)
13499 } else if self.dialect.supports_space_separated_column_options() {
13500 Ok(Some(ColumnOptions::SpaceSeparated(options)))
13501 } else {
13502 Ok(Some(ColumnOptions::CommaSeparated(options)))
13503 }
13504 }
13505
13506 pub fn parse_parenthesized_column_list(
13509 &mut self,
13510 optional: IsOptional,
13511 allow_empty: bool,
13512 ) -> Result<Vec<Ident>, ParserError> {
13513 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
13514 }
13515
13516 pub fn parse_parenthesized_compound_identifier_list(
13518 &mut self,
13519 optional: IsOptional,
13520 allow_empty: bool,
13521 ) -> Result<Vec<Expr>, ParserError> {
13522 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13523 Ok(Expr::CompoundIdentifier(
13524 p.parse_period_separated(|p| p.parse_identifier())?,
13525 ))
13526 })
13527 }
13528
13529 fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
13532 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
13533 p.parse_create_index_expr()
13534 })
13535 }
13536
13537 pub fn parse_parenthesized_qualified_column_list(
13540 &mut self,
13541 optional: IsOptional,
13542 allow_empty: bool,
13543 ) -> Result<Vec<ObjectName>, ParserError> {
13544 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
13545 p.parse_object_name(true)
13546 })
13547 }
13548
13549 fn parse_parenthesized_column_list_inner<F, T>(
13552 &mut self,
13553 optional: IsOptional,
13554 allow_empty: bool,
13555 mut f: F,
13556 ) -> Result<Vec<T>, ParserError>
13557 where
13558 F: FnMut(&mut Parser) -> Result<T, ParserError>,
13559 {
13560 if self.consume_token(&Token::LParen) {
13561 if allow_empty && self.peek_token_ref().token == Token::RParen {
13562 self.next_token();
13563 Ok(vec![])
13564 } else {
13565 let cols = self.parse_comma_separated(|p| f(p))?;
13566 self.expect_token(&Token::RParen)?;
13567 Ok(cols)
13568 }
13569 } else if optional == Optional {
13570 Ok(vec![])
13571 } else {
13572 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
13573 }
13574 }
13575
13576 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
13578 if self.consume_token(&Token::LParen) {
13579 let cols = self.parse_comma_separated(|p| {
13580 let name = p.parse_identifier()?;
13581 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
13582 Ok(TableAliasColumnDef { name, data_type })
13583 })?;
13584 self.expect_token(&Token::RParen)?;
13585 Ok(cols)
13586 } else {
13587 Ok(vec![])
13588 }
13589 }
13590
13591 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
13593 self.expect_token(&Token::LParen)?;
13594 let n = self.parse_literal_uint()?;
13595 self.expect_token(&Token::RParen)?;
13596 Ok(n)
13597 }
13598
13599 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
13601 if self.consume_token(&Token::LParen) {
13602 let n = self.parse_literal_uint()?;
13603 self.expect_token(&Token::RParen)?;
13604 Ok(Some(n))
13605 } else {
13606 Ok(None)
13607 }
13608 }
13609
    /// Parses the optional field qualifier of an `INTERVAL` type, e.g.
    /// `YEAR`, `MONTH`, `DAY TO SECOND`, `HOUR TO MINUTE`.
    ///
    /// Returns `Ok(None)` when no interval-field keyword follows (nothing is
    /// consumed in that case).
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            // YEAR or YEAR TO MONTH
            Some(Keyword::YEAR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            // DAY, or DAY TO {HOUR | MINUTE | SECOND}
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        // Defensive: `expect_one_of_keywords` should only
                        // return keywords from the list above. Rewind so the
                        // error span points at the offending token.
                        _ => {
                            self.prev_token();
                            self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            // HOUR, or HOUR TO {MINUTE | SECOND}
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        // Defensive rewind, as in the DAY arm above.
                        _ => {
                            self.prev_token();
                            self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            // MINUTE or MINUTE TO SECOND
            Some(Keyword::MINUTE) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            // MONTH and SECOND cannot start a range.
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            // Defensive: unreachable unless the keyword list above changes
            // without this match being updated.
            Some(_) => {
                self.prev_token();
                self.expected_ref(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token_ref(),
                )
            }
            None => Ok(None),
        }
    }
13688
13689 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
13697 self.expect_keyword_is(Keyword::DATETIME64)?;
13698 self.expect_token(&Token::LParen)?;
13699 let precision = self.parse_literal_uint()?;
13700 let time_zone = if self.consume_token(&Token::Comma) {
13701 Some(self.parse_literal_string()?)
13702 } else {
13703 None
13704 };
13705 self.expect_token(&Token::RParen)?;
13706 Ok((precision, time_zone))
13707 }
13708
13709 pub fn parse_optional_character_length(
13711 &mut self,
13712 ) -> Result<Option<CharacterLength>, ParserError> {
13713 if self.consume_token(&Token::LParen) {
13714 let character_length = self.parse_character_length()?;
13715 self.expect_token(&Token::RParen)?;
13716 Ok(Some(character_length))
13717 } else {
13718 Ok(None)
13719 }
13720 }
13721
13722 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
13724 if self.consume_token(&Token::LParen) {
13725 let binary_length = self.parse_binary_length()?;
13726 self.expect_token(&Token::RParen)?;
13727 Ok(Some(binary_length))
13728 } else {
13729 Ok(None)
13730 }
13731 }
13732
13733 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
13735 if self.parse_keyword(Keyword::MAX) {
13736 return Ok(CharacterLength::Max);
13737 }
13738 let length = self.parse_literal_uint()?;
13739 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
13740 Some(CharLengthUnits::Characters)
13741 } else if self.parse_keyword(Keyword::OCTETS) {
13742 Some(CharLengthUnits::Octets)
13743 } else {
13744 None
13745 };
13746 Ok(CharacterLength::IntegerLength { length, unit })
13747 }
13748
13749 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
13751 if self.parse_keyword(Keyword::MAX) {
13752 return Ok(BinaryLength::Max);
13753 }
13754 let length = self.parse_literal_uint()?;
13755 Ok(BinaryLength::IntegerLength { length })
13756 }
13757
13758 pub fn parse_optional_precision_scale(
13760 &mut self,
13761 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
13762 if self.consume_token(&Token::LParen) {
13763 let n = self.parse_literal_uint()?;
13764 let scale = if self.consume_token(&Token::Comma) {
13765 Some(self.parse_literal_uint()?)
13766 } else {
13767 None
13768 };
13769 self.expect_token(&Token::RParen)?;
13770 Ok((Some(n), scale))
13771 } else {
13772 Ok((None, None))
13773 }
13774 }
13775
13776 pub fn parse_exact_number_optional_precision_scale(
13778 &mut self,
13779 ) -> Result<ExactNumberInfo, ParserError> {
13780 if self.consume_token(&Token::LParen) {
13781 let precision = self.parse_literal_uint()?;
13782 let scale = if self.consume_token(&Token::Comma) {
13783 Some(self.parse_signed_integer()?)
13784 } else {
13785 None
13786 };
13787
13788 self.expect_token(&Token::RParen)?;
13789
13790 match scale {
13791 None => Ok(ExactNumberInfo::Precision(precision)),
13792 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
13793 }
13794 } else {
13795 Ok(ExactNumberInfo::None)
13796 }
13797 }
13798
13799 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
13801 let is_negative = self.consume_token(&Token::Minus);
13802
13803 if !is_negative {
13804 let _ = self.consume_token(&Token::Plus);
13805 }
13806
13807 let current_token = self.peek_token_ref();
13808 match ¤t_token.token {
13809 Token::Number(s, _) => {
13810 let s = s.clone();
13811 let span_start = current_token.span.start;
13812 self.advance_token();
13813 let value = Self::parse::<i64>(s, span_start)?;
13814 Ok(if is_negative { -value } else { value })
13815 }
13816 _ => self.expected_ref("number", current_token),
13817 }
13818 }
13819
13820 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
13822 if self.consume_token(&Token::LParen) {
13823 let mut modifiers = Vec::new();
13824 loop {
13825 let next_token = self.next_token();
13826 match next_token.token {
13827 Token::Word(w) => modifiers.push(w.to_string()),
13828 Token::Number(n, _) => modifiers.push(n),
13829 Token::SingleQuotedString(s) => modifiers.push(s),
13830
13831 Token::Comma => {
13832 continue;
13833 }
13834 Token::RParen => {
13835 break;
13836 }
13837 _ => self.expected("type modifiers", next_token)?,
13838 }
13839 }
13840
13841 Ok(Some(modifiers))
13842 } else {
13843 Ok(None)
13844 }
13845 }
13846
13847 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
13849 where
13850 F: FnOnce(Box<DataType>) -> DataType,
13851 {
13852 self.expect_token(&Token::LParen)?;
13853 let inside_type = self.parse_data_type()?;
13854 self.expect_token(&Token::RParen)?;
13855 Ok(parent_type(inside_type.into()))
13856 }
13857
13858 fn parse_delete_setexpr_boxed(
13862 &mut self,
13863 delete_token: TokenWithSpan,
13864 ) -> Result<Box<SetExpr>, ParserError> {
13865 Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
13866 }
13867
    /// Parses a `DELETE` statement; `delete_token` is the already-consumed
    /// `DELETE` keyword token (kept for span information).
    ///
    /// Handles both `DELETE FROM t ...` and the multi-table
    /// `DELETE t1, t2 FROM ...` form, plus optional `OUTPUT`, `USING`,
    /// `WHERE`, `RETURNING`, `ORDER BY`, and `LIMIT` clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // `tables` is the explicit table list between DELETE and FROM
        // (multi-table syntax); `with_from_keyword` records whether a FROM
        // keyword appeared so the statement can be round-tripped faithfully.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            // BigQuery/Oracle (and the generic dialect) allow `DELETE <table>`
            // with no FROM keyword at all.
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        // Optional OUTPUT clause, if the dialect produces one.
        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
13932
13933 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
13936 let modifier_keyword =
13937 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
13938
13939 let id = self.parse_literal_uint()?;
13940
13941 let modifier = match modifier_keyword {
13942 Some(Keyword::CONNECTION) => Some(KillType::Connection),
13943 Some(Keyword::QUERY) => Some(KillType::Query),
13944 Some(Keyword::MUTATION) => {
13945 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
13946 Some(KillType::Mutation)
13947 } else {
13948 self.expected_ref(
13949 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
13950 self.peek_token_ref(),
13951 )?
13952 }
13953 }
13954 _ => None,
13955 };
13956
13957 Ok(Statement::Kill { modifier, id })
13958 }
13959
    /// Parses an `EXPLAIN`/`DESCRIBE`/`DESC` statement (the alias keyword
    /// itself has already been consumed and is passed as `describe_alias`).
    ///
    /// If the remaining input parses as a statement, produces
    /// `Statement::Explain`; otherwise falls back to the describe-table form
    /// (`Statement::ExplainTable`).
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Utility-options form, e.g. `EXPLAIN (ANALYZE, FORMAT JSON) ...`.
        // Only attempted for EXPLAIN proper (not DESCRIBE/DESC) on dialects
        // that support it; otherwise try the keyword modifiers.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        // Try to parse the rest as a full statement; `maybe_parse` backtracks
        // on failure so the table-name fallback below sees the same tokens.
        match self.maybe_parse(|parser| parser.parse_statement())? {
            // A nested EXPLAIN is rejected: EXPLAIN must be the plan root.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // Not a statement: treat as `DESCRIBE <table>` with optional
                // EXTENDED/FORMATTED modifiers and an optional TABLE keyword
                // for dialects that require it.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14030
    /// Parses a query expression: an optional `WITH` (CTE) prefix followed by
    /// either a DML statement usable as a query body (`INSERT`/`UPDATE`/
    /// `DELETE`/`MERGE`) or a regular query body plus its optional trailing
    /// clauses (`ORDER BY`, limit clause, `SETTINGS`, `FETCH`, `FOR ...`
    /// locks, `FORMAT`, pipe operators).
    ///
    /// Depth is bounded by the parser's recursion counter (and, when the
    /// `recursive-protection` feature is on, by the stack-growth guard).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            // `get_current_token` here is the just-consumed WITH keyword.
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // DML bodies: for each, the current token is the just-consumed
        // keyword, forwarded for span tracking. All trailing query clauses
        // are absent in these forms.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            // Ordinary query body; trailing clauses are parsed in a fixed
            // order and are all optional.
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // `FOR` may introduce either a FOR XML/JSON/BROWSE clause
            // (terminates the loop) or one or more locking clauses.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14162
    /// Parses a chain of pipe operators (`|> <operator> ...`) following a
    /// query body. Returns an empty vec when the next token is not `|>`.
    ///
    /// Each `|>` segment must begin with one of the operator keywords listed
    /// in the `expect_one_of_keywords` call below.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                // `AGGREGATE [exprs] [GROUP BY exprs]` — the leading expr
                // list is omitted when GROUP BY follows immediately.
                Keyword::AGGREGATE => {
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                // INTERSECT/EXCEPT require an explicit DISTINCT quantifier.
                Keyword::INTERSECT => {
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                // `CALL fn(...) [alias]` — the parsed expression must be a
                // function call.
                Keyword::CALL => {
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                // `PIVOT (aggs FOR col IN ( ANY [ORDER BY ...] | subquery |
                // value list )) [alias]`.
                Keyword::PIVOT => {
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                // Join operators: rewind so `parse_joins` sees the join
                // keyword again; exactly one join is allowed per segment.
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                // Unreachable unless the keyword list above gains an entry
                // without a matching arm.
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
14381
14382 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
14383 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
14384 {
14385 let key_values = self.parse_comma_separated(|p| {
14386 let key = p.parse_identifier()?;
14387 p.expect_token(&Token::Eq)?;
14388 let value = p.parse_expr()?;
14389 Ok(Setting { key, value })
14390 })?;
14391 Some(key_values)
14392 } else {
14393 None
14394 };
14395 Ok(settings)
14396 }
14397
14398 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
14400 if self.parse_keyword(Keyword::XML) {
14401 Ok(Some(self.parse_for_xml()?))
14402 } else if self.parse_keyword(Keyword::JSON) {
14403 Ok(Some(self.parse_for_json()?))
14404 } else if self.parse_keyword(Keyword::BROWSE) {
14405 Ok(Some(ForClause::Browse))
14406 } else {
14407 Ok(None)
14408 }
14409 }
14410
    /// Parses the tail of a `FOR XML` clause (the `FOR XML` keywords already
    /// consumed): a mode (`RAW`/`AUTO`/`EXPLICIT`/`PATH`, with optional
    /// parenthesized element name for RAW/PATH) followed by comma-separated
    /// modifiers (`ELEMENTS`, `BINARY BASE64`, `ROOT('name')`, `TYPE`).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            // RAW may carry an optional parenthesized element name.
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            // PATH may carry an optional parenthesized element name, like RAW.
            let mut element_name = None;
            if self.peek_token_ref().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        // Comma-separated trailing modifiers; an unrecognized keyword after a
        // comma is silently ignored and ends the loop on the next iteration.
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        while self.peek_token_ref().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                self.expect_keyword_is(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
14465
14466 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
14468 let for_json = if self.parse_keyword(Keyword::AUTO) {
14469 ForJson::Auto
14470 } else if self.parse_keyword(Keyword::PATH) {
14471 ForJson::Path
14472 } else {
14473 return Err(ParserError::ParserError(
14474 "Expected FOR JSON [AUTO | PATH ]".to_string(),
14475 ));
14476 };
14477 let mut root = None;
14478 let mut include_null_values = false;
14479 let mut without_array_wrapper = false;
14480 while self.peek_token_ref().token == Token::Comma {
14481 self.next_token();
14482 if self.parse_keyword(Keyword::ROOT) {
14483 self.expect_token(&Token::LParen)?;
14484 root = Some(self.parse_literal_string()?);
14485 self.expect_token(&Token::RParen)?;
14486 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
14487 include_null_values = true;
14488 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
14489 without_array_wrapper = true;
14490 }
14491 }
14492 Ok(ForClause::Json {
14493 for_json,
14494 root,
14495 include_null_values,
14496 without_array_wrapper,
14497 })
14498 }
14499
    /// Parses a single CTE: `alias [( columns )] AS ( query )`, or — for
    /// dialects where `AS` is optional — the bare `alias ( query )` form.
    /// An optional trailing `FROM <ident>` is attached when supported.
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // Try the no-AS form first, via backtracking: `name ( query )`.
        // `maybe_parse` rewinds on failure so the regular form below sees
        // the same tokens.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Regular form: optional column list, then AS (mandatory unless the
        // dialect allows omitting it).
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // PostgreSQL-only [NOT] MATERIALIZED hint between AS and the body.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
14574
14575 pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
14584 let expr = if self.peek_keyword(Keyword::SELECT)
14587 || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
14588 {
14589 SetExpr::Select(self.parse_select().map(Box::new)?)
14590 } else if self.consume_token(&Token::LParen) {
14591 let subquery = self.parse_query()?;
14593 self.expect_token(&Token::RParen)?;
14594 SetExpr::Query(subquery)
14595 } else if self.parse_keyword(Keyword::VALUES) {
14596 let is_mysql = dialect_of!(self is MySqlDialect);
14597 SetExpr::Values(self.parse_values(is_mysql, false)?)
14598 } else if self.parse_keyword(Keyword::VALUE) {
14599 let is_mysql = dialect_of!(self is MySqlDialect);
14600 SetExpr::Values(self.parse_values(is_mysql, true)?)
14601 } else if self.parse_keyword(Keyword::TABLE) {
14602 SetExpr::Table(Box::new(self.parse_as_table()?))
14603 } else {
14604 return self.expected_ref(
14605 "SELECT, VALUES, or a subquery in the query body",
14606 self.peek_token_ref(),
14607 );
14608 };
14609
14610 self.parse_remaining_set_exprs(expr, precedence)
14611 }
14612
14613 fn parse_remaining_set_exprs(
14617 &mut self,
14618 mut expr: SetExpr,
14619 precedence: u8,
14620 ) -> Result<Box<SetExpr>, ParserError> {
14621 loop {
14622 let op = self.parse_set_operator(&self.peek_token().token);
14624 let next_precedence = match op {
14625 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
14627 10
14628 }
14629 Some(SetOperator::Intersect) => 20,
14631 None => break,
14633 };
14634 if precedence >= next_precedence {
14635 break;
14636 }
14637 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
14639 expr = SetExpr::SetOperation {
14640 left: Box::new(expr),
14641 op: op.unwrap(),
14642 set_quantifier,
14643 right: self.parse_query_body(next_precedence)?,
14644 };
14645 }
14646
14647 Ok(expr.into())
14648 }
14649
14650 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
14652 match token {
14653 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
14654 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
14655 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
14656 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
14657 _ => None,
14658 }
14659 }
14660
    /// Parses the optional quantifier that may follow a set operator:
    /// `ALL`, `DISTINCT`, `BY NAME`, `ALL BY NAME`, or `DISTINCT BY NAME`.
    /// Returns `SetQuantifier::None` when no quantifier is present or when
    /// `op` is `None`.
    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
        match op {
            Some(
                SetOperator::Except
                | SetOperator::Intersect
                | SetOperator::Union
                | SetOperator::Minus,
            ) => {
                // Order matters: try the longest keyword sequences first so
                // e.g. `DISTINCT BY NAME` is not consumed as bare `DISTINCT`.
                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::DistinctByName
                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::ByName
                } else if self.parse_keyword(Keyword::ALL) {
                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                        SetQuantifier::AllByName
                    } else {
                        SetQuantifier::All
                    }
                } else if self.parse_keyword(Keyword::DISTINCT) {
                    SetQuantifier::Distinct
                } else {
                    SetQuantifier::None
                }
            }
            _ => SetQuantifier::None,
        }
    }
14689
    /// Parses the body of a `SELECT` statement (everything from `SELECT` up
    /// to, but not including, `ORDER BY` / `LIMIT` / set operators).
    ///
    /// Also handles dialects with `FROM`-first syntax, where the `FROM`
    /// clause precedes `SELECT` and the `SELECT` clause may be omitted
    /// entirely. The order of the clause parsers below mirrors SQL clause
    /// order and is load-bearing: each branch consumes tokens only when its
    /// introducing keyword(s) match.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // FROM-first dialects: try to parse a leading FROM clause.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                // `FROM ...` with no SELECT at all: return a minimal Select
                // carrying only the FROM clause.
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            // A SELECT follows; remember the already-parsed FROM clause.
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        let value_table_mode = self.parse_value_table_mode()?;

        // MySQL-style modifiers (HIGH_PRIORITY, SQL_NO_CACHE, ...) may also
        // yield the DISTINCT quantifier.
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // TOP may appear before or after DISTINCT depending on the dialect.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // DISTINCT may already have been consumed by the modifier parser.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM t` with no projection list.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Use the FROM clause parsed up-front (FROM-first dialects) if any;
        // `from_first` is shadowed here as a bool recording which case hit.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive-style LATERAL VIEW clauses, zero or more.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ]) })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        // ClickHouse-style PREWHERE, dialect-gated.
        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; record which came
        // first so round-tripping preserves the original order.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
14917
    /// Collects optimizer hints embedded in comments (e.g. `/*+ ... */`)
    /// immediately following `SELECT`, for dialects that support them.
    ///
    /// Walks the raw token stream without skipping whitespace, consuming
    /// whitespace/comment tokens until a non-whitespace token is reached.
    /// Comments whose text does not match the hint shape are consumed but
    /// produce no hint.
    fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
        let supports_hints = self.dialect.supports_comment_optimizer_hint();
        if !supports_hints {
            return Ok(vec![]);
        }
        let mut hints = vec![];
        loop {
            // Peek without skipping whitespace so comments are visible.
            let t = self.peek_nth_token_no_skip_ref(0);
            let Token::Whitespace(ws) = &t.token else {
                break;
            };
            match ws {
                Whitespace::SingleLineComment { comment, prefix } => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::SingleLine {
                                prefix: prefix.clone(),
                            },
                        });
                    }
                    // Consume the comment token whether or not it was a hint.
                    self.next_token_no_skip();
                }
                Whitespace::MultiLineComment(comment) => {
                    if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
                        hints.push(OptimizerHint {
                            prefix: hint_prefix,
                            text,
                            style: OptimizerHintStyle::MultiLine,
                        });
                    }
                    self.next_token_no_skip();
                }
                Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
                    // Plain whitespace between hints: skip it.
                    self.next_token_no_skip();
                }
            }
        }
        Ok(hints)
    }
14967
14968 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
14971 let (before_plus, text) = comment.split_once('+')?;
14972 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
14973 Some((before_plus.to_string(), text.to_string()))
14974 } else {
14975 None
14976 }
14977 }
14978
    /// Parses MySQL-style SELECT modifier keywords (`HIGH_PRIORITY`,
    /// `STRAIGHT_JOIN`, `SQL_NO_CACHE`, ...), which may be freely interleaved
    /// with `ALL` / `DISTINCT` / `DISTINCTROW`.
    ///
    /// Returns the collected modifier flags (or `None` if no flag was set)
    /// together with any `Distinct` quantifier consumed along the way, so
    /// the caller does not attempt to parse DISTINCT a second time.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                // Only the first ALL/DISTINCT is a quantifier; push the
                // keyword back so parse_all_or_distinct sees it in full.
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                // A second ALL/DISTINCT/DISTINCTROW lands here: back up and
                // report it as unexpected.
                _ => {
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Collapse an all-default modifier set to None.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15040
    /// Parses BigQuery's value-table mode clause after `SELECT`:
    /// `[ALL | DISTINCT] AS {VALUE | STRUCT}`. Returns `None` for other
    /// dialects or when no such clause is present.
    ///
    /// The longer keyword sequences are tried first; each failed
    /// `parse_keywords` attempt leaves the token position unchanged, so the
    /// ordering of these checks is what makes the parse unambiguous.
    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
        if !dialect_of!(self is BigQueryDialect) {
            return Ok(None);
        }

        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
            Some(ValueTableMode::DistinctAsValue)
        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
            Some(ValueTableMode::DistinctAsStruct)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
        {
            Some(ValueTableMode::AsValue)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
        {
            Some(ValueTableMode::AsStruct)
        } else if self.parse_keyword(Keyword::AS) {
            // Bare `AS` followed by anything else is an error.
            self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
        } else {
            None
        };

        Ok(mode)
    }
15066
15067 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15071 where
15072 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15073 {
15074 let current_state = self.state;
15075 self.state = state;
15076 let res = f(self);
15077 self.state = current_state;
15078 res
15079 }
15080
15081 pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
15083 let mut clauses = Vec::with_capacity(2);
15084 loop {
15085 if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
15086 clauses.push(ConnectByKind::StartWith {
15087 start_token: self.token_at(idx).clone().into(),
15088 condition: self.parse_expr()?.into(),
15089 });
15090 } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
15091 {
15092 clauses.push(ConnectByKind::ConnectBy {
15093 connect_token: self.token_at(idx).clone().into(),
15094 nocycle: self.parse_keyword(Keyword::NOCYCLE),
15095 relationships: self.with_state(ParserState::ConnectBy, |parser| {
15096 parser.parse_comma_separated(Parser::parse_expr)
15097 })?,
15098 });
15099 } else {
15100 break;
15101 }
15102 }
15103 Ok(clauses)
15104 }
15105
15106 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15108 let token1 = self.next_token();
15109 let token2 = self.next_token();
15110 let token3 = self.next_token();
15111
15112 let table_name;
15113 let schema_name;
15114 if token2 == Token::Period {
15115 match token1.token {
15116 Token::Word(w) => {
15117 schema_name = w.value;
15118 }
15119 _ => {
15120 return self.expected("Schema name", token1);
15121 }
15122 }
15123 match token3.token {
15124 Token::Word(w) => {
15125 table_name = w.value;
15126 }
15127 _ => {
15128 return self.expected("Table name", token3);
15129 }
15130 }
15131 Ok(Table {
15132 table_name: Some(table_name),
15133 schema_name: Some(schema_name),
15134 })
15135 } else {
15136 match token1.token {
15137 Token::Word(w) => {
15138 table_name = w.value;
15139 }
15140 _ => {
15141 return self.expected("Table name", token1);
15142 }
15143 }
15144 Ok(Table {
15145 table_name: Some(table_name),
15146 schema_name: None,
15147 })
15148 }
15149 }
15150
15151 fn parse_set_role(
15153 &mut self,
15154 modifier: Option<ContextModifier>,
15155 ) -> Result<Statement, ParserError> {
15156 self.expect_keyword_is(Keyword::ROLE)?;
15157
15158 let role_name = if self.parse_keyword(Keyword::NONE) {
15159 None
15160 } else {
15161 Some(self.parse_identifier()?)
15162 };
15163 Ok(Statement::Set(Set::SetRole {
15164 context_modifier: modifier,
15165 role_name,
15166 }))
15167 }
15168
    /// Parses the comma-separated value list on the right-hand side of a
    /// SET assignment, optionally wrapped in parentheses
    /// (`parenthesized_assignment`).
    fn parse_set_values(
        &mut self,
        parenthesized_assignment: bool,
    ) -> Result<Vec<Expr>, ParserError> {
        let mut values = vec![];

        if parenthesized_assignment {
            self.expect_token(&Token::LParen)?;
        }

        loop {
            // Each value is either a parenthesized subquery or a plain
            // expression. NOTE(review): a failed parse_expr is deliberately
            // swallowed here and converted into an "expected variable value"
            // error instead of being propagated.
            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
                expr
            } else if let Ok(expr) = self.parse_expr() {
                expr
            } else {
                self.expected_ref("variable value", self.peek_token_ref())?
            };

            values.push(value);
            if self.consume_token(&Token::Comma) {
                continue;
            }

            // No more commas: close the paren if one was opened, and return.
            if parenthesized_assignment {
                self.expect_token(&Token::RParen)?;
            }
            return Ok(values);
        }
    }
15199
15200 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15201 let modifier =
15202 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15203
15204 Self::keyword_to_modifier(modifier)
15205 }
15206
15207 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15209 let scope = self.parse_context_modifier();
15210
15211 let name = if self.dialect.supports_parenthesized_set_variables()
15212 && self.consume_token(&Token::LParen)
15213 {
15214 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15218 } else {
15219 self.parse_object_name(false)?
15220 };
15221
15222 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15223 return self.expected_ref("assignment operator", self.peek_token_ref());
15224 }
15225
15226 let value = self.parse_expr()?;
15227
15228 Ok(SetAssignment { scope, name, value })
15229 }
15230
    /// Parses the remainder of a `SET ...` statement, dispatching on what
    /// follows: role, time zone, NAMES, transaction characteristics,
    /// session authorization, or one/many variable assignments.
    ///
    /// The branch order matters: each branch consumes its keywords only on
    /// a match, and several later branches rely on `prev_token` to undo a
    /// token consumed earlier.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // Scope modifier and HIVEVAR are mutually exclusive.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        // `SET HIVEVAR:name = ...` requires a colon after HIVEVAR.
        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET ROLE ...` — tried speculatively; on failure the token
        // position is rolled back by maybe_parse.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            // With `=` or `TO` this is a plain variable assignment spelled
            // TIMEZONE; otherwise it is the dedicated SET TIME ZONE form.
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET [SESSION] CHARACTERISTICS AS TRANSACTION ...`
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // SESSION AUTHORIZATION requires an explicit scope modifier.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            // Push the scope keyword back so each assignment can parse its
            // own scope modifier.
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly-one case: unwrap the single assignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a parenthesized variable list or a single object name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // No assignment operator: some dialects allow `SET <param> ...`
        // session parameters with no `=`/`TO`; back up one token and retry.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
15400
    /// Parses operator-less session parameter SET statements:
    /// `SET STATISTICS {IO|PROFILE|TIME|XML} ON/OFF`,
    /// `SET IDENTITY_INSERT <table> ON/OFF`, `SET OFFSETS <kw,...> ON/OFF`,
    /// or a generic `SET <name,...> <expr>` fallback.
    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::STATISTICS) {
            let topic = match self.parse_one_of_keywords(&[
                Keyword::IO,
                Keyword::PROFILE,
                Keyword::TIME,
                Keyword::XML,
            ]) {
                Some(Keyword::IO) => SessionParamStatsTopic::IO,
                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
                _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
            };
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
                    topic,
                    value,
                }))
                .into(),
            )
        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
            let obj = self.parse_object_name(false)?;
            let value = self.parse_session_param_value()?;
            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
                SetSessionParamIdentityInsert { obj, value },
            ))
            .into())
        } else if self.parse_keyword(Keyword::OFFSETS) {
            // OFFSETS takes a comma-separated list of keyword tokens.
            let keywords = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match &next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("SQL keyword", next_token),
                }
            })?;
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
                    keywords,
                    value,
                }))
                .into(),
            )
        } else {
            // Generic fallback: `SET <name,...> <expr>`, value stringified.
            let names = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("Session param name", next_token),
                }
            })?;
            let value = self.parse_expr()?.to_string();
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
                    names,
                    value,
                }))
                .into(),
            )
        }
    }
15465
15466 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
15467 if self.parse_keyword(Keyword::ON) {
15468 Ok(SessionParamValue::On)
15469 } else if self.parse_keyword(Keyword::OFF) {
15470 Ok(SessionParamValue::Off)
15471 } else {
15472 self.expected_ref("ON or OFF", self.peek_token_ref())
15473 }
15474 }
15475
    /// Parses the remainder of a `SHOW ...` statement.
    ///
    /// The optional modifier keywords (`TERSE`, `EXTENDED`, `FULL`,
    /// `SESSION`, `GLOBAL`, `EXTERNAL`) are consumed up-front, then the
    /// statement kind is dispatched on the next keyword. The branch order
    /// matters: multi-word forms (`MATERIALIZED VIEWS`, `CHARACTER SET`)
    /// are probed before their single-word relatives, and the
    /// `EXTENDED/FULL` error branch must come after every branch that
    /// accepts those modifiers.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // None of the remaining SHOW forms accept EXTENDED/FULL.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <variable>` for anything unrecognized.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
15539
15540 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
15541 Ok(Statement::ShowCharset(ShowCharset {
15543 is_shorthand,
15544 filter: self.parse_show_statement_filter()?,
15545 }))
15546 }
15547
15548 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
15549 let history = self.parse_keyword(Keyword::HISTORY);
15550 let show_options = self.parse_show_stmt_options()?;
15551 Ok(Statement::ShowCatalogs {
15552 terse,
15553 history,
15554 show_options,
15555 })
15556 }
15557
15558 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
15559 let history = self.parse_keyword(Keyword::HISTORY);
15560 let show_options = self.parse_show_stmt_options()?;
15561 Ok(Statement::ShowDatabases {
15562 terse,
15563 history,
15564 show_options,
15565 })
15566 }
15567
15568 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
15569 let history = self.parse_keyword(Keyword::HISTORY);
15570 let show_options = self.parse_show_stmt_options()?;
15571 Ok(Statement::ShowSchemas {
15572 terse,
15573 history,
15574 show_options,
15575 })
15576 }
15577
15578 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
15580 let obj_type = match self.expect_one_of_keywords(&[
15581 Keyword::TABLE,
15582 Keyword::TRIGGER,
15583 Keyword::FUNCTION,
15584 Keyword::PROCEDURE,
15585 Keyword::EVENT,
15586 Keyword::VIEW,
15587 ])? {
15588 Keyword::TABLE => Ok(ShowCreateObject::Table),
15589 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
15590 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
15591 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
15592 Keyword::EVENT => Ok(ShowCreateObject::Event),
15593 Keyword::VIEW => Ok(ShowCreateObject::View),
15594 keyword => Err(ParserError::ParserError(format!(
15595 "Unable to map keyword to ShowCreateObject: {keyword:?}"
15596 ))),
15597 }?;
15598
15599 let obj_name = self.parse_object_name(false)?;
15600
15601 Ok(Statement::ShowCreate { obj_type, obj_name })
15602 }
15603
15604 pub fn parse_show_columns(
15606 &mut self,
15607 extended: bool,
15608 full: bool,
15609 ) -> Result<Statement, ParserError> {
15610 let show_options = self.parse_show_stmt_options()?;
15611 Ok(Statement::ShowColumns {
15612 extended,
15613 full,
15614 show_options,
15615 })
15616 }
15617
15618 fn parse_show_tables(
15619 &mut self,
15620 terse: bool,
15621 extended: bool,
15622 full: bool,
15623 external: bool,
15624 ) -> Result<Statement, ParserError> {
15625 let history = !external && self.parse_keyword(Keyword::HISTORY);
15626 let show_options = self.parse_show_stmt_options()?;
15627 Ok(Statement::ShowTables {
15628 terse,
15629 history,
15630 extended,
15631 full,
15632 external,
15633 show_options,
15634 })
15635 }
15636
15637 fn parse_show_views(
15638 &mut self,
15639 terse: bool,
15640 materialized: bool,
15641 ) -> Result<Statement, ParserError> {
15642 let show_options = self.parse_show_stmt_options()?;
15643 Ok(Statement::ShowViews {
15644 materialized,
15645 terse,
15646 show_options,
15647 })
15648 }
15649
15650 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
15652 let filter = self.parse_show_statement_filter()?;
15653 Ok(Statement::ShowFunctions { filter })
15654 }
15655
15656 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
15658 let filter = self.parse_show_statement_filter()?;
15659 Ok(Statement::ShowCollation { filter })
15660 }
15661
15662 pub fn parse_show_statement_filter(
15664 &mut self,
15665 ) -> Result<Option<ShowStatementFilter>, ParserError> {
15666 if self.parse_keyword(Keyword::LIKE) {
15667 Ok(Some(ShowStatementFilter::Like(
15668 self.parse_literal_string()?,
15669 )))
15670 } else if self.parse_keyword(Keyword::ILIKE) {
15671 Ok(Some(ShowStatementFilter::ILike(
15672 self.parse_literal_string()?,
15673 )))
15674 } else if self.parse_keyword(Keyword::WHERE) {
15675 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
15676 } else {
15677 self.maybe_parse(|parser| -> Result<String, ParserError> {
15678 parser.parse_literal_string()
15679 })?
15680 .map_or(Ok(None), |filter| {
15681 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
15682 })
15683 }
15684 }
15685
15686 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
15688 let parsed_keyword = if dialect_of!(self is HiveDialect) {
15690 if self.parse_keyword(Keyword::DEFAULT) {
15692 return Ok(Statement::Use(Use::Default));
15693 }
15694 None } else if dialect_of!(self is DatabricksDialect) {
15696 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
15697 } else if dialect_of!(self is SnowflakeDialect) {
15698 self.parse_one_of_keywords(&[
15699 Keyword::DATABASE,
15700 Keyword::SCHEMA,
15701 Keyword::WAREHOUSE,
15702 Keyword::ROLE,
15703 Keyword::SECONDARY,
15704 ])
15705 } else {
15706 None };
15708
15709 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
15710 self.parse_secondary_roles()?
15711 } else {
15712 let obj_name = self.parse_object_name(false)?;
15713 match parsed_keyword {
15714 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
15715 Some(Keyword::DATABASE) => Use::Database(obj_name),
15716 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
15717 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
15718 Some(Keyword::ROLE) => Use::Role(obj_name),
15719 _ => Use::Object(obj_name),
15720 }
15721 };
15722
15723 Ok(Statement::Use(result))
15724 }
15725
15726 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
15727 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
15728 if self.parse_keyword(Keyword::NONE) {
15729 Ok(Use::SecondaryRoles(SecondaryRoles::None))
15730 } else if self.parse_keyword(Keyword::ALL) {
15731 Ok(Use::SecondaryRoles(SecondaryRoles::All))
15732 } else {
15733 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
15734 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
15735 }
15736 }
15737
15738 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
15740 let relation = self.parse_table_factor()?;
15741 let joins = self.parse_joins()?;
15745 Ok(TableWithJoins { relation, joins })
15746 }
15747
    /// Parses the (possibly empty) sequence of JOIN clauses following a
    /// table factor, stopping at the first token that cannot start a join.
    ///
    /// Handles `CROSS JOIN`/`CROSS APPLY`, `OUTER APPLY`, `ASOF JOIN`,
    /// and the `[NATURAL] [INNER|LEFT|RIGHT|FULL|SEMI|ANTI|STRAIGHT_]JOIN`
    /// family.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // An optional GLOBAL prefix may precede any join kind.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow an ON/USING constraint even on CROSS JOIN.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // OUTER here must be OUTER APPLY.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // ASOF JOIN <table> MATCH_CONDITION (<expr>) [<constraint>]
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a JoinOperator variant constructor that is
                // later applied to the parsed join constraint.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the LEFT/RIGHT keyword.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        // Consume ANTI; JOIN is mandatory.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        // Consume SEMI; JOIN is mandatory.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // Consume FULL; OUTER is optional before JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // A bare OUTER (not preceded by LEFT/RIGHT/FULL) is invalid.
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        // Consume STRAIGHT_JOIN.
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: end of the join list.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // For dialects without left-associative parens-less joins,
                // fold the following joins into a nested relation.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
15916
15917 fn peek_parens_less_nested_join(&self) -> bool {
15918 matches!(
15919 self.peek_token_ref().token,
15920 Token::Word(Word {
15921 keyword: Keyword::JOIN
15922 | Keyword::INNER
15923 | Keyword::LEFT
15924 | Keyword::RIGHT
15925 | Keyword::FULL,
15926 ..
15927 })
15928 )
15929 }
15930
    /// Parses a single table factor: the basic unit of a FROM clause.
    ///
    /// Covers `LATERAL` subqueries/functions, `TABLE(expr)`, parenthesized
    /// derived tables and nested joins, `VALUES`, `UNNEST`, `JSON_TABLE`,
    /// `OPENJSON`, `XMLTABLE`, `SEMANTIC_VIEW`, `@stage` references, and
    /// plain table names with their optional suffixes (PARTITION, version,
    /// args, sampling, alias, hints, PIVOT/UNPIVOT, MATCH_RECOGNIZE).
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        // Bound recursion depth; table factors can nest arbitrarily.
        let _guard = self.recursion_counter.try_decrease()?;
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL must be followed by a subquery or a table function call.
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // TABLE(<expr>) table function.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // '(' begins either a derived table (subquery) or a
            // parenthesized join; try the subquery interpretation first.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                // The derived table may be followed by PIVOT/UNPIVOT chains.
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            // Not a subquery: parse the parenthesized relation-with-joins.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // (a JOIN b ...) — keep as an explicit nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // ((a JOIN b)) — single relation that is itself a nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if self.dialect.supports_parens_around_table_factor() {
                // (table) — unwrap to the inner relation and attach any
                // outer alias to it, rejecting a double alias.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                self.expected_ref("joined table", self.peek_token_ref())
            }
        } else if self.dialect.supports_values_as_table_factor()
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // VALUES (...) used directly as a table factor: wrap it in a
            // minimal derived-table Query.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
                sample: None,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // UNNEST(expr [, expr ...]) [WITH ORDINALITY] [alias]
            //   [WITH OFFSET [alias]]
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // Treat a failed WITH OFFSET expectation as "not present".
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // JSON_TABLE(<expr>, <path> COLUMNS (...)) [alias]
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Rewind one token so the helper sees the '(' it expects.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            // Rewind one token so the helper sees the '(' it expects.
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else if self.peek_token_ref().token == Token::AtSign {
            // '@' introduces a stage reference.
            self.parse_snowflake_stage_table_factor()
        } else {
            // Plain table name, possibly with many optional suffixes.
            let name = self.parse_object_name(true)?;

            // PartiQL allows a JSON path right after the name.
            let json_path = match &self.peek_token_ref().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            // MySQL-style PARTITION (p1, p2, ...) selection.
            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Optional time-travel / version clause.
            let version = self.maybe_parse_table_version()?;

            // Table-valued function arguments, e.g. name(arg1, arg2).
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Sampling may be dialect-placed before or after the alias.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            // MySQL-style index hints (USE/FORCE/IGNORE INDEX ...).
            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // MSSQL-style WITH (hint, ...); a bare WITH that is not followed
            // by '(' is rewound and left for the caller.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Chained PIVOT/UNPIVOT transformations.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            // Optional MATCH_RECOGNIZE(...) wrapper.
            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
16267
16268 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16273 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16275
16276 let args = if self.consume_token(&Token::LParen) {
16278 Some(self.parse_table_function_args()?)
16279 } else {
16280 None
16281 };
16282
16283 let alias = self.maybe_parse_table_alias()?;
16284
16285 Ok(TableFactor::Table {
16286 name,
16287 alias,
16288 args,
16289 with_hints: vec![],
16290 version: None,
16291 partitions: vec![],
16292 with_ordinality: false,
16293 json_path: None,
16294 sample: None,
16295 index_hints: vec![],
16296 })
16297 }
16298
16299 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16300 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16301 TableSampleModifier::TableSample
16302 } else if self.parse_keyword(Keyword::SAMPLE) {
16303 TableSampleModifier::Sample
16304 } else {
16305 return Ok(None);
16306 };
16307 self.parse_table_sample(modifier).map(Some)
16308 }
16309
    /// Parses the body of a TABLESAMPLE/SAMPLE clause (the introducing
    /// keyword was already consumed): an optional method name, then either
    /// a Hive `BUCKET x OUT OF y [ON expr]` spec or a quantity with optional
    /// ROWS/PERCENT unit, plus optional REPEATABLE/SEED and OFFSET suffixes.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        let parenthesized = self.consume_token(&Token::LParen);

        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            // Hive: (BUCKET <n> OUT OF <m> [ON <expr>])
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // A quantity expression; a bare word (e.g. a byte length like
            // 100M, which tokenizes as a word) is accepted as a placeholder.
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic-seed clause.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional OFFSET expression.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
16404
16405 fn parse_table_sample_seed(
16406 &mut self,
16407 modifier: TableSampleSeedModifier,
16408 ) -> Result<TableSampleSeed, ParserError> {
16409 self.expect_token(&Token::LParen)?;
16410 let value = self.parse_number_value()?;
16411 self.expect_token(&Token::RParen)?;
16412 Ok(TableSampleSeed { modifier, value })
16413 }
16414
16415 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16418 self.expect_token(&Token::LParen)?;
16419 let json_expr = self.parse_expr()?;
16420 let json_path = if self.consume_token(&Token::Comma) {
16421 Some(self.parse_value()?)
16422 } else {
16423 None
16424 };
16425 self.expect_token(&Token::RParen)?;
16426 let columns = if self.parse_keyword(Keyword::WITH) {
16427 self.expect_token(&Token::LParen)?;
16428 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
16429 self.expect_token(&Token::RParen)?;
16430 columns
16431 } else {
16432 Vec::new()
16433 };
16434 let alias = self.maybe_parse_table_alias()?;
16435 Ok(TableFactor::OpenJsonTable {
16436 json_expr,
16437 json_path,
16438 columns,
16439 alias,
16440 })
16441 }
16442
16443 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16444 self.expect_token(&Token::LParen)?;
16445 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
16446 self.expect_token(&Token::LParen)?;
16447 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
16448 self.expect_token(&Token::RParen)?;
16449 self.expect_token(&Token::Comma)?;
16450 namespaces
16451 } else {
16452 vec![]
16453 };
16454 let row_expression = self.parse_expr()?;
16455 let passing = self.parse_xml_passing_clause()?;
16456 self.expect_keyword_is(Keyword::COLUMNS)?;
16457 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
16458 self.expect_token(&Token::RParen)?;
16459 let alias = self.maybe_parse_table_alias()?;
16460 Ok(TableFactor::XmlTable {
16461 namespaces,
16462 row_expression,
16463 passing,
16464 columns,
16465 alias,
16466 })
16467 }
16468
16469 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
16470 let uri = self.parse_expr()?;
16471 self.expect_keyword_is(Keyword::AS)?;
16472 let name = self.parse_identifier()?;
16473 Ok(XmlNamespaceDefinition { uri, name })
16474 }
16475
16476 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
16477 let name = self.parse_identifier()?;
16478
16479 let option = if self.parse_keyword(Keyword::FOR) {
16480 self.expect_keyword(Keyword::ORDINALITY)?;
16481 XmlTableColumnOption::ForOrdinality
16482 } else {
16483 let r#type = self.parse_data_type()?;
16484 let mut path = None;
16485 let mut default = None;
16486
16487 if self.parse_keyword(Keyword::PATH) {
16488 path = Some(self.parse_expr()?);
16489 }
16490
16491 if self.parse_keyword(Keyword::DEFAULT) {
16492 default = Some(self.parse_expr()?);
16493 }
16494
16495 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
16496 if !not_null {
16497 let _ = self.parse_keyword(Keyword::NULL);
16499 }
16500
16501 XmlTableColumnOption::NamedInfo {
16502 r#type,
16503 path,
16504 default,
16505 nullable: !not_null,
16506 }
16507 };
16508 Ok(XmlTableColumn { name, option })
16509 }
16510
16511 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
16512 let mut arguments = vec![];
16513 if self.parse_keyword(Keyword::PASSING) {
16514 loop {
16515 let by_value =
16516 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
16517 let expr = self.parse_expr()?;
16518 let alias = if self.parse_keyword(Keyword::AS) {
16519 Some(self.parse_identifier()?)
16520 } else {
16521 None
16522 };
16523 arguments.push(XmlPassingArgument {
16524 expr,
16525 alias,
16526 by_value,
16527 });
16528 if !self.consume_token(&Token::Comma) {
16529 break;
16530 }
16531 }
16532 }
16533 Ok(XmlPassingClause { arguments })
16534 }
16535
    /// Parses a `SEMANTIC_VIEW(<name> ...)` table factor with its optional
    /// DIMENSIONS, METRICS, FACTS and WHERE clauses (each allowed at most
    /// once, in any order), followed by an optional alias.
    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
        self.expect_token(&Token::LParen)?;

        let name = self.parse_object_name(true)?;

        let mut dimensions = Vec::new();
        let mut metrics = Vec::new();
        let mut facts = Vec::new();
        let mut where_clause = None;

        // Clauses are accepted in any order until the closing ')';
        // duplicates of any single clause are rejected.
        while self.peek_token_ref().token != Token::RParen {
            if self.parse_keyword(Keyword::DIMENSIONS) {
                if !dimensions.is_empty() {
                    return Err(ParserError::ParserError(
                        "DIMENSIONS clause can only be specified once".to_string(),
                    ));
                }
                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::METRICS) {
                if !metrics.is_empty() {
                    return Err(ParserError::ParserError(
                        "METRICS clause can only be specified once".to_string(),
                    ));
                }
                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::FACTS) {
                if !facts.is_empty() {
                    return Err(ParserError::ParserError(
                        "FACTS clause can only be specified once".to_string(),
                    ));
                }
                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::WHERE) {
                if where_clause.is_some() {
                    return Err(ParserError::ParserError(
                        "WHERE clause can only be specified once".to_string(),
                    ));
                }
                where_clause = Some(self.parse_expr()?);
            } else {
                let tok = self.peek_token_ref();
                return parser_err!(
                    format!(
                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
                        tok.token
                    ),
                    tok.span.start
                )?;
            }
        }

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::SemanticView {
            name,
            dimensions,
            metrics,
            facts,
            where_clause,
            alias,
        })
    }
16603
    /// Parses the body of a `MATCH_RECOGNIZE(...)` clause applied to `table`
    /// (the MATCH_RECOGNIZE keyword was already consumed by the caller):
    /// PARTITION BY, ORDER BY, MEASURES, rows-per-match mode, AFTER MATCH
    /// SKIP, PATTERN, DEFINE, then an optional table alias.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // MEASURES <expr> [AS] <alias>, ... — the AS is optional.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // ONE ROW PER MATCH | ALL ROWS PER MATCH [empty-match handling]
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // AFTER MATCH SKIP <option>; the option is mandatory once the
        // AFTER MATCH SKIP prefix has been consumed.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // PATTERN (...) is mandatory.
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // DEFINE <symbol> AS <expr>, ... is mandatory.
        self.expect_keyword_is(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
16708
    /// Parses a primary MATCH_RECOGNIZE pattern element: the `^`/`$`
    /// anchors, an exclusion `{- symbol -}`, `PERMUTE(...)`, a parenthesized
    /// group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` start anchor.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` end anchor; `$` tokenizes as a placeholder.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}` exclusion syntax.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // PERMUTE(a, b, ...) — recognized only as an unquoted word.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // Parenthesized sub-pattern group.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: rewind and parse as a named symbol.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
16747
    /// Parses a base pattern followed by any number of repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` tokenizes as a placeholder.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` range or `{n,}` lower bound only.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: rewind and stop stacking repetitions.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
16802
16803 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
16804 let mut patterns = vec![self.parse_repetition_pattern()?];
16805 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
16806 patterns.push(self.parse_repetition_pattern()?);
16807 }
16808 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
16809 Ok([pattern]) => Ok(pattern),
16810 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
16811 }
16812 }
16813
16814 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
16815 let pattern = self.parse_concat_pattern()?;
16816 if self.consume_token(&Token::Pipe) {
16817 match self.parse_pattern()? {
16818 MatchRecognizePattern::Alternation(mut patterns) => {
16820 patterns.insert(0, pattern);
16821 Ok(MatchRecognizePattern::Alternation(patterns))
16822 }
16823 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
16824 }
16825 } else {
16826 Ok(pattern)
16827 }
16828 }
16829
    /// Parses an optional version / time-travel clause following a table
    /// name (`FOR SYSTEM_TIME AS OF`, `CHANGES(...)`, `AT`/`BEFORE`
    /// function forms, `TIMESTAMP AS OF`, `VERSION AS OF`), provided the
    /// dialect supports table versioning. Returns `Ok(None)` otherwise.
    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
        if self.dialect.supports_table_versioning() {
            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
            {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
            } else if self.peek_keyword(Keyword::CHANGES) {
                return self.parse_table_version_changes().map(Some);
            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
                // AT(...) / BEFORE(...) are parsed as ordinary function calls.
                let func_name = self.parse_object_name(true)?;
                let func = self.parse_function(func_name)?;
                return Ok(Some(TableVersion::Function(func)));
            } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::TimestampAsOf(expr)));
            } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
                // VERSION AS OF takes a numeric version literal.
                let expr = Expr::Value(self.parse_number_value()?);
                return Ok(Some(TableVersion::VersionAsOf(expr)));
            }
        }
        Ok(None)
    }
16853
16854 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
16865 let changes_name = self.parse_object_name(true)?;
16866 let changes = self.parse_function(changes_name)?;
16867 let at_name = self.parse_object_name(true)?;
16868 let at = self.parse_function(at_name)?;
16869 let end = if self.peek_keyword(Keyword::END) {
16870 let end_name = self.parse_object_name(true)?;
16871 Some(self.parse_function(end_name)?)
16872 } else {
16873 None
16874 };
16875 Ok(TableVersion::Changes { changes, at, end })
16876 }
16877
16878 pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
16881 if self.parse_keyword(Keyword::NESTED) {
16882 let _has_path_keyword = self.parse_keyword(Keyword::PATH);
16883 let path = self.parse_value()?;
16884 self.expect_keyword_is(Keyword::COLUMNS)?;
16885 let columns = self.parse_parenthesized(|p| {
16886 p.parse_comma_separated(Self::parse_json_table_column_def)
16887 })?;
16888 return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
16889 path,
16890 columns,
16891 }));
16892 }
16893 let name = self.parse_identifier()?;
16894 if self.parse_keyword(Keyword::FOR) {
16895 self.expect_keyword_is(Keyword::ORDINALITY)?;
16896 return Ok(JsonTableColumn::ForOrdinality(name));
16897 }
16898 let r#type = self.parse_data_type()?;
16899 let exists = self.parse_keyword(Keyword::EXISTS);
16900 self.expect_keyword_is(Keyword::PATH)?;
16901 let path = self.parse_value()?;
16902 let mut on_empty = None;
16903 let mut on_error = None;
16904 while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
16905 if self.parse_keyword(Keyword::EMPTY) {
16906 on_empty = Some(error_handling);
16907 } else {
16908 self.expect_keyword_is(Keyword::ERROR)?;
16909 on_error = Some(error_handling);
16910 }
16911 }
16912 Ok(JsonTableColumn::Named(JsonTableNamedColumn {
16913 name,
16914 r#type,
16915 path,
16916 exists,
16917 on_empty,
16918 on_error,
16919 }))
16920 }
16921
16922 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
16930 let name = self.parse_identifier()?;
16931 let r#type = self.parse_data_type()?;
16932 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
16933 self.next_token();
16934 Some(path)
16935 } else {
16936 None
16937 };
16938 let as_json = self.parse_keyword(Keyword::AS);
16939 if as_json {
16940 self.expect_keyword_is(Keyword::JSON)?;
16941 }
16942 Ok(OpenJsonTableColumn {
16943 name,
16944 r#type,
16945 path,
16946 as_json,
16947 })
16948 }
16949
16950 fn parse_json_table_column_error_handling(
16951 &mut self,
16952 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
16953 let res = if self.parse_keyword(Keyword::NULL) {
16954 JsonTableColumnErrorHandling::Null
16955 } else if self.parse_keyword(Keyword::ERROR) {
16956 JsonTableColumnErrorHandling::Error
16957 } else if self.parse_keyword(Keyword::DEFAULT) {
16958 JsonTableColumnErrorHandling::Default(self.parse_value()?)
16959 } else {
16960 return Ok(None);
16961 };
16962 self.expect_keyword_is(Keyword::ON)?;
16963 Ok(Some(res))
16964 }
16965
16966 pub fn parse_derived_table_factor(
16968 &mut self,
16969 lateral: IsLateral,
16970 ) -> Result<TableFactor, ParserError> {
16971 let subquery = self.parse_query()?;
16972 self.expect_token(&Token::RParen)?;
16973 let alias = self.maybe_parse_table_alias()?;
16974
16975 let sample = self
16977 .maybe_parse_table_sample()?
16978 .map(TableSampleKind::AfterTableAlias);
16979
16980 Ok(TableFactor::Derived {
16981 lateral: match lateral {
16982 Lateral => true,
16983 NotLateral => false,
16984 },
16985 subquery,
16986 alias,
16987 sample,
16988 })
16989 }
16990
16991 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17014 let expr = self.parse_expr()?;
17015 let alias = if self.parse_keyword(Keyword::AS) {
17016 Some(self.parse_identifier()?)
17017 } else {
17018 None
17019 };
17020
17021 Ok(ExprWithAlias { expr, alias })
17022 }
17023
17024 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17028 let expr = self.parse_expr()?;
17029 let alias = self.parse_identifier_optional_alias()?;
17030 Ok(ExprWithAlias { expr, alias })
17031 }
17032
17033 fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
17035 let function_name = match self.next_token().token {
17036 Token::Word(w) => Ok(w.value),
17037 _ => self.expected_ref("a function identifier", self.peek_token_ref()),
17038 }?;
17039 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
17040 let alias = {
17041 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
17042 kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
17044 }
17045 self.parse_optional_alias_inner(None, validator)?
17046 };
17047 Ok(ExprWithAlias { expr, alias })
17048 }
17049
    /// Parse the body of a `PIVOT` operator applied to `table`:
    /// `(<aggregates> FOR <value column(s)> IN (<values>)
    /// [DEFAULT ON NULL (<expr>)]) [alias]`.
    /// The opening `(` is the first token consumed here, so the `PIVOT`
    /// keyword is expected to have been consumed by the caller.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions =
            self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // The value column is either a parenthesized list or a single
        // expression; both are parsed at BETWEEN precedence.
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        self.expect_token(&Token::LParen)?;
        // The IN list is one of: `ANY [ORDER BY ...]`, a subquery, or an
        // explicit list of (optionally aliased) expressions.
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(
                self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
            )
        };
        self.expect_token(&Token::RParen)?;

        // Optional `DEFAULT ON NULL (<expr>)` clause.
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
17106
17107 pub fn parse_unpivot_table_factor(
17109 &mut self,
17110 table: TableFactor,
17111 ) -> Result<TableFactor, ParserError> {
17112 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17113 self.expect_keyword_is(Keyword::NULLS)?;
17114 Some(NullInclusion::IncludeNulls)
17115 } else if self.parse_keyword(Keyword::EXCLUDE) {
17116 self.expect_keyword_is(Keyword::NULLS)?;
17117 Some(NullInclusion::ExcludeNulls)
17118 } else {
17119 None
17120 };
17121 self.expect_token(&Token::LParen)?;
17122 let value = self.parse_expr()?;
17123 self.expect_keyword_is(Keyword::FOR)?;
17124 let name = self.parse_identifier()?;
17125 self.expect_keyword_is(Keyword::IN)?;
17126 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17127 p.parse_expr_with_alias()
17128 })?;
17129 self.expect_token(&Token::RParen)?;
17130 let alias = self.maybe_parse_table_alias()?;
17131 Ok(TableFactor::Unpivot {
17132 table: Box::new(table),
17133 value,
17134 null_inclusion,
17135 name,
17136 columns,
17137 alias,
17138 })
17139 }
17140
17141 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17143 if natural {
17144 Ok(JoinConstraint::Natural)
17145 } else if self.parse_keyword(Keyword::ON) {
17146 let constraint = self.parse_expr()?;
17147 Ok(JoinConstraint::On(constraint))
17148 } else if self.parse_keyword(Keyword::USING) {
17149 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17150 Ok(JoinConstraint::Using(columns))
17151 } else {
17152 Ok(JoinConstraint::None)
17153 }
17155 }
17156
17157 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17159 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17160
17161 self.expect_keyword_is(Keyword::TO)?;
17162 let grantees = self.parse_grantees()?;
17163
17164 let with_grant_option =
17165 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17166
17167 let current_grants =
17168 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17169 Some(CurrentGrantsKind::CopyCurrentGrants)
17170 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17171 Some(CurrentGrantsKind::RevokeCurrentGrants)
17172 } else {
17173 None
17174 };
17175
17176 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17177 Some(self.parse_identifier()?)
17178 } else {
17179 None
17180 };
17181
17182 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17183 Some(self.parse_identifier()?)
17184 } else {
17185 None
17186 };
17187
17188 Ok(Grant {
17189 privileges,
17190 objects,
17191 grantees,
17192 with_grant_option,
17193 as_grantor,
17194 granted_by,
17195 current_grants,
17196 })
17197 }
17198
    /// Parse a comma-separated list of grantees, each optionally prefixed by
    /// a grantee type keyword (ROLE, USER, SHARE, GROUP, PUBLIC,
    /// DATABASE ROLE, APPLICATION [ROLE]).
    ///
    /// A type keyword seen on one grantee carries over to subsequent
    /// grantees in the list that have no explicit keyword of their own.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            // Try to consume an explicit grantee-type keyword; otherwise
            // reuse the type from the previous iteration.
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                grantee_type.clone() };

            // If the dialect reserves this keyword (it is not a grantee type
            // there), rewind so it is re-parsed as part of the grantee name.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC has no name of its own.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // `namespace:name` form — fold both parts back into a
                    // single identifier.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17266
17267 pub fn parse_grant_deny_revoke_privileges_objects(
17269 &mut self,
17270 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17271 let privileges = if self.parse_keyword(Keyword::ALL) {
17272 Privileges::All {
17273 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17274 }
17275 } else {
17276 let actions = self.parse_actions_list()?;
17277 Privileges::Actions(actions)
17278 };
17279
17280 let objects = if self.parse_keyword(Keyword::ON) {
17281 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17282 Some(GrantObjects::AllTablesInSchema {
17283 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17284 })
17285 } else if self.parse_keywords(&[
17286 Keyword::ALL,
17287 Keyword::EXTERNAL,
17288 Keyword::TABLES,
17289 Keyword::IN,
17290 Keyword::SCHEMA,
17291 ]) {
17292 Some(GrantObjects::AllExternalTablesInSchema {
17293 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17294 })
17295 } else if self.parse_keywords(&[
17296 Keyword::ALL,
17297 Keyword::VIEWS,
17298 Keyword::IN,
17299 Keyword::SCHEMA,
17300 ]) {
17301 Some(GrantObjects::AllViewsInSchema {
17302 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17303 })
17304 } else if self.parse_keywords(&[
17305 Keyword::ALL,
17306 Keyword::MATERIALIZED,
17307 Keyword::VIEWS,
17308 Keyword::IN,
17309 Keyword::SCHEMA,
17310 ]) {
17311 Some(GrantObjects::AllMaterializedViewsInSchema {
17312 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17313 })
17314 } else if self.parse_keywords(&[
17315 Keyword::ALL,
17316 Keyword::FUNCTIONS,
17317 Keyword::IN,
17318 Keyword::SCHEMA,
17319 ]) {
17320 Some(GrantObjects::AllFunctionsInSchema {
17321 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17322 })
17323 } else if self.parse_keywords(&[
17324 Keyword::FUTURE,
17325 Keyword::SCHEMAS,
17326 Keyword::IN,
17327 Keyword::DATABASE,
17328 ]) {
17329 Some(GrantObjects::FutureSchemasInDatabase {
17330 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17331 })
17332 } else if self.parse_keywords(&[
17333 Keyword::FUTURE,
17334 Keyword::TABLES,
17335 Keyword::IN,
17336 Keyword::SCHEMA,
17337 ]) {
17338 Some(GrantObjects::FutureTablesInSchema {
17339 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17340 })
17341 } else if self.parse_keywords(&[
17342 Keyword::FUTURE,
17343 Keyword::EXTERNAL,
17344 Keyword::TABLES,
17345 Keyword::IN,
17346 Keyword::SCHEMA,
17347 ]) {
17348 Some(GrantObjects::FutureExternalTablesInSchema {
17349 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17350 })
17351 } else if self.parse_keywords(&[
17352 Keyword::FUTURE,
17353 Keyword::VIEWS,
17354 Keyword::IN,
17355 Keyword::SCHEMA,
17356 ]) {
17357 Some(GrantObjects::FutureViewsInSchema {
17358 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17359 })
17360 } else if self.parse_keywords(&[
17361 Keyword::FUTURE,
17362 Keyword::MATERIALIZED,
17363 Keyword::VIEWS,
17364 Keyword::IN,
17365 Keyword::SCHEMA,
17366 ]) {
17367 Some(GrantObjects::FutureMaterializedViewsInSchema {
17368 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17369 })
17370 } else if self.parse_keywords(&[
17371 Keyword::ALL,
17372 Keyword::SEQUENCES,
17373 Keyword::IN,
17374 Keyword::SCHEMA,
17375 ]) {
17376 Some(GrantObjects::AllSequencesInSchema {
17377 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17378 })
17379 } else if self.parse_keywords(&[
17380 Keyword::FUTURE,
17381 Keyword::SEQUENCES,
17382 Keyword::IN,
17383 Keyword::SCHEMA,
17384 ]) {
17385 Some(GrantObjects::FutureSequencesInSchema {
17386 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17387 })
17388 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
17389 Some(GrantObjects::ResourceMonitors(
17390 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17391 ))
17392 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
17393 Some(GrantObjects::ComputePools(
17394 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17395 ))
17396 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
17397 Some(GrantObjects::FailoverGroup(
17398 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17399 ))
17400 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
17401 Some(GrantObjects::ReplicationGroup(
17402 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17403 ))
17404 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
17405 Some(GrantObjects::ExternalVolumes(
17406 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17407 ))
17408 } else {
17409 let object_type = self.parse_one_of_keywords(&[
17410 Keyword::SEQUENCE,
17411 Keyword::DATABASE,
17412 Keyword::SCHEMA,
17413 Keyword::TABLE,
17414 Keyword::VIEW,
17415 Keyword::WAREHOUSE,
17416 Keyword::INTEGRATION,
17417 Keyword::VIEW,
17418 Keyword::WAREHOUSE,
17419 Keyword::INTEGRATION,
17420 Keyword::USER,
17421 Keyword::CONNECTION,
17422 Keyword::PROCEDURE,
17423 Keyword::FUNCTION,
17424 ]);
17425 let objects =
17426 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
17427 match object_type {
17428 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
17429 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
17430 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
17431 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
17432 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
17433 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
17434 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
17435 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
17436 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
17437 if let Some(name) = objects?.first() {
17438 self.parse_grant_procedure_or_function(name, &kw)?
17439 } else {
17440 self.expected_ref("procedure or function name", self.peek_token_ref())?
17441 }
17442 }
17443 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
17444 Some(unexpected_keyword) => return Err(ParserError::ParserError(
17445 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
17446 )),
17447 }
17448 }
17449 } else {
17450 None
17451 };
17452
17453 Ok((privileges, objects))
17454 }
17455
17456 fn parse_grant_procedure_or_function(
17457 &mut self,
17458 name: &ObjectName,
17459 kw: &Option<Keyword>,
17460 ) -> Result<Option<GrantObjects>, ParserError> {
17461 let arg_types = if self.consume_token(&Token::LParen) {
17462 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
17463 self.expect_token(&Token::RParen)?;
17464 list
17465 } else {
17466 vec![]
17467 };
17468 match kw {
17469 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
17470 name: name.clone(),
17471 arg_types,
17472 })),
17473 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
17474 name: name.clone(),
17475 arg_types,
17476 })),
17477 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
17478 }
17479 }
17480
    /// Parse a single privilege (action) in a GRANT/DENY/REVOKE privilege
    /// list.
    ///
    /// Multi-keyword privileges are matched before single-keyword ones that
    /// share a prefix (e.g. `READ SESSION` before `READ`, `MANAGE VERSIONS`
    /// before `MANAGE`), so the order of the branches below is significant.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Helper: parse an optional parenthesized column list; an absent or
        // empty list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-keyword privileges first.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-keyword privileges (some take a sub-type or column list).
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
17601
17602 fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
17603 if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
17605 Some(ActionCreateObjectType::ApplicationPackage)
17606 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
17607 Some(ActionCreateObjectType::ComputePool)
17608 } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
17609 Some(ActionCreateObjectType::DataExchangeListing)
17610 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
17611 Some(ActionCreateObjectType::ExternalVolume)
17612 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
17613 Some(ActionCreateObjectType::FailoverGroup)
17614 } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
17615 Some(ActionCreateObjectType::NetworkPolicy)
17616 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
17617 Some(ActionCreateObjectType::OrganiationListing)
17618 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
17619 Some(ActionCreateObjectType::ReplicationGroup)
17620 }
17621 else if self.parse_keyword(Keyword::ACCOUNT) {
17623 Some(ActionCreateObjectType::Account)
17624 } else if self.parse_keyword(Keyword::APPLICATION) {
17625 Some(ActionCreateObjectType::Application)
17626 } else if self.parse_keyword(Keyword::DATABASE) {
17627 Some(ActionCreateObjectType::Database)
17628 } else if self.parse_keyword(Keyword::INTEGRATION) {
17629 Some(ActionCreateObjectType::Integration)
17630 } else if self.parse_keyword(Keyword::ROLE) {
17631 Some(ActionCreateObjectType::Role)
17632 } else if self.parse_keyword(Keyword::SCHEMA) {
17633 Some(ActionCreateObjectType::Schema)
17634 } else if self.parse_keyword(Keyword::SHARE) {
17635 Some(ActionCreateObjectType::Share)
17636 } else if self.parse_keyword(Keyword::USER) {
17637 Some(ActionCreateObjectType::User)
17638 } else if self.parse_keyword(Keyword::WAREHOUSE) {
17639 Some(ActionCreateObjectType::Warehouse)
17640 } else {
17641 None
17642 }
17643 }
17644
17645 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
17646 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
17647 Ok(ActionApplyType::AggregationPolicy)
17648 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
17649 Ok(ActionApplyType::AuthenticationPolicy)
17650 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
17651 Ok(ActionApplyType::JoinPolicy)
17652 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
17653 Ok(ActionApplyType::MaskingPolicy)
17654 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
17655 Ok(ActionApplyType::PackagesPolicy)
17656 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
17657 Ok(ActionApplyType::PasswordPolicy)
17658 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
17659 Ok(ActionApplyType::ProjectionPolicy)
17660 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
17661 Ok(ActionApplyType::RowAccessPolicy)
17662 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
17663 Ok(ActionApplyType::SessionPolicy)
17664 } else if self.parse_keyword(Keyword::TAG) {
17665 Ok(ActionApplyType::Tag)
17666 } else {
17667 self.expected_ref("GRANT APPLY type", self.peek_token_ref())
17668 }
17669 }
17670
17671 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
17672 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
17673 Some(ActionExecuteObjectType::DataMetricFunction)
17674 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
17675 Some(ActionExecuteObjectType::ManagedAlert)
17676 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
17677 Some(ActionExecuteObjectType::ManagedTask)
17678 } else if self.parse_keyword(Keyword::ALERT) {
17679 Some(ActionExecuteObjectType::Alert)
17680 } else if self.parse_keyword(Keyword::TASK) {
17681 Some(ActionExecuteObjectType::Task)
17682 } else {
17683 None
17684 }
17685 }
17686
17687 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
17688 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
17689 Ok(ActionManageType::AccountSupportCases)
17690 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
17691 Ok(ActionManageType::EventSharing)
17692 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
17693 Ok(ActionManageType::ListingAutoFulfillment)
17694 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
17695 Ok(ActionManageType::OrganizationSupportCases)
17696 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
17697 Ok(ActionManageType::UserSupportCases)
17698 } else if self.parse_keyword(Keyword::GRANTS) {
17699 Ok(ActionManageType::Grants)
17700 } else if self.parse_keyword(Keyword::WAREHOUSES) {
17701 Ok(ActionManageType::Warehouses)
17702 } else {
17703 self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
17704 }
17705 }
17706
17707 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
17708 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
17709 Some(ActionModifyType::LogLevel)
17710 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
17711 Some(ActionModifyType::TraceLevel)
17712 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
17713 Some(ActionModifyType::SessionLogLevel)
17714 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
17715 Some(ActionModifyType::SessionTraceLevel)
17716 } else {
17717 None
17718 }
17719 }
17720
17721 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
17722 if self.parse_keyword(Keyword::EXECUTION) {
17723 Some(ActionMonitorType::Execution)
17724 } else if self.parse_keyword(Keyword::SECURITY) {
17725 Some(ActionMonitorType::Security)
17726 } else if self.parse_keyword(Keyword::USAGE) {
17727 Some(ActionMonitorType::Usage)
17728 } else {
17729 None
17730 }
17731 }
17732
17733 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
17735 let mut name = self.parse_object_name(false)?;
17736 if self.dialect.supports_user_host_grantee()
17737 && name.0.len() == 1
17738 && name.0[0].as_ident().is_some()
17739 && self.consume_token(&Token::AtSign)
17740 {
17741 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
17742 let host = self.parse_identifier()?;
17743 Ok(GranteeName::UserHost { user, host })
17744 } else {
17745 Ok(GranteeName::ObjectName(name))
17746 }
17747 }
17748
17749 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
17751 self.expect_keyword(Keyword::DENY)?;
17752
17753 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17754 let objects = match objects {
17755 Some(o) => o,
17756 None => {
17757 return parser_err!(
17758 "DENY statements must specify an object",
17759 self.peek_token_ref().span.start
17760 )
17761 }
17762 };
17763
17764 self.expect_keyword_is(Keyword::TO)?;
17765 let grantees = self.parse_grantees()?;
17766 let cascade = self.parse_cascade_option();
17767 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
17768 Some(self.parse_identifier()?)
17769 } else {
17770 None
17771 };
17772
17773 Ok(Statement::Deny(DenyStatement {
17774 privileges,
17775 objects,
17776 grantees,
17777 cascade,
17778 granted_by,
17779 }))
17780 }
17781
17782 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
17784 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17785
17786 self.expect_keyword_is(Keyword::FROM)?;
17787 let grantees = self.parse_grantees()?;
17788
17789 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17790 Some(self.parse_identifier()?)
17791 } else {
17792 None
17793 };
17794
17795 let cascade = self.parse_cascade_option();
17796
17797 Ok(Revoke {
17798 privileges,
17799 objects,
17800 grantees,
17801 granted_by,
17802 cascade,
17803 })
17804 }
17805
17806 pub fn parse_replace(
17808 &mut self,
17809 replace_token: TokenWithSpan,
17810 ) -> Result<Statement, ParserError> {
17811 if !dialect_of!(self is MySqlDialect | GenericDialect) {
17812 return parser_err!(
17813 "Unsupported statement REPLACE",
17814 self.peek_token_ref().span.start
17815 );
17816 }
17817
17818 let mut insert = self.parse_insert(replace_token)?;
17819 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
17820 *replace_into = true;
17821 }
17822
17823 Ok(insert)
17824 }
17825
17826 fn parse_insert_setexpr_boxed(
17830 &mut self,
17831 insert_token: TokenWithSpan,
17832 ) -> Result<Box<SetExpr>, ParserError> {
17833 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
17834 }
17835
    /// Parse an `INSERT` statement (or Hive `INSERT ... DIRECTORY`). The
    /// `INSERT` (or `REPLACE`) keyword itself has already been consumed and
    /// is passed in as `insert_token`.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite `OR <action>` / bare `REPLACE` conflict clause.
        let or = self.parse_conflict_clause();
        // MySQL `LOW_PRIORITY | DELAYED | HIGH_PRIORITY` modifier.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Always false here; `parse_replace` flips it after this returns.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // Hive `INSERT OVERWRITE [LOCAL] DIRECTORY '<path>' ...` form.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Regular INSERT into a table (or table function).
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional table alias; only attempted when the next tokens cannot
            // be confused with a subquery or a DEFAULT/VALUES clause.
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    // Implicit alias: backtracks if the next token is not an
                    // identifier.
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            // Column list, PARTITION clause, source query / SET assignments.
            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT ... DEFAULT VALUES` carries none of these.
                (vec![], None, vec![], None, None, vec![])
            } else {
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                // FORMAT/SETTINGS terminate the column section without a
                // source query (ClickHouse).
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    // MySQL-style `INSERT ... SET col = val, ...`.
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // ClickHouse `SETTINGS ...` and `FORMAT ...` clauses.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias (col_aliases)` used by ON DUPLICATE KEY UPDATE.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ...` (Postgres/SQLite) or
            // `ON DUPLICATE KEY UPDATE ...` (MySQL).
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18062
18063 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18067 let ident = self.parse_identifier()?;
18068 let values = self
18069 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18070 .unwrap_or_default();
18071
18072 Ok(InputFormatClause { ident, values })
18073 }
18074
18075 fn peek_subquery_start(&mut self) -> bool {
18078 matches!(
18079 self.peek_tokens_ref(),
18080 [
18081 TokenWithSpan {
18082 token: Token::LParen,
18083 ..
18084 },
18085 TokenWithSpan {
18086 token: Token::Word(Word {
18087 keyword: Keyword::SELECT,
18088 ..
18089 }),
18090 ..
18091 },
18092 ]
18093 )
18094 }
18095
18096 fn peek_subquery_or_cte_start(&mut self) -> bool {
18100 matches!(
18101 self.peek_tokens_ref(),
18102 [
18103 TokenWithSpan {
18104 token: Token::LParen,
18105 ..
18106 },
18107 TokenWithSpan {
18108 token: Token::Word(Word {
18109 keyword: Keyword::SELECT | Keyword::WITH,
18110 ..
18111 }),
18112 ..
18113 },
18114 ]
18115 )
18116 }
18117
18118 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
18119 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
18120 Some(SqliteOnConflict::Replace)
18121 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
18122 Some(SqliteOnConflict::Rollback)
18123 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
18124 Some(SqliteOnConflict::Abort)
18125 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
18126 Some(SqliteOnConflict::Fail)
18127 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
18128 Some(SqliteOnConflict::Ignore)
18129 } else if self.parse_keyword(Keyword::REPLACE) {
18130 Some(SqliteOnConflict::Replace)
18131 } else {
18132 None
18133 }
18134 }
18135
18136 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18138 if self.parse_keyword(Keyword::PARTITION) {
18139 self.expect_token(&Token::LParen)?;
18140 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18141 self.expect_token(&Token::RParen)?;
18142 Ok(partition_cols)
18143 } else {
18144 Ok(None)
18145 }
18146 }
18147
18148 pub fn parse_load_data_table_format(
18150 &mut self,
18151 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18152 if self.parse_keyword(Keyword::INPUTFORMAT) {
18153 let input_format = self.parse_expr()?;
18154 self.expect_keyword_is(Keyword::SERDE)?;
18155 let serde = self.parse_expr()?;
18156 Ok(Some(HiveLoadDataFormat {
18157 input_format,
18158 serde,
18159 }))
18160 } else {
18161 Ok(None)
18162 }
18163 }
18164
18165 fn parse_update_setexpr_boxed(
18169 &mut self,
18170 update_token: TokenWithSpan,
18171 ) -> Result<Box<SetExpr>, ParserError> {
18172 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18173 }
18174
18175 pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
18177 let optimizer_hints = self.maybe_parse_optimizer_hints()?;
18178 let or = self.parse_conflict_clause();
18179 let table = self.parse_table_and_joins()?;
18180 let from_before_set = if self.parse_keyword(Keyword::FROM) {
18181 Some(UpdateTableFromKind::BeforeSet(
18182 self.parse_table_with_joins()?,
18183 ))
18184 } else {
18185 None
18186 };
18187 self.expect_keyword(Keyword::SET)?;
18188 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
18189
18190 let output = self.maybe_parse_output_clause()?;
18191
18192 let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
18193 Some(UpdateTableFromKind::AfterSet(
18194 self.parse_table_with_joins()?,
18195 ))
18196 } else {
18197 from_before_set
18198 };
18199 let selection = if self.parse_keyword(Keyword::WHERE) {
18200 Some(self.parse_expr()?)
18201 } else {
18202 None
18203 };
18204 let returning = if self.parse_keyword(Keyword::RETURNING) {
18205 Some(self.parse_comma_separated(Parser::parse_select_item)?)
18206 } else {
18207 None
18208 };
18209 let order_by = if self.dialect.supports_update_order_by()
18210 && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
18211 {
18212 self.parse_comma_separated(Parser::parse_order_by_expr)?
18213 } else {
18214 vec![]
18215 };
18216 let limit = if self.parse_keyword(Keyword::LIMIT) {
18217 Some(self.parse_expr()?)
18218 } else {
18219 None
18220 };
18221 Ok(Update {
18222 update_token: update_token.into(),
18223 optimizer_hints,
18224 table,
18225 assignments,
18226 from,
18227 selection,
18228 returning,
18229 output,
18230 or,
18231 order_by,
18232 limit,
18233 }
18234 .into())
18235 }
18236
18237 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18239 let target = self.parse_assignment_target()?;
18240 self.expect_token(&Token::Eq)?;
18241 let value = self.parse_expr()?;
18242 Ok(Assignment { target, value })
18243 }
18244
18245 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18247 if self.consume_token(&Token::LParen) {
18248 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18249 self.expect_token(&Token::RParen)?;
18250 Ok(AssignmentTarget::Tuple(columns))
18251 } else {
18252 let column = self.parse_object_name(false)?;
18253 Ok(AssignmentTarget::ColumnName(column))
18254 }
18255 }
18256
    /// Parse one function call argument: a named argument (`name <op> expr`)
    /// when the dialect supports it, otherwise an unnamed (possibly
    /// wildcard) expression.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // First try the named-argument form; `maybe_parse` rewinds the token
        // stream if the closure fails.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // The argument name may itself be an arbitrary expression.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // Otherwise the argument name must be a plain identifier.
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Fall back to an unnamed argument.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                // A bare `*` may carry trailing options (ILIKE/EXCLUDE/...).
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                // Only wrap when at least one option was actually present.
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18306
    /// Parse the operator separating a named function argument's name from
    /// its value; which operators are accepted is dialect-dependent.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        // `name VALUE expr` keyword form.
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not an argument operator: push the token back before
                // erroring so the caller's `maybe_parse` can backtrack.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
18335
18336 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
18338 if self.consume_token(&Token::RParen) {
18339 Ok(vec![])
18340 } else {
18341 let args = self.parse_comma_separated(Parser::parse_function_args)?;
18342 self.expect_token(&Token::RParen)?;
18343 Ok(args)
18344 }
18345 }
18346
    /// Parse the arguments of a table function, including an optional
    /// trailing ClickHouse `SETTINGS` list; the opening `(` has already
    /// been consumed.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Collect comma-separated args until either a SETTINGS list appears
        // or the comma-separated list ends.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
18367
    /// Parse the contents of a function call's parenthesized argument list,
    /// including optional clauses (ALL/DISTINCT, null treatment, ORDER BY,
    /// LIMIT, HAVING, SEPARATOR, ON OVERFLOW, JSON NULL/RETURNING); the
    /// opening `(` has already been consumed.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON clauses may appear before the arguments in some dialects.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list (possibly after the JSON clauses above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // `IGNORE NULLS` / `RESPECT NULLS` inside the argument list.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        // Aggregate ordering, e.g. `ARRAY_AGG(x ORDER BY y)`.
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery `HAVING {MIN | MAX} expr` bound.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // Defensive: expect_one_of_keywords only returns listed keywords.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL `GROUP_CONCAT(... SEPARATOR 'sep')`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        // `LISTAGG(... ON OVERFLOW ...)`.
        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON clauses may also appear after the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
18461
18462 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
18463 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
18464 Some(JsonNullClause::AbsentOnNull)
18465 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
18466 Some(JsonNullClause::NullOnNull)
18467 } else {
18468 None
18469 }
18470 }
18471
18472 fn maybe_parse_json_returning_clause(
18473 &mut self,
18474 ) -> Result<Option<JsonReturningClause>, ParserError> {
18475 if self.parse_keyword(Keyword::RETURNING) {
18476 let data_type = self.parse_data_type()?;
18477 Ok(Some(JsonReturningClause { data_type }))
18478 } else {
18479 Ok(None)
18480 }
18481 }
18482
18483 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
18484 let loc = self.peek_token_ref().span.start;
18485 match (
18486 self.parse_keyword(Keyword::ALL),
18487 self.parse_keyword(Keyword::DISTINCT),
18488 ) {
18489 (true, false) => Ok(Some(DuplicateTreatment::All)),
18490 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
18491 (false, false) => Ok(None),
18492 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
18493 }
18494 }
18495
    /// Parse a single projection item of a SELECT list: a wildcard,
    /// qualified wildcard, or an expression with an optional alias.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Dialect-specific reserved keyword allowed as an operator prefix
        // before a select item.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `alias.*` / `schema.table.*`
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*`
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // A lone unquoted `from` here almost certainly means the
            // projection expression is missing; report it rather than
            // treating it as an identifier.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // MSSQL-style `alias = expr` assignment aliasing.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // Guaranteed by the `matches!` guard above.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — wildcard qualified by an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                // The `*` just consumed is the wildcard token.
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `expr AS (alias1, alias2, ...)` multi-column alias.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression with an optional single alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
18570
18571 pub fn parse_wildcard_additional_options(
18575 &mut self,
18576 wildcard_token: TokenWithSpan,
18577 ) -> Result<WildcardAdditionalOptions, ParserError> {
18578 let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
18579 self.parse_optional_select_item_ilike()?
18580 } else {
18581 None
18582 };
18583 let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
18584 {
18585 self.parse_optional_select_item_exclude()?
18586 } else {
18587 None
18588 };
18589 let opt_except = if self.dialect.supports_select_wildcard_except() {
18590 self.parse_optional_select_item_except()?
18591 } else {
18592 None
18593 };
18594 let opt_replace = if self.dialect.supports_select_wildcard_replace() {
18595 self.parse_optional_select_item_replace()?
18596 } else {
18597 None
18598 };
18599 let opt_rename = if self.dialect.supports_select_wildcard_rename() {
18600 self.parse_optional_select_item_rename()?
18601 } else {
18602 None
18603 };
18604
18605 let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
18606 self.maybe_parse_select_item_alias()?
18607 } else {
18608 None
18609 };
18610
18611 Ok(WildcardAdditionalOptions {
18612 wildcard_token: wildcard_token.into(),
18613 opt_ilike,
18614 opt_exclude,
18615 opt_except,
18616 opt_rename,
18617 opt_replace,
18618 opt_alias,
18619 })
18620 }
18621
18622 pub fn parse_optional_select_item_ilike(
18626 &mut self,
18627 ) -> Result<Option<IlikeSelectItem>, ParserError> {
18628 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
18629 let next_token = self.next_token();
18630 let pattern = match next_token.token {
18631 Token::SingleQuotedString(s) => s,
18632 _ => return self.expected("ilike pattern", next_token),
18633 };
18634 Some(IlikeSelectItem { pattern })
18635 } else {
18636 None
18637 };
18638 Ok(opt_ilike)
18639 }
18640
18641 pub fn parse_optional_select_item_exclude(
18645 &mut self,
18646 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
18647 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
18648 if self.consume_token(&Token::LParen) {
18649 let columns =
18650 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
18651 self.expect_token(&Token::RParen)?;
18652 Some(ExcludeSelectItem::Multiple(columns))
18653 } else {
18654 let column = self.parse_object_name(false)?;
18655 Some(ExcludeSelectItem::Single(column))
18656 }
18657 } else {
18658 None
18659 };
18660
18661 Ok(opt_exclude)
18662 }
18663
18664 pub fn parse_optional_select_item_except(
18668 &mut self,
18669 ) -> Result<Option<ExceptSelectItem>, ParserError> {
18670 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
18671 if self.peek_token_ref().token == Token::LParen {
18672 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
18673 match &idents[..] {
18674 [] => {
18675 return self.expected_ref(
18676 "at least one column should be parsed by the expect clause",
18677 self.peek_token_ref(),
18678 )?;
18679 }
18680 [first, idents @ ..] => Some(ExceptSelectItem {
18681 first_element: first.clone(),
18682 additional_elements: idents.to_vec(),
18683 }),
18684 }
18685 } else {
18686 let ident = self.parse_identifier()?;
18688 Some(ExceptSelectItem {
18689 first_element: ident,
18690 additional_elements: vec![],
18691 })
18692 }
18693 } else {
18694 None
18695 };
18696
18697 Ok(opt_except)
18698 }
18699
18700 pub fn parse_optional_select_item_rename(
18702 &mut self,
18703 ) -> Result<Option<RenameSelectItem>, ParserError> {
18704 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
18705 if self.consume_token(&Token::LParen) {
18706 let idents =
18707 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
18708 self.expect_token(&Token::RParen)?;
18709 Some(RenameSelectItem::Multiple(idents))
18710 } else {
18711 let ident = self.parse_identifier_with_alias()?;
18712 Some(RenameSelectItem::Single(ident))
18713 }
18714 } else {
18715 None
18716 };
18717
18718 Ok(opt_rename)
18719 }
18720
18721 pub fn parse_optional_select_item_replace(
18723 &mut self,
18724 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
18725 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
18726 if self.consume_token(&Token::LParen) {
18727 let items = self.parse_comma_separated(|parser| {
18728 Ok(Box::new(parser.parse_replace_elements()?))
18729 })?;
18730 self.expect_token(&Token::RParen)?;
18731 Some(ReplaceSelectItem { items })
18732 } else {
18733 let tok = self.next_token();
18734 return self.expected("( after REPLACE but", tok);
18735 }
18736 } else {
18737 None
18738 };
18739
18740 Ok(opt_replace)
18741 }
18742 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
18744 let expr = self.parse_expr()?;
18745 let as_keyword = self.parse_keyword(Keyword::AS);
18746 let ident = self.parse_identifier()?;
18747 Ok(ReplaceSelectElement {
18748 expr,
18749 column_name: ident,
18750 as_keyword,
18751 })
18752 }
18753
18754 pub fn parse_asc_desc(&mut self) -> Option<bool> {
18757 if self.parse_keyword(Keyword::ASC) {
18758 Some(true)
18759 } else if self.parse_keyword(Keyword::DESC) {
18760 Some(false)
18761 } else {
18762 None
18763 }
18764 }
18765
18766 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
18768 self.parse_order_by_expr_inner(false)
18769 .map(|(order_by, _)| order_by)
18770 }
18771
18772 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
18774 self.parse_order_by_expr_inner(true)
18775 .map(|(column, operator_class)| IndexColumn {
18776 column,
18777 operator_class,
18778 })
18779 }
18780
    /// Shared implementation for ORDER BY elements and CREATE INDEX columns.
    /// When `with_operator_class` is true, also attempts to parse an
    /// operator class name after the expression.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // An operator class name is ambiguous with the keywords that may
            // legally follow the expression, so only try to parse one when
            // none of those keywords comes next.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                // Backtracks if the next token is not an object name.
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // ClickHouse `WITH FILL [FROM ...] [TO ...] [STEP ...]`.
        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
18821
18822 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
18823 let asc = self.parse_asc_desc();
18824
18825 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
18826 Some(true)
18827 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
18828 Some(false)
18829 } else {
18830 None
18831 };
18832
18833 Ok(OrderByOptions { asc, nulls_first })
18834 }
18835
18836 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
18840 let from = if self.parse_keyword(Keyword::FROM) {
18841 Some(self.parse_expr()?)
18842 } else {
18843 None
18844 };
18845
18846 let to = if self.parse_keyword(Keyword::TO) {
18847 Some(self.parse_expr()?)
18848 } else {
18849 None
18850 };
18851
18852 let step = if self.parse_keyword(Keyword::STEP) {
18853 Some(self.parse_expr()?)
18854 } else {
18855 None
18856 };
18857
18858 Ok(WithFill { from, to, step })
18859 }
18860
18861 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
18864 if !self.parse_keyword(Keyword::INTERPOLATE) {
18865 return Ok(None);
18866 }
18867
18868 if self.consume_token(&Token::LParen) {
18869 let interpolations =
18870 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
18871 self.expect_token(&Token::RParen)?;
18872 return Ok(Some(Interpolate {
18874 exprs: Some(interpolations),
18875 }));
18876 }
18877
18878 Ok(Some(Interpolate { exprs: None }))
18880 }
18881
18882 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
18884 let column = self.parse_identifier()?;
18885 let expr = if self.parse_keyword(Keyword::AS) {
18886 Some(self.parse_expr()?)
18887 } else {
18888 None
18889 };
18890 Ok(InterpolateExpr { column, expr })
18891 }
18892
18893 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
18896 let quantity = if self.consume_token(&Token::LParen) {
18897 let quantity = self.parse_expr()?;
18898 self.expect_token(&Token::RParen)?;
18899 Some(TopQuantity::Expr(quantity))
18900 } else {
18901 let next_token = self.next_token();
18902 let quantity = match next_token.token {
18903 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
18904 _ => self.expected("literal int", next_token)?,
18905 };
18906 Some(TopQuantity::Constant(quantity))
18907 };
18908
18909 let percent = self.parse_keyword(Keyword::PERCENT);
18910
18911 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
18912
18913 Ok(Top {
18914 with_ties,
18915 percent,
18916 quantity,
18917 })
18918 }
18919
18920 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18922 if self.parse_keyword(Keyword::ALL) {
18923 Ok(None)
18924 } else {
18925 Ok(Some(self.parse_expr()?))
18926 }
18927 }
18928
18929 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
18931 let value = self.parse_expr()?;
18932 let rows = if self.parse_keyword(Keyword::ROW) {
18933 OffsetRows::Row
18934 } else if self.parse_keyword(Keyword::ROWS) {
18935 OffsetRows::Rows
18936 } else {
18937 OffsetRows::None
18938 };
18939 Ok(Offset { value, rows })
18940 }
18941
    /// Parse the body of a `FETCH { FIRST | NEXT } ...` clause (the `FETCH`
    /// keyword itself has already been consumed).
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        // FIRST/NEXT are interchangeable noise words; ignore whichever appears.
        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);

        // `FETCH FIRST ROWS ONLY` (no count) vs `FETCH FIRST <n> [PERCENT] [ROWS]`.
        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            // ROW/ROWS after the count is also optional noise.
            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
            (Some(quantity), percent)
        };

        // Trailing `ONLY` (no ties) or `WITH TIES`.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else {
            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
        };

        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
18970
    /// Parses a row-locking clause body: `{ UPDATE | SHARE } [OF <name>]
    /// [NOWAIT | SKIP LOCKED]` (the introducing `FOR` has already been
    /// consumed by the caller).
    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
            Keyword::UPDATE => LockType::Update,
            Keyword::SHARE => LockType::Share,
            // Unreachable: expect_one_of_keywords only returns listed keywords,
            // but the match must be exhaustive.
            unexpected_keyword => return Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
            )),
        };
        // Optional `OF <table name>` restriction.
        let of = if self.parse_keyword(Keyword::OF) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        // Optional non-blocking behavior.
        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
            Some(NonBlock::Nowait)
        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
            Some(NonBlock::SkipLocked)
        } else {
            None
        };
        Ok(LockClause {
            lock_type,
            of,
            nonblock,
        })
    }
18998
    /// Parses a `LOCK [TABLE] <target> [, ...] [IN <mode> MODE] [NOWAIT]`
    /// statement.
    pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
        self.expect_keyword(Keyword::LOCK)?;

        // `LOCK TABLES` (plural) is a different MySQL statement; reject it here.
        if self.peek_keyword(Keyword::TABLES) {
            return self.expected_ref("TABLE or a table name", self.peek_token_ref());
        }

        // The singular TABLE keyword is optional.
        let _ = self.parse_keyword(Keyword::TABLE);
        let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
        // Optional PostgreSQL-style `IN <lock mode> MODE` clause.
        let lock_mode = if self.parse_keyword(Keyword::IN) {
            let lock_mode = self.parse_lock_table_mode()?;
            self.expect_keyword(Keyword::MODE)?;
            Some(lock_mode)
        } else {
            None
        };
        let nowait = self.parse_keyword(Keyword::NOWAIT);

        Ok(Lock {
            tables,
            lock_mode,
            nowait,
        })
    }
19024
19025 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19026 let only = self.parse_keyword(Keyword::ONLY);
19027 let name = self.parse_object_name(false)?;
19028 let has_asterisk = self.consume_token(&Token::Mul);
19029
19030 Ok(LockTableTarget {
19031 name,
19032 only,
19033 has_asterisk,
19034 })
19035 }
19036
    /// Parses a PostgreSQL `LOCK TABLE` mode name (e.g. `ACCESS SHARE`,
    /// `ROW EXCLUSIVE`).
    ///
    /// Multi-word modes must be tried before their single-word prefixes so
    /// that e.g. `SHARE UPDATE EXCLUSIVE` is not mis-parsed as plain `SHARE`.
    fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
        if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
            Ok(LockTableMode::AccessShare)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::AccessExclusive)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
            Ok(LockTableMode::RowShare)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::RowExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareUpdateExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareRowExclusive)
        } else if self.parse_keyword(Keyword::SHARE) {
            Ok(LockTableMode::Share)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Ok(LockTableMode::Exclusive)
        } else {
            self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
        }
    }
19058
    /// Parses a `VALUES` row list (the introducing `VALUE`/`VALUES` keyword
    /// has already been consumed; `value_keyword` records which spelling the
    /// caller saw).
    ///
    /// `allow_empty` permits empty rows `()` (MySQL). A `ROW(...)` prefix on
    /// any row is accepted and recorded in `explicit_row`.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            // A single ROW prefix marks the entire VALUES list as explicit-row.
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Empty row `()`: consume the closing paren and yield no exprs.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
19088
19089 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19091 self.expect_keyword_is(Keyword::TRANSACTION)?;
19092 Ok(Statement::StartTransaction {
19093 modes: self.parse_transaction_modes()?,
19094 begin: false,
19095 transaction: Some(BeginTransactionKind::Transaction),
19096 modifier: None,
19097 statements: vec![],
19098 exception: None,
19099 has_end_keyword: false,
19100 })
19101 }
19102
19103 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19105 if !self.dialect.supports_start_transaction_modifier() {
19106 None
19107 } else if self.parse_keyword(Keyword::DEFERRED) {
19108 Some(TransactionModifier::Deferred)
19109 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19110 Some(TransactionModifier::Immediate)
19111 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19112 Some(TransactionModifier::Exclusive)
19113 } else if self.parse_keyword(Keyword::TRY) {
19114 Some(TransactionModifier::Try)
19115 } else if self.parse_keyword(Keyword::CATCH) {
19116 Some(TransactionModifier::Catch)
19117 } else {
19118 None
19119 }
19120 }
19121
19122 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19124 let modifier = self.parse_transaction_modifier();
19125 let transaction =
19126 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19127 {
19128 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19129 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19130 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19131 _ => None,
19132 };
19133 Ok(Statement::StartTransaction {
19134 modes: self.parse_transaction_modes()?,
19135 begin: true,
19136 transaction,
19137 modifier,
19138 statements: vec![],
19139 exception: None,
19140 has_end_keyword: false,
19141 })
19142 }
19143
    /// Parses the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block statement (the opening `BEGIN` has already been consumed).
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each iteration parses one `WHEN <ident> [OR <ident>]... THEN
            // <statements>` arm, until END terminates the handler list.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // Error identifiers joined by OR share a single handler arm.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // The OR separator is optional; maybe_parse backtracks
                    // when it is absent.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19189
19190 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19192 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19193 None
19194 } else if self.parse_keyword(Keyword::TRY) {
19195 Some(TransactionModifier::Try)
19196 } else if self.parse_keyword(Keyword::CATCH) {
19197 Some(TransactionModifier::Catch)
19198 } else {
19199 None
19200 };
19201 Ok(Statement::Commit {
19202 chain: self.parse_commit_rollback_chain()?,
19203 end: true,
19204 modifier,
19205 })
19206 }
19207
    /// Parses zero or more transaction modes (`ISOLATION LEVEL ...`,
    /// `READ ONLY`, `READ WRITE`), optionally separated by commas.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                // A preceding comma promised another mode; none followed.
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            // A trailing comma makes the next mode mandatory.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19246
19247 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19249 Ok(Statement::Commit {
19250 chain: self.parse_commit_rollback_chain()?,
19251 end: false,
19252 modifier: None,
19253 })
19254 }
19255
19256 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19258 let chain = self.parse_commit_rollback_chain()?;
19259 let savepoint = self.parse_rollback_savepoint()?;
19260
19261 Ok(Statement::Rollback { chain, savepoint })
19262 }
19263
19264 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19266 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19267 if self.parse_keyword(Keyword::AND) {
19268 let chain = !self.parse_keyword(Keyword::NO);
19269 self.expect_keyword_is(Keyword::CHAIN)?;
19270 Ok(chain)
19271 } else {
19272 Ok(false)
19273 }
19274 }
19275
19276 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19278 if self.parse_keyword(Keyword::TO) {
19279 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19280 let savepoint = self.parse_identifier()?;
19281
19282 Ok(Some(savepoint))
19283 } else {
19284 Ok(None)
19285 }
19286 }
19287
    /// Parses a T-SQL `RAISERROR (msg, severity, state [, args...])
    /// [WITH option [, ...]]` statement tail (the `RAISERROR` keyword has
    /// already been consumed).
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional substitution arguments after the mandatory triple.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        // Optional `WITH LOG | NOWAIT | SETERROR` option list.
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
19315
19316 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
19318 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
19319 Keyword::LOG => Ok(RaisErrorOption::Log),
19320 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
19321 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
19322 _ => self.expected_ref(
19323 "LOG, NOWAIT OR SETERROR raiserror option",
19324 self.peek_token_ref(),
19325 ),
19326 }
19327 }
19328
    /// Parses a T-SQL `THROW [error_number, message, state]` statement.
    /// A bare `THROW` (re-throw inside a CATCH block) carries no arguments.
    pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
        self.expect_keyword_is(Keyword::THROW)?;

        // The error number is optional; maybe_parse backtracks when absent.
        let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
        // Once a number is present, message and state become mandatory.
        let (message, state) = if error_number.is_some() {
            self.expect_token(&Token::Comma)?;
            let message = Box::new(self.parse_expr()?);
            self.expect_token(&Token::Comma)?;
            let state = Box::new(self.parse_expr()?);
            (Some(message), Some(state))
        } else {
            (None, None)
        };

        Ok(ThrowStatement {
            error_number,
            message,
            state,
        })
    }
19352
19353 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
19355 let prepare = self.parse_keyword(Keyword::PREPARE);
19356 let name = self.parse_identifier()?;
19357 Ok(Statement::Deallocate { name, prepare })
19358 }
19359
    /// Parses an `EXECUTE`/`EXEC` statement tail: an optional prepared
    /// statement name, an optional (possibly parenthesized) parameter list,
    /// and dialect-specific suffixes (`INTO`, `USING`, `OUTPUT`, `DEFAULT`).
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // `EXECUTE IMMEDIATE` and `EXECUTE (...)` carry no statement name.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Determine which token terminates the parameter list: the closing
        // paren, a suffix keyword, end of input, or a semicolon.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        // Optional `INTO <ident> [, ...]` result targets.
        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        // Optional `USING <expr> [AS alias] [, ...]` bindings.
        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
19419
19420 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
19422 let name = self.parse_identifier()?;
19423
19424 let mut data_types = vec![];
19425 if self.consume_token(&Token::LParen) {
19426 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
19427 self.expect_token(&Token::RParen)?;
19428 }
19429
19430 self.expect_keyword_is(Keyword::AS)?;
19431 let statement = Box::new(self.parse_statement()?);
19432 Ok(Statement::Prepare {
19433 name,
19434 data_types,
19435 statement,
19436 })
19437 }
19438
    /// Parses a Redshift-style `UNLOAD ('query' | (query)) TO <location>
    /// [IAM_ROLE ...] [WITH (...)] [legacy options]` statement.
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::UNLOAD)?;
        self.expect_token(&Token::LParen)?;
        // The payload is either a single-quoted query string or an inline query.
        let (query, query_text) =
            if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
                (None, Some(self.parse_literal_string()?))
            } else {
                (Some(self.parse_query()?), None)
            };
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;
        // Optional `IAM_ROLE ...` authorization clause.
        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
            Some(self.parse_iam_role_kind()?)
        } else {
            None
        };
        let with = self.parse_options(Keyword::WITH)?;
        // Trailing COPY-style legacy options, parsed greedily until none match.
        let mut options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            options.push(opt);
        }
        Ok(Statement::Unload {
            query,
            query_text,
            to,
            auth,
            with,
            options,
        })
    }
19472
19473 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
19474 let temporary = self
19475 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
19476 .is_some();
19477 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
19478 let table = self.parse_keyword(Keyword::TABLE);
19479 let name = self.parse_object_name(false)?;
19480
19481 Ok(SelectInto {
19482 temporary,
19483 unlogged,
19484 table,
19485 name,
19486 })
19487 }
19488
19489 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
19490 let v = self.parse_value()?;
19491 match &v.value {
19492 Value::SingleQuotedString(_) => Ok(v),
19493 Value::DoubleQuotedString(_) => Ok(v),
19494 Value::Number(_, _) => Ok(v),
19495 Value::Placeholder(_) => Ok(v),
19496 _ => {
19497 self.prev_token();
19498 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
19499 }
19500 }
19501 }
19502
19503 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
19505 let name = self.parse_object_name(false)?;
19506 if self.consume_token(&Token::LParen) {
19507 let value = self.parse_pragma_value()?;
19508 self.expect_token(&Token::RParen)?;
19509 Ok(Statement::Pragma {
19510 name,
19511 value: Some(value),
19512 is_eq: false,
19513 })
19514 } else if self.consume_token(&Token::Eq) {
19515 Ok(Statement::Pragma {
19516 name,
19517 value: Some(self.parse_pragma_value()?),
19518 is_eq: true,
19519 })
19520 } else {
19521 Ok(Statement::Pragma {
19522 name,
19523 value: None,
19524 is_eq: false,
19525 })
19526 }
19527 }
19528
19529 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
19531 let extension_name = self.parse_identifier()?;
19532
19533 Ok(Statement::Install { extension_name })
19534 }
19535
19536 pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
19538 if self.dialect.supports_load_extension() {
19539 let extension_name = self.parse_identifier()?;
19540 Ok(Statement::Load { extension_name })
19541 } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
19542 let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
19543 self.expect_keyword_is(Keyword::INPATH)?;
19544 let inpath = self.parse_literal_string()?;
19545 let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
19546 self.expect_keyword_is(Keyword::INTO)?;
19547 self.expect_keyword_is(Keyword::TABLE)?;
19548 let table_name = self.parse_object_name(false)?;
19549 let partitioned = self.parse_insert_partition()?;
19550 let table_format = self.parse_load_data_table_format()?;
19551 Ok(Statement::LoadData {
19552 local,
19553 inpath,
19554 overwrite,
19555 table_name,
19556 partitioned,
19557 table_format,
19558 })
19559 } else {
19560 self.expected_ref(
19561 "`DATA` or an extension name after `LOAD`",
19562 self.peek_token_ref(),
19563 )
19564 }
19565 }
19566
    /// Parses an `OPTIMIZE [TABLE] <name>` statement tail with its optional
    /// ClickHouse (`ON CLUSTER`, `PARTITION`, `FINAL`, `DEDUPLICATE`) and
    /// Databricks (`WHERE`, `ZORDER BY`) suffixes.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // `PARTITION ID '<id>'` vs `PARTITION <expr>`.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        // `DEDUPLICATE` alone or `DEDUPLICATE BY <expr>`.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // Databricks `ZORDER BY (<expr>, ...)`.
        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
19635
19636 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
19642 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
19644 let name = self.parse_object_name(false)?;
19646 let mut data_type: Option<DataType> = None;
19648 if self.parse_keywords(&[Keyword::AS]) {
19649 data_type = Some(self.parse_data_type()?)
19650 }
19651 let sequence_options = self.parse_create_sequence_options()?;
19652 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
19654 if self.parse_keywords(&[Keyword::NONE]) {
19655 Some(ObjectName::from(vec![Ident::new("NONE")]))
19656 } else {
19657 Some(self.parse_object_name(false)?)
19658 }
19659 } else {
19660 None
19661 };
19662 Ok(Statement::CreateSequence {
19663 temporary,
19664 if_not_exists,
19665 name,
19666 data_type,
19667 sequence_options,
19668 owned_by,
19669 })
19670 }
19671
19672 fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
19673 let mut sequence_options = vec![];
19674 if self.parse_keywords(&[Keyword::INCREMENT]) {
19676 if self.parse_keywords(&[Keyword::BY]) {
19677 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
19678 } else {
19679 sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
19680 }
19681 }
19682 if self.parse_keyword(Keyword::MINVALUE) {
19684 sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
19685 } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
19686 sequence_options.push(SequenceOptions::MinValue(None));
19687 }
19688 if self.parse_keywords(&[Keyword::MAXVALUE]) {
19690 sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
19691 } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
19692 sequence_options.push(SequenceOptions::MaxValue(None));
19693 }
19694
19695 if self.parse_keywords(&[Keyword::START]) {
19697 if self.parse_keywords(&[Keyword::WITH]) {
19698 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
19699 } else {
19700 sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
19701 }
19702 }
19703 if self.parse_keywords(&[Keyword::CACHE]) {
19705 sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
19706 }
19707 if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
19709 sequence_options.push(SequenceOptions::Cycle(true));
19710 } else if self.parse_keywords(&[Keyword::CYCLE]) {
19711 sequence_options.push(SequenceOptions::Cycle(false));
19712 }
19713
19714 Ok(sequence_options)
19715 }
19716
    /// Parses a PostgreSQL `CREATE SERVER [IF NOT EXISTS] <name>
    /// [TYPE '<t>'] [VERSION '<v>'] FOREIGN DATA WRAPPER <fdw>
    /// [OPTIONS (key value, ...)]` statement tail.
    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        let server_type = if self.parse_keyword(Keyword::TYPE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let version = if self.parse_keyword(Keyword::VERSION) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
        let foreign_data_wrapper = self.parse_object_name(false)?;

        // OPTIONS entries are bare `key value` pairs (no `=` between them).
        let mut options = None;
        if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            options = Some(self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?);
            self.expect_token(&Token::RParen)?;
        }

        Ok(Statement::CreateServer(CreateServerStatement {
            name,
            if_not_exists: ine,
            server_type,
            version,
            foreign_data_wrapper,
            options,
        }))
    }
19759
    /// Returns the parser's current position in the token stream.
    pub fn index(&self) -> usize {
        self.index
    }
19764
19765 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
19767 let ident = self.parse_identifier()?;
19768 self.expect_keyword_is(Keyword::AS)?;
19769
19770 let window_expr = if self.consume_token(&Token::LParen) {
19771 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
19772 } else if self.dialect.supports_window_clause_named_window_reference() {
19773 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
19774 } else {
19775 return self.expected_ref("(", self.peek_token_ref());
19776 };
19777
19778 Ok(NamedWindowDefinition(ident, window_expr))
19779 }
19780
    /// Parses a `CREATE [OR ALTER] PROCEDURE <name> [params]
    /// [LANGUAGE <lang>] AS <body>` statement tail; `or_alter` reports
    /// whether the caller saw `OR ALTER`.
    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        let params = self.parse_optional_procedure_parameters()?;

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;

        // The body runs until a terminating END keyword.
        let body = self.parse_conditional_statements(&[Keyword::END])?;

        Ok(Statement::CreateProcedure {
            name,
            or_alter,
            params,
            language,
            body,
        })
    }
19804
    /// Parses the interior of a window specification; the opening `(` has
    /// already been consumed and this method consumes the closing `)`.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword identifier names a base window to inherit from.
        let window_name = match &self.peek_token_ref().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything before the closing paren must be a window frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
19839
    /// Parses a `CREATE TYPE <name> ...` statement tail.
    ///
    /// With `AS` it dispatches to the ENUM, RANGE, or composite forms; without
    /// `AS` it is either a SQL-definition `( option = value, ... )` type or a
    /// bare shell type declaration.
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_as = self.parse_keyword(Keyword::AS);

        if !has_as {
            if self.consume_token(&Token::LParen) {
                // SQL-definition form: `CREATE TYPE name (INPUT = ..., ...)`.
                let options = self.parse_create_type_sql_definition_options()?;
                self.expect_token(&Token::RParen)?;
                return Ok(Statement::CreateType {
                    name,
                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
                });
            }

            // Shell type: no representation at all.
            return Ok(Statement::CreateType {
                name,
                representation: None,
            });
        }

        if self.parse_keyword(Keyword::ENUM) {
            self.parse_create_type_enum(name)
        } else if self.parse_keyword(Keyword::RANGE) {
            self.parse_create_type_range(name)
        } else if self.consume_token(&Token::LParen) {
            self.parse_create_type_composite(name)
        } else {
            self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
        }
    }
19880
    /// Parses the attribute list of `CREATE TYPE <name> AS ( ... )`; the
    /// opening `(` has already been consumed.
    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        // An immediate `)` means an attribute-less composite type.
        if self.consume_token(&Token::RParen) {
            return Ok(Statement::CreateType {
                name,
                representation: Some(UserDefinedTypeRepresentation::Composite {
                    attributes: vec![],
                }),
            });
        }

        // Each attribute is `<name> <data type> [COLLATE <collation>]`.
        let mut attributes = vec![];
        loop {
            let attr_name = self.parse_identifier()?;
            let attr_data_type = self.parse_data_type()?;
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
        })
    }
19921
19922 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
19926 self.expect_token(&Token::LParen)?;
19927 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
19928 self.expect_token(&Token::RParen)?;
19929
19930 Ok(Statement::CreateType {
19931 name,
19932 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
19933 })
19934 }
19935
19936 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
19940 self.expect_token(&Token::LParen)?;
19941 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
19942 self.expect_token(&Token::RParen)?;
19943
19944 Ok(Statement::CreateType {
19945 name,
19946 representation: Some(UserDefinedTypeRepresentation::Range { options }),
19947 })
19948 }
19949
    /// Parses one `key = value` option of `CREATE TYPE ... AS RANGE (...)`,
    /// e.g. `SUBTYPE = float8` or `CANONICAL = my_canonical_fn`.
    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::SUBTYPE,
            Keyword::SUBTYPE_OPCLASS,
            Keyword::COLLATION,
            Keyword::CANONICAL,
            Keyword::SUBTYPE_DIFF,
            Keyword::MULTIRANGE_TYPE_NAME,
        ]);

        match keyword {
            // SUBTYPE takes a data type; every other option takes a name.
            Some(Keyword::SUBTYPE) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
            }
            Some(Keyword::SUBTYPE_OPCLASS) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
            }
            Some(Keyword::COLLATION) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::Collation(name))
            }
            Some(Keyword::CANONICAL) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::Canonical(name))
            }
            Some(Keyword::SUBTYPE_DIFF) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
            }
            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
            }
            _ => self.expected_ref("range option keyword", self.peek_token_ref()),
        }
    }
19995
19996 fn parse_create_type_sql_definition_options(
19998 &mut self,
19999 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
20000 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
20001 }
20002
    /// Parses a single option of a PostgreSQL-style `CREATE TYPE <name> (...)`
    /// base-type definition, e.g. `INPUT = fn`, `INTERNALLENGTH = 16`,
    /// `PASSEDBYVALUE`, `STORAGE = extended`.
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        // Most options take the form `KEYWORD = object_name`; those arms
        // differ only in the variant they produce.
        match keyword {
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            Some(Keyword::INTERNALLENGTH) => {
                // INTERNALLENGTH accepts the keyword VARIABLE or a fixed
                // (unsigned) byte count.
                self.expect_token(&Token::Eq)?;
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // PASSEDBYVALUE is a bare flag with no `= value` part.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected_ref(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected_ref(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            Some(Keyword::CATEGORY) => {
                // CATEGORY takes a string literal; only its first character is
                // kept, and an empty string is an error.
                self.expect_token(&Token::Eq)?;
                let category_str = self.parse_literal_string()?;
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): if neither TRUE nor FALSE follows, this yields
                // `true` without consuming a token — confirm this leniency is
                // intended rather than returning a parse error.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): same TRUE/FALSE leniency as PREFERRED above —
                // an unrecognized value is treated as `true`; confirm intended.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
        }
    }
20183
20184 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
20185 self.expect_token(&Token::LParen)?;
20186 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
20187 self.expect_token(&Token::RParen)?;
20188 Ok(idents)
20189 }
20190
20191 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
20192 if dialect_of!(self is MySqlDialect | GenericDialect) {
20193 if self.parse_keyword(Keyword::FIRST) {
20194 Ok(Some(MySQLColumnPosition::First))
20195 } else if self.parse_keyword(Keyword::AFTER) {
20196 let ident = self.parse_identifier()?;
20197 Ok(Some(MySQLColumnPosition::After(ident)))
20198 } else {
20199 Ok(None)
20200 }
20201 } else {
20202 Ok(None)
20203 }
20204 }
20205
20206 fn parse_print(&mut self) -> Result<Statement, ParserError> {
20208 Ok(Statement::Print(PrintStatement {
20209 message: Box::new(self.parse_expr()?),
20210 }))
20211 }
20212
20213 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
20217 let wait_type = if self.parse_keyword(Keyword::DELAY) {
20218 WaitForType::Delay
20219 } else if self.parse_keyword(Keyword::TIME) {
20220 WaitForType::Time
20221 } else {
20222 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
20223 };
20224 let expr = self.parse_expr()?;
20225 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
20226 }
20227
20228 fn parse_return(&mut self) -> Result<Statement, ParserError> {
20230 match self.maybe_parse(|p| p.parse_expr())? {
20231 Some(expr) => Ok(Statement::Return(ReturnStatement {
20232 value: Some(ReturnStatementValue::Expr(expr)),
20233 })),
20234 None => Ok(Statement::Return(ReturnStatement { value: None })),
20235 }
20236 }
20237
20238 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
20242 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
20243
20244 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
20245 Some(self.parse_object_name(false)?)
20246 } else {
20247 None
20248 };
20249 self.expect_keyword(Keyword::OPTIONS)?;
20250 self.expect_token(&Token::LParen)?;
20251 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
20252 self.expect_token(&Token::RParen)?;
20253 self.expect_keyword(Keyword::AS)?;
20254 let query = self.parse_query()?;
20255 Ok(Statement::ExportData(ExportData {
20256 options,
20257 query,
20258 connection,
20259 }))
20260 }
20261
20262 fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
20263 self.expect_keyword(Keyword::VACUUM)?;
20264 let full = self.parse_keyword(Keyword::FULL);
20265 let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
20266 let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
20267 let reindex = self.parse_keyword(Keyword::REINDEX);
20268 let recluster = self.parse_keyword(Keyword::RECLUSTER);
20269 let (table_name, threshold, boost) =
20270 match self.maybe_parse(|p| p.parse_object_name(false))? {
20271 Some(table_name) => {
20272 let threshold = if self.parse_keyword(Keyword::TO) {
20273 let value = self.parse_value()?;
20274 self.expect_keyword(Keyword::PERCENT)?;
20275 Some(value)
20276 } else {
20277 None
20278 };
20279 let boost = self.parse_keyword(Keyword::BOOST);
20280 (Some(table_name), threshold, boost)
20281 }
20282 _ => (None, None, false),
20283 };
20284 Ok(Statement::Vacuum(VacuumStatement {
20285 full,
20286 sort_only,
20287 delete_only,
20288 reindex,
20289 recluster,
20290 table_name,
20291 threshold,
20292 boost,
20293 }))
20294 }
20295
    /// Consumes the parser and returns its underlying token buffer.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
20300
20301 fn peek_sub_query(&mut self) -> bool {
20303 self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
20304 .is_some()
20305 }
20306
20307 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
20308 let show_in;
20309 let mut filter_position = None;
20310 if self.dialect.supports_show_like_before_in() {
20311 if let Some(filter) = self.parse_show_statement_filter()? {
20312 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
20313 }
20314 show_in = self.maybe_parse_show_stmt_in()?;
20315 } else {
20316 show_in = self.maybe_parse_show_stmt_in()?;
20317 if let Some(filter) = self.parse_show_statement_filter()? {
20318 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
20319 }
20320 }
20321 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
20322 let limit = self.maybe_parse_show_stmt_limit()?;
20323 let from = self.maybe_parse_show_stmt_from()?;
20324 Ok(ShowStatementOptions {
20325 filter_position,
20326 show_in,
20327 starts_with,
20328 limit,
20329 limit_from: from,
20330 })
20331 }
20332
    /// Parses the optional `FROM`/`IN` clause of a `SHOW` statement, e.g.
    /// `SHOW TABLES IN DATABASE db` or `SHOW COLUMNS FROM tbl FROM db`.
    /// Returns `Ok(None)` when no `FROM`/`IN` keyword follows.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` / `IN SCHEMA` immediately followed by STARTS WITH
            // or LIMIT carries no parent name.
            // NOTE(review): `|` (non-short-circuit or) evaluates both peeks;
            // peeks do not consume tokens, so only short-circuiting is lost.
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            Some(parent_kw) => {
                // The parent name after the type keyword is optional
                // (e.g. `SHOW TABLES IN ACCOUNT`).
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected_ref(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token_ref(),
                        )
                    }
                }
            }
            None => {
                // No parent-type keyword: a bare object name, optionally
                // followed by a second FROM/IN naming its container
                // (e.g. `SHOW COLUMNS FROM tbl FROM db`), which is prepended
                // to the object name.
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
20402
20403 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
20404 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
20405 Ok(Some(self.parse_value()?))
20406 } else {
20407 Ok(None)
20408 }
20409 }
20410
20411 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
20412 if self.parse_keyword(Keyword::LIMIT) {
20413 Ok(self.parse_limit()?)
20414 } else {
20415 Ok(None)
20416 }
20417 }
20418
20419 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
20420 if self.parse_keyword(Keyword::FROM) {
20421 Ok(Some(self.parse_value()?))
20422 } else {
20423 Ok(None)
20424 }
20425 }
20426
    /// Returns `true` when the parser is currently in the column-definition
    /// state (`ParserState::ColumnDefinition`).
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
20430
    /// Parses a sequence of `key = value` options, optionally enclosed in
    /// parentheses, delimited by spaces or commas. Parsing stops at EOF, `;`,
    /// the closing `)` (when `parenthesized`), or any keyword listed in
    /// `end_words` (which is left unconsumed for the caller).
    pub(crate) fn parse_key_value_options(
        &mut self,
        parenthesized: bool,
        end_words: &[Keyword],
    ) -> Result<KeyValueOptions, ParserError> {
        let mut options: Vec<KeyValueOption> = Vec::new();
        // Records whether a comma separator was seen so the statement can be
        // round-tripped with the same delimiter style.
        let mut delimiter = KeyValueOptionsDelimiter::Space;
        if parenthesized {
            self.expect_token(&Token::LParen)?;
        }
        loop {
            match self.next_token().token {
                Token::RParen => {
                    if parenthesized {
                        break;
                    } else {
                        // NOTE(review): the leading space in this message
                        // yields a doubled space in "Expected: ..." output —
                        // confirm it is intentional.
                        return self.expected_ref(" another option or EOF", self.peek_token_ref());
                    }
                }
                Token::EOF | Token::SemiColon => break,
                Token::Comma => {
                    delimiter = KeyValueOptionsDelimiter::Comma;
                    continue;
                }
                Token::Word(w) if !end_words.contains(&w.keyword) => {
                    options.push(self.parse_key_value_option(&w)?)
                }
                Token::Word(w) if end_words.contains(&w.keyword) => {
                    // An end word terminates the list; push it back so the
                    // caller can consume it.
                    self.prev_token();
                    break;
                }
                _ => {
                    return self.expected_ref(
                        "another option, EOF, SemiColon, Comma or ')'",
                        self.peek_token_ref(),
                    )
                }
            };
        }

        Ok(KeyValueOptions { delimiter, options })
    }
20477
    /// Parses the `= value` part of a single key-value option; `key` has
    /// already been consumed by the caller. The value may be a quoted string,
    /// boolean, number, bare word (kept as a placeholder), parenthesized
    /// value list, or nested parenthesized option block.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            Token::Word(word) => {
                // Any other bare word (e.g. an unquoted constant) is consumed
                // and preserved verbatim as a placeholder value.
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            Token::LParen => {
                // First try `(v1, v2, ...)` as a plain value list; if that
                // fails to parse, fall back to a nested option block.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
20536
20537 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
20539 if self.parse_keyword(Keyword::ALL) {
20540 return Ok(ResetStatement { reset: Reset::ALL });
20541 }
20542
20543 let obj = self.parse_object_name(false)?;
20544 Ok(ResetStatement {
20545 reset: Reset::ConfigurationParameter(obj),
20546 })
20547 }
20548}
20549
20550fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
20551 if let Some(prefix) = prefix {
20552 Expr::Prefixed {
20553 prefix,
20554 value: Box::new(expr),
20555 }
20556 } else {
20557 expr
20558 }
20559}
20560
20561impl Word {
20562 pub fn to_ident(&self, span: Span) -> Ident {
20568 Ident {
20569 value: self.value.clone(),
20570 quote_style: self.quote_style,
20571 span,
20572 }
20573 }
20574
20575 pub fn into_ident(self, span: Span) -> Ident {
20580 Ident {
20581 value: self.value,
20582 quote_style: self.quote_style,
20583 span,
20584 }
20585 }
20586}
20587
20588#[cfg(test)]
20589mod tests {
20590 use crate::test_utils::{all_dialects, TestedDialects};
20591
20592 use super::*;
20593
    // Exercises token-cursor navigation: `peek_token` does not advance,
    // `next_token` does, and `prev_token` steps back exactly one token —
    // including over the repeating EOF sentinel at the end of input.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // At end of input, next_token keeps yielding EOF.
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
20614
    // `peek_tokens` returns a fixed-size lookahead window without consuming
    // input; past the end of input the window is padded with EOF tokens.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Consume "SELECT foo AS bar", leaving "FROM baz".
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
20659
    /// Round-trip tests for `Parser::parse_data_type` over ANSI-style
    /// character, custom, exact-numeric, and date/time types.
    #[cfg(test)]
    mod test_parse_data_type {
        use crate::ast::{
            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
        };
        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
        use crate::test_utils::TestedDialects;

        // Asserts that `$input` parses to `$expected_type` and that the AST
        // displays back as exactly the original SQL text.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }

        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }

        #[test]
        fn test_ansii_character_large_object_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }

        #[test]
        fn test_parse_custom_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }

        #[test]
        fn test_ansii_exact_numeric_types() {
            let dialect = TestedDialects::new(vec![
                Box::new(GenericDialect {}),
                Box::new(AnsiDialect {}),
                Box::new(PostgreSqlDialect {}),
            ]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            // Negative scale values are accepted and round-tripped as-is.
            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-2)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(1000,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-1000)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-5)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(20,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-2)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
            );

            // An explicit `+` sign on the scale parses but is not preserved
            // in the displayed text, so this case cannot use the round-trip
            // macro.
            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
                let data_type = parser.parse_data_type().unwrap();
                assert_eq!(
                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
                    data_type
                );
                assert_eq!("NUMERIC(10,5)", data_type.to_string());
            });
        }

        #[test]
        fn test_ansii_date_type() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
    }
21032
    // Round-trip tests for `Parser::parse_schema_name`: simple names,
    // unnamed AUTHORIZATION, and named AUTHORIZATION forms.
    #[test]
    fn test_parse_schema_name() {
        // Asserts `$input` parses to `$expected_name` and displays back as
        // the original text.
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }
21065
    // Round-trip tests for MySQL-style INDEX/KEY table constraints, covering
    // optional names and USING BTREE / USING HASH index types.
    #[test]
    fn mysql_parse_index_table_constraint() {
        // Asserts `$input` parses to the expected table constraint and
        // displays back as the original text.
        macro_rules! test_parse_table_constraint {
            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                    assert_eq!(constraint, $expected);
                    assert_eq!(constraint.to_string(), $input.to_string());
                });
            }};
        }

        // Builds an index column with no ordering options.
        fn mk_expected_col(name: &str) -> IndexColumn {
            IndexColumn {
                column: OrderByExpr {
                    expr: Expr::Identifier(name.into()),
                    options: OrderByOptions {
                        asc: None,
                        nulls_first: None,
                    },
                    with_fill: None,
                },
                operator_class: None,
            }
        }

        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

        test_parse_table_constraint!(
            dialect,
            "INDEX (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "KEY (c1)",
            IndexConstraint {
                display_as_key: true,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX 'index' (c1, c2)",
            TableConstraint::Index(IndexConstraint {
                display_as_key: false,
                name: Some(Ident::with_quote('\'', "index")),
                index_type: None,
                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
                index_options: vec![],
            })
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING BTREE (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING HASH (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING BTREE (c1)",
            IndexConstraint {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING HASH (c1)",
            IndexConstraint {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );
    }
21187
21188 #[test]
21189 fn test_tokenizer_error_loc() {
21190 let sql = "foo '";
21191 let ast = Parser::parse_sql(&GenericDialect, sql);
21192 assert_eq!(
21193 ast,
21194 Err(ParserError::TokenizerError(
21195 "Unterminated string literal at Line: 1, Column: 5".to_string()
21196 ))
21197 );
21198 }
21199
21200 #[test]
21201 fn test_parser_error_loc() {
21202 let sql = "SELECT this is a syntax error";
21203 let ast = Parser::parse_sql(&GenericDialect, sql);
21204 assert_eq!(
21205 ast,
21206 Err(ParserError::ParserError(
21207 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
21208 .to_string()
21209 ))
21210 );
21211 }
21212
21213 #[test]
21214 fn test_nested_explain_error() {
21215 let sql = "EXPLAIN EXPLAIN SELECT 1";
21216 let ast = Parser::parse_sql(&GenericDialect, sql);
21217 assert_eq!(
21218 ast,
21219 Err(ParserError::ParserError(
21220 "Explain must be root of the plan".to_string()
21221 ))
21222 );
21223 }
21224
21225 #[test]
21226 fn test_parse_multipart_identifier_positive() {
21227 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
21228
21229 let expected = vec![
21231 Ident {
21232 value: "CATALOG".to_string(),
21233 quote_style: None,
21234 span: Span::empty(),
21235 },
21236 Ident {
21237 value: "F(o)o. \"bar".to_string(),
21238 quote_style: Some('"'),
21239 span: Span::empty(),
21240 },
21241 Ident {
21242 value: "table".to_string(),
21243 quote_style: None,
21244 span: Span::empty(),
21245 },
21246 ];
21247 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
21248 let actual = parser.parse_multipart_identifier().unwrap();
21249 assert_eq!(expected, actual);
21250 });
21251
21252 let expected = vec![
21254 Ident {
21255 value: "CATALOG".to_string(),
21256 quote_style: None,
21257 span: Span::empty(),
21258 },
21259 Ident {
21260 value: "table".to_string(),
21261 quote_style: None,
21262 span: Span::empty(),
21263 },
21264 ];
21265 dialect.run_parser_method("CATALOG . table", |parser| {
21266 let actual = parser.parse_multipart_identifier().unwrap();
21267 assert_eq!(expected, actual);
21268 });
21269 }
21270
21271 #[test]
21272 fn test_parse_multipart_identifier_negative() {
21273 macro_rules! test_parse_multipart_identifier_error {
21274 ($input:expr, $expected_err:expr $(,)?) => {{
21275 all_dialects().run_parser_method(&*$input, |parser| {
21276 let actual_err = parser.parse_multipart_identifier().unwrap_err();
21277 assert_eq!(actual_err.to_string(), $expected_err);
21278 });
21279 }};
21280 }
21281
21282 test_parse_multipart_identifier_error!(
21283 "",
21284 "sql parser error: Empty input when parsing identifier",
21285 );
21286
21287 test_parse_multipart_identifier_error!(
21288 "*schema.table",
21289 "sql parser error: Unexpected token in identifier: *",
21290 );
21291
21292 test_parse_multipart_identifier_error!(
21293 "schema.table*",
21294 "sql parser error: Unexpected token in identifier: *",
21295 );
21296
21297 test_parse_multipart_identifier_error!(
21298 "schema.table.",
21299 "sql parser error: Trailing period in identifier",
21300 );
21301
21302 test_parse_multipart_identifier_error!(
21303 "schema.*",
21304 "sql parser error: Unexpected token following period in identifier: *",
21305 );
21306 }
21307
21308 #[test]
21309 fn test_mysql_partition_selection() {
21310 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
21311 let expected = vec!["p0", "p2"];
21312
21313 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
21314 assert_eq!(ast.len(), 1);
21315 if let Statement::Query(v) = &ast[0] {
21316 if let SetExpr::Select(select) = &*v.body {
21317 assert_eq!(select.from.len(), 1);
21318 let from: &TableWithJoins = &select.from[0];
21319 let table_factor = &from.relation;
21320 if let TableFactor::Table { partitions, .. } = table_factor {
21321 let actual: Vec<&str> = partitions
21322 .iter()
21323 .map(|ident| ident.value.as_str())
21324 .collect();
21325 assert_eq!(expected, actual);
21326 }
21327 }
21328 } else {
21329 panic!("fail to parse mysql partition selection");
21330 }
21331 }
21332
21333 #[test]
21334 fn test_replace_into_placeholders() {
21335 let sql = "REPLACE INTO t (a) VALUES (&a)";
21336
21337 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
21338 }
21339
21340 #[test]
21341 fn test_replace_into_set_placeholder() {
21342 let sql = "REPLACE INTO t SET ?";
21343
21344 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
21345 }
21346
21347 #[test]
21348 fn test_replace_incomplete() {
21349 let sql = r#"REPLACE"#;
21350
21351 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
21352 }
21353
21354 #[test]
21355 fn test_placeholder_invalid_whitespace() {
21356 for w in [" ", "/*invalid*/"] {
21357 let sql = format!("\nSELECT\n :{w}fooBar");
21358 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
21359 }
21360 }
21361}