1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration};
36use crate::ast::Statement::CreatePolicy;
37use crate::ast::*;
38use crate::dialect::*;
39use crate::keywords::{Keyword, ALL_KEYWORDS};
40use crate::tokenizer::*;
41
42mod alter;
43
/// Errors produced while parsing a SQL statement.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could begin.
    TokenizerError(String),
    /// A syntax error encountered while parsing the token stream.
    ParserError(String),
    /// The parser exceeded its configured nesting-depth budget.
    RecursionLimitExceeded,
}
50
// Produces `Err(ParserError::ParserError(..))` with the location value
// concatenated directly after the message.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
57
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks how many more levels of nesting the parser may enter.
    ///
    /// The counter lives in an `Rc<Cell<_>>` so that each outstanding
    /// [`DepthGuard`] can share it and restore the consumed depth on drop.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consumes one unit of depth and returns a guard that gives it
        /// back when dropped.
        ///
        /// Returns [`ParserError::RecursionLimitExceeded`] once the
        /// budget reaches zero.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let depth = self.remaining_depth.get();
            if depth == 0 {
                return Err(ParserError::RecursionLimitExceeded);
            }
            self.remaining_depth.set(depth - 1);
            Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
        }
    }

    /// Guard that restores one unit of recursion depth on drop.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            self.remaining_depth.set(self.remaining_depth.get() + 1);
        }
    }
}
124
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op recursion tracker for `no_std` builds: the API matches the
    /// `std` version but no depth accounting is performed.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The depth argument is accepted for API parity but ignored.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; recursion is unbounded without `std`.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized stand-in for the `std` depth guard.
    pub struct DepthGuard {}
}
145
/// Whether a syntax element is required or may be omitted.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
151
/// Whether a derived table (subquery) was preceded by `LATERAL`.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
156
/// Result of parsing a select-item-like expression that may be a bare
/// wildcard (`*`), a qualified wildcard (`schema.table.*`), or any
/// ordinary expression.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
162
impl From<TokenizerError> for ParserError {
    /// Wraps a tokenizer failure as a parser error, keeping its message.
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
168
169impl fmt::Display for ParserError {
170 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
171 write!(
172 f,
173 "sql parser error: {}",
174 match self {
175 ParserError::TokenizerError(s) => s,
176 ParserError::ParserError(s) => s,
177 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
178 }
179 )
180 }
181}
182
// All context lives in the `Display` output, so the default `Error`
// impl suffices; the trait itself is only available with `std`.
#[cfg(feature = "std")]
impl std::error::Error for ParserError {}
185
/// Default nesting budget handed to [`RecursionCounter`]; once exhausted
/// the parser returns [`ParserError::RecursionLimitExceeded`].
const DEFAULT_REMAINING_DEPTH: usize = 50;

/// Sentinel end-of-file token with a zeroed (dummy) source location.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
197
// Newtype over `bool` recording whether a trailing `]`-like bracket was
// matched. NOTE(review): inferred from the name only — the consuming
// code is outside this chunk; confirm against the call sites.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
217
/// Options that tune parser behavior independently of the dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    // Accept trailing commas in lists; `Parser::new` defaults this from
    // `Dialect::supports_trailing_commas`.
    pub trailing_commas: bool,
    // Unescape string literals during tokenization (passed through to
    // `Tokenizer::with_unescape` in `try_with_sql`).
    pub unescape: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
        }
    }
}
235
236impl ParserOptions {
237 pub fn new() -> Self {
239 Default::default()
240 }
241
242 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
254 self.trailing_commas = trailing_commas;
255 self
256 }
257
258 pub fn with_unescape(mut self, unescape: bool) -> Self {
261 self.unescape = unescape;
262 self
263 }
264}
265
/// Mode flag that alters how certain expressions are parsed.
#[derive(Copy, Clone)]
enum ParserState {
    /// Default parsing behavior.
    Normal,
    /// Parsing inside a `CONNECT BY` clause.
    /// NOTE(review): the exact effect is implemented outside this chunk.
    ConnectBy,
}
275
/// SQL parser: consumes a token stream and builds AST [`Statement`]s
/// according to the configured [`Dialect`].
pub struct Parser<'a> {
    /// The token stream being parsed.
    tokens: Vec<TokenWithSpan>,
    /// Index of the first unprocessed token in `tokens`.
    index: usize,
    /// Current special-case parsing mode.
    state: ParserState,
    /// Dialect that customizes parsing behavior.
    dialect: &'a dyn Dialect,
    /// Additional, user-configurable options.
    options: ParserOptions,
    /// Guards against stack overflow on deeply nested input.
    recursion_counter: RecursionCounter,
}
330
331impl<'a> Parser<'a> {
    /// Creates a parser for the given [`Dialect`] with no tokens, the
    /// default recursion limit, and options derived from the dialect
    /// (trailing-comma support).
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }
357
    /// Builder-style override of the recursion (nesting-depth) limit.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
384
    /// Builder-style replacement of the parser options (overrides the
    /// dialect-derived defaults set in [`Parser::new`]).
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
411
    /// Installs a pre-tokenized, location-bearing token stream and
    /// rewinds the parser to its start.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
418
419 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
421 let tokens_with_locations: Vec<TokenWithSpan> = tokens
423 .into_iter()
424 .map(|token| TokenWithSpan {
425 token,
426 span: Span::empty(),
427 })
428 .collect();
429 self.with_tokens_with_locations(tokens_with_locations)
430 }
431
    /// Tokenizes `sql` using this parser's dialect and `unescape`
    /// option, then installs the resulting tokens.
    ///
    /// # Errors
    /// Returns [`ParserError::TokenizerError`] (via `From`) if
    /// tokenization fails.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{}'...", sql);
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
445
    /// Parses zero or more semicolon-separated statements until EOF.
    ///
    /// Extra semicolons between statements are skipped; a missing
    /// delimiter between two statements is an error. An `END` keyword
    /// seen where a delimiter was expected stops parsing without
    /// consuming it — presumably so an enclosing `BEGIN ... END` parser
    /// can pick it up (confirm against callers).
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Ignore any number of delimiters between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // `END` terminates the list instead of erroring.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
492
    /// Convenience entry point: tokenizes and parses `sql` in one call,
    /// returning all statements it contains.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
511
    /// Parses a single top-level statement (`SELECT`, `INSERT`,
    /// `CREATE`, etc.), stopping before the statement separator, if any.
    ///
    /// The dialect gets first refusal via `Dialect::parse_statement`;
    /// the generic keyword dispatch below runs only if it declines.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Bound nesting: statements can recursively contain statements.
        let _guard = self.recursion_counter.try_decrease()?;

        // Allow the dialect to override statement parsing entirely.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                // These sub-parsers re-read their leading keyword, so
                // rewind one token before delegating.
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt()
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt()
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while()
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt()
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                // DuckDB has its own ATTACH syntax; other dialects use
                // the generic `ATTACH DATABASE` form.
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(),
                Keyword::INSERT => self.parse_insert(),
                Keyword::REPLACE => self.parse_replace(),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(),
                // LISTEN/UNLISTEN/NOTIFY are gated on dialect support.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => self.parse_unload(),
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                _ => self.expected("an SQL statement", next_token),
            },
            // A parenthesized query, e.g. `(SELECT ...)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
633
    /// Parses a procedural `CASE` statement:
    /// `CASE [operand] WHEN ... [ELSE ...] END [CASE]`.
    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // A "simple" CASE has an operand before the first WHEN; a
        // "searched" CASE goes straight to WHEN.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the attached end token is
        // the last keyword actually consumed.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(Statement::Case(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        }))
    }
670
    /// Parses a procedural `IF` statement:
    /// `IF <cond> THEN ... [ELSEIF ...]* [ELSE ...] END IF`.
    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF branches.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The statement must close with `END IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(Statement::If(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        }))
    }
710
    /// Parses a procedural `WHILE <cond> ... END` statement.
    fn parse_while(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(Statement::While(WhileStatement { while_block }))
    }
720
721 fn parse_conditional_statement_block(
729 &mut self,
730 terminal_keywords: &[Keyword],
731 ) -> Result<ConditionalStatementBlock, ParserError> {
732 let start_token = self.get_current_token().clone(); let mut then_token = None;
734
735 let condition = match &start_token.token {
736 Token::Word(w) if w.keyword == Keyword::ELSE => None,
737 Token::Word(w) if w.keyword == Keyword::WHILE => {
738 let expr = self.parse_expr()?;
739 Some(expr)
740 }
741 _ => {
742 let expr = self.parse_expr()?;
743 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
744 Some(expr)
745 }
746 };
747
748 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
749
750 Ok(ConditionalStatementBlock {
751 start_token: AttachedToken(start_token),
752 condition,
753 then_token,
754 conditional_statements,
755 })
756 }
757
758 pub(crate) fn parse_conditional_statements(
761 &mut self,
762 terminal_keywords: &[Keyword],
763 ) -> Result<ConditionalStatements, ParserError> {
764 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
765 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
766 let statements = self.parse_statement_list(terminal_keywords)?;
767 let end_token = self.expect_keyword(Keyword::END)?;
768
769 ConditionalStatements::BeginEnd(BeginEndStatements {
770 begin_token: AttachedToken(begin_token),
771 statements,
772 end_token: AttachedToken(end_token),
773 })
774 } else {
775 ConditionalStatements::Sequence {
776 statements: self.parse_statement_list(terminal_keywords)?,
777 }
778 };
779 Ok(conditional_statements)
780 }
781
    /// Parses a `RAISE` statement:
    /// `RAISE [USING MESSAGE = <expr> | <expr>]`; a bare `RAISE` with no
    /// value is also accepted (via the speculative `maybe_parse`).
    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(Statement::Raise(RaiseStatement { value }))
    }
797
798 pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
799 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
800
801 self.expect_keyword_is(Keyword::ON)?;
802 let token = self.next_token();
803
804 let (object_type, object_name) = match token.token {
805 Token::Word(w) if w.keyword == Keyword::COLUMN => {
806 (CommentObject::Column, self.parse_object_name(false)?)
807 }
808 Token::Word(w) if w.keyword == Keyword::TABLE => {
809 (CommentObject::Table, self.parse_object_name(false)?)
810 }
811 Token::Word(w) if w.keyword == Keyword::EXTENSION => {
812 (CommentObject::Extension, self.parse_object_name(false)?)
813 }
814 Token::Word(w) if w.keyword == Keyword::SCHEMA => {
815 (CommentObject::Schema, self.parse_object_name(false)?)
816 }
817 Token::Word(w) if w.keyword == Keyword::DATABASE => {
818 (CommentObject::Database, self.parse_object_name(false)?)
819 }
820 Token::Word(w) if w.keyword == Keyword::USER => {
821 (CommentObject::User, self.parse_object_name(false)?)
822 }
823 Token::Word(w) if w.keyword == Keyword::ROLE => {
824 (CommentObject::Role, self.parse_object_name(false)?)
825 }
826 _ => self.expected("comment object_type", token)?,
827 };
828
829 self.expect_keyword_is(Keyword::IS)?;
830 let comment = if self.parse_keyword(Keyword::NULL) {
831 None
832 } else {
833 Some(self.parse_literal_string()?)
834 };
835 Ok(Statement::Comment {
836 object_type,
837 object_name,
838 comment,
839 if_exists,
840 })
841 }
842
    /// Parses a MySQL `FLUSH` statement (MySQL/generic dialects only).
    ///
    /// Handles the `NO_WRITE_TO_BINLOG`/`LOCAL` modifier, each flush
    /// target listed in the error message below, the optional
    /// `FOR CHANNEL` of `RELAY LOGS`, and the
    /// `TABLES [t1, ...] [WITH READ LOCK] [FOR EXPORT]` forms.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        // FLUSH is MySQL-specific syntax.
        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
        }

        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // Optional `FOR CHANNEL <channel>` suffix.
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // Scan the suffix for a table list, `WITH READ LOCK`, and
            // `FOR EXPORT`; any non-word token ends the statement.
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A plain identifier starts the table list.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
929
    /// Parses `MSCK [REPAIR] TABLE <name>
    /// [{ADD | DROP | SYNC} PARTITIONS]` (the `MSCK` keyword has already
    /// been consumed).
    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition action is optional; `maybe_parse` backtracks if
        // the trailing `PARTITIONS` keyword is absent.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Statement::Msck {
            repair,
            table_name,
            partition_action,
        })
    }
956
    /// Parses `TRUNCATE [TABLE] [ONLY] <name>[, ...]` with optional
    /// `PARTITION (...)`, the Postgres-only
    /// `{RESTART | CONTINUE} IDENTITY` / `CASCADE | RESTRICT` options,
    /// and an optional `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let only = self.parse_keyword(Keyword::ONLY);

        let table_names = self
            .parse_comma_separated(|p| p.parse_object_name(false))?
            .into_iter()
            .map(|n| TruncateTableTarget { name: n })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are only recognized for Postgres.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Statement::Truncate {
            table_names,
            partitions,
            table,
            only,
            identity,
            cascade,
            on_cluster,
        })
    }
1001
1002 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1003 if self.parse_keyword(Keyword::CASCADE) {
1004 Some(CascadeOption::Cascade)
1005 } else if self.parse_keyword(Keyword::RESTRICT) {
1006 Some(CascadeOption::Restrict)
1007 } else {
1008 None
1009 }
1010 }
1011
    /// Parses the optional parenthesized option list of a DuckDB
    /// `ATTACH` statement, e.g. `(READ_ONLY, TYPE SQLITE)`.
    /// Returns an empty list when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // READ_ONLY may carry an explicit TRUE/FALSE or stand alone.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            // Each option must be followed by `,` (more options) or `)`.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }
1046
    /// Parses a DuckDB `ATTACH [DATABASE] [IF NOT EXISTS] <path>
    /// [AS alias] [(options)]` statement.
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1066
    /// Parses a DuckDB `DETACH [DATABASE] [IF EXISTS] <alias>` statement.
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1077
    /// Parses an `ATTACH [DATABASE] <expr> AS <schema>` statement
    /// (the non-DuckDB form dispatched from `parse_statement`).
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1089
    /// Parses an `ANALYZE [TABLE] <name>` statement with its optional
    /// clauses: `PARTITION (...)`, `FOR COLUMNS [c1, ...]`,
    /// `CACHE METADATA`, `NOSCAN`, and `COMPUTE STATISTICS`, in any
    /// order, until no further clause keyword is found.
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list itself is optional after
                    // `FOR COLUMNS`; backtrack if none follows.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Statement::Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1146
    /// Parses an expression that may end in a wildcard: `*`,
    /// `ident[.ident ...].*`, or any ordinary expression.
    ///
    /// If the input turns out not to be a wildcard form, the parser
    /// rewinds to the saved index and falls back to [`Self::parse_expr`].
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the position so we can backtrack.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // `t` is constrained to the two variants above.
                        _ => unreachable!(),
                    }];

                    // Accumulate `.ident` segments; a `.*` ends the
                    // qualified wildcard.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse as a regular expression.
        self.index = index;
        self.parse_expr()
    }
1192
    /// Parses a complete expression, starting from the lowest
    /// (unknown) precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1197
    /// Parses `expr [alias] [ASC | DESC]`, refusing to treat
    /// `ASC`/`DESC`/`GROUP` as an *implicit* alias so those keywords
    /// keep their ordering/grouping meaning.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // An explicit `AS alias` may use any word; an implicit alias
        // must not swallow a following ASC/DESC/GROUP keyword.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1216
    /// Parses an expression using precedence climbing, consuming infix
    /// operators only while their precedence exceeds `precedence`.
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Expressions nest; charge one unit of recursion depth.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {:?}", expr);
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {:?}", next_precedence);

            if precedence >= next_precedence {
                break;
            }

            // `.` is excluded from infix handling here; compound access
            // is handled by `parse_compound_expr` above — NOTE(review):
            // confirm against that helper, which is outside this chunk.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1244
1245 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1246 let condition = self.parse_expr()?;
1247 let message = if self.parse_keyword(Keyword::AS) {
1248 Some(self.parse_expr()?)
1249 } else {
1250 None
1251 };
1252
1253 Ok(Statement::Assert { condition, message })
1254 }
1255
    /// Parses `SAVEPOINT <name>` (keyword already consumed).
    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }
1260
    /// Parses `RELEASE [SAVEPOINT] <name>` (the `RELEASE` keyword has
    /// already been consumed; `SAVEPOINT` is optional noise).
    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier()?;

        Ok(Statement::ReleaseSavepoint { name })
    }
1267
    /// Parses `LISTEN <channel>` (keyword already consumed).
    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        Ok(Statement::LISTEN { channel })
    }
1272
    /// Parses `UNLISTEN { <channel> | * }`; the `*` form is stored as a
    /// wildcard-shaped identifier.
    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    // Rewind so the error points at the offending token.
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }
1287
1288 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1289 let channel = self.parse_identifier()?;
1290 let payload = if self.consume_token(&Token::Comma) {
1291 Some(self.parse_literal_string()?)
1292 } else {
1293 None
1294 };
1295 Ok(Statement::NOTIFY { channel, payload })
1296 }
1297
    /// Parses `RENAME TABLE old TO new [, old2 TO new2, ...]`
    /// (the `RENAME` keyword has already been consumed); only the
    /// `TABLE` form is supported.
    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
        if self.peek_keyword(Keyword::TABLE) {
            self.expect_keyword(Keyword::TABLE)?;
            let rename_tables = self.parse_comma_separated(|parser| {
                let old_name = parser.parse_object_name(false)?;
                parser.expect_keyword(Keyword::TO)?;
                let new_name = parser.parse_object_name(false)?;

                Ok(RenameTable { old_name, new_name })
            })?;
            Ok(Statement::RenameTable(rename_tables))
        } else {
            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
        }
    }
1314
    /// Parse the prefix of an expression that starts with a reserved keyword
    /// (e.g. `CASE`, `CAST`, `EXISTS`, `INTERVAL`).
    ///
    /// Returns `Ok(None)` when the keyword does not introduce a special
    /// expression form here, so the caller can fall back to treating the word
    /// as an identifier / function name.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean / NULL literals: rewind so parse_value sees the token.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Niladic "functions" taken without parentheses (Postgres-style).
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Date/time functions whose argument list is optional.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // In Databricks `exists(...)` can be a higher-order function, so
            // only treat EXISTS as a subquery test when a query follows.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // Array literal: `ARRAY[1, 2, 3]`.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // Array built from a subquery: `ARRAY(SELECT ...)`.
            Keyword::ARRAY
                if self.peek_token() == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR <expr>` is only an operator inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // DuckDB-style map literal: `MAP {'k': 'v'}`.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            // Geometric literals, e.g. `POINT '1,2'`, for dialects that
            // support geometric types.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1438
1439 fn parse_expr_prefix_by_unreserved_word(
1441 &mut self,
1442 w: &Word,
1443 w_span: Span,
1444 ) -> Result<Expr, ParserError> {
1445 match self.peek_token().token {
1446 Token::LParen if !self.peek_outer_join_operator() => {
1447 let id_parts = vec![w.clone().into_ident(w_span)];
1448 self.parse_function(ObjectName::from(id_parts))
1449 }
1450 Token::SingleQuotedString(_)
1452 | Token::DoubleQuotedString(_)
1453 | Token::HexStringLiteral(_)
1454 if w.value.starts_with('_') =>
1455 {
1456 Ok(Expr::Prefixed {
1457 prefix: w.clone().into_ident(w_span),
1458 value: self.parse_introduced_string_expr()?.into(),
1459 })
1460 }
1461 Token::SingleQuotedString(_)
1463 | Token::DoubleQuotedString(_)
1464 | Token::HexStringLiteral(_)
1465 if w.value.starts_with('_') =>
1466 {
1467 Ok(Expr::Prefixed {
1468 prefix: w.clone().into_ident(w_span),
1469 value: self.parse_introduced_string_expr()?.into(),
1470 })
1471 }
1472 Token::Arrow if self.dialect.supports_lambda_functions() => {
1473 self.expect_token(&Token::Arrow)?;
1474 Ok(Expr::Lambda(LambdaFunction {
1475 params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1476 body: Box::new(self.parse_expr()?),
1477 }))
1478 }
1479 _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1480 }
1481 }
1482
    /// Parse an expression prefix — the leading operand before any binary
    /// operators are considered.
    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
        // Give the dialect a chance to fully override prefix parsing.
        if let Some(prefix) = self.dialect.parse_prefix(self) {
            return prefix;
        }

        let loc = self.peek_token_ref().span.start;
        // First try the typed-string form `<data type> '<literal>'`
        // (e.g. `DATE '2020-01-01'`). INTERVAL gets its dedicated parser, and
        // custom types are rejected so `foo 'bar'` is not misread as one.
        let opt_expr = self.maybe_parse(|parser| {
            match parser.parse_data_type()? {
                DataType::Interval => parser.parse_interval(),
                DataType::Custom(..) => parser_err!("dummy", loc),
                data_type => Ok(Expr::TypedString {
                    data_type,
                    value: parser.parse_value()?.value,
                }),
            }
        })?;

        if let Some(expr) = opt_expr {
            return Ok(expr);
        }

        let dialect = self.dialect;

        self.advance_token();
        let next_token_index = self.get_current_index();
        let next_token = self.get_current_token();
        let span = next_token.span;
        let expr = match &next_token.token {
            Token::Word(w) => {
                let w = w.clone();
                // Try the reserved-keyword forms first; if that errors, retry
                // the word as an identifier when the dialect permits it.
                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
                    Ok(Some(expr)) => Ok(expr),

                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),

                    Err(e) => {
                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
                            }) {
                                return Ok(expr);
                            }
                        }
                        return Err(e);
                    }
                }
            } Token::LBracket => self.parse_array_expr(false),
            // Unary plus/minus binds at multiplicative precedence.
            tok @ Token::Minus | tok @ Token::Plus => {
                let op = if *tok == Token::Plus {
                    UnaryOperator::Plus
                } else {
                    UnaryOperator::Minus
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
                    ),
                })
            }
            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
                op: UnaryOperator::BangNot,
                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
            }),
            // PostgreSQL-specific prefix operators (factorial, roots, etc.).
            tok @ Token::DoubleExclamationMark
            | tok @ Token::PGSquareRoot
            | tok @ Token::PGCubeRoot
            | tok @ Token::AtSign
            | tok @ Token::Tilde
                if dialect_is!(dialect is PostgreSqlDialect) =>
            {
                let op = match tok {
                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
                    Token::AtSign => UnaryOperator::PGAbs,
                    Token::Tilde => UnaryOperator::PGBitwiseNot,
                    _ => unreachable!(),
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            // Geometric prefix operators for dialects with geometric types.
            tok @ Token::Sharp
            | tok @ Token::AtDashAt
            | tok @ Token::AtAt
            | tok @ Token::QuestionMarkDash
            | tok @ Token::QuestionPipe
                if self.dialect.supports_geometric_types() =>
            {
                let op = match tok {
                    Token::Sharp => UnaryOperator::Hash,
                    Token::AtDashAt => UnaryOperator::AtDashAt,
                    Token::AtAt => UnaryOperator::DoubleAt,
                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
                    _ => {
                        return Err(ParserError::ParserError(format!(
                            "Unexpected token in unary operator parsing: {:?}",
                            tok
                        )))
                    }
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
            {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::UnicodeStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // All remaining literal tokens: rewind and delegate to parse_value.
            Token::Number(_, _)
            | Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::TripleSingleQuotedString(_)
            | Token::TripleDoubleQuotedString(_)
            | Token::DollarQuotedString(_)
            | Token::SingleQuotedByteStringLiteral(_)
            | Token::DoubleQuotedByteStringLiteral(_)
            | Token::TripleSingleQuotedByteStringLiteral(_)
            | Token::TripleDoubleQuotedByteStringLiteral(_)
            | Token::SingleQuotedRawStringLiteral(_)
            | Token::DoubleQuotedRawStringLiteral(_)
            | Token::TripleSingleQuotedRawStringLiteral(_)
            | Token::TripleDoubleQuotedRawStringLiteral(_)
            | Token::NationalStringLiteral(_)
            | Token::HexStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // `(` can start a subquery, a lambda parameter list, a nested
            // expression, or a tuple — tried in that order.
            Token::LParen => {
                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
                    expr
                } else if let Some(lambda) = self.try_parse_lambda()? {
                    return Ok(lambda);
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
                    match exprs.len() {
                        0 => unreachable!(), 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
                        _ => Expr::Tuple(exprs),
                    }
                };
                self.expect_token(&Token::RParen)?;
                Ok(expr)
            }
            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LBrace => {
                self.prev_token();
                self.parse_lbrace_expr()
            }
            _ => self.expected_at("an expression", next_token_index),
        }?;

        // Any prefix expression may carry a trailing COLLATE clause.
        if self.parse_keyword(Keyword::COLLATE) {
            Ok(Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            })
        } else {
            Ok(expr)
        }
    }
1706
1707 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1708 let value: Value = self.parse_value()?.value;
1709 Ok(Expr::TypedString {
1710 data_type: DataType::GeometricType(kind),
1711 value,
1712 })
1713 }
1714
    /// Parse the rest of a compound expression after `root`: chains of `.`
    /// field accesses and `[...]` subscripts, plus the trailing special forms
    /// `.*` (qualified wildcard) and `(+)` (Oracle outer-join marker).
    ///
    /// `chain` may already contain access elements parsed by the caller.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `.*` ends the chain: consumed here as a qualified
                        // wildcard in Postgres, otherwise left for the caller.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token(); }

                        break;
                    }
                    Token::SingleQuotedString(s) => {
                        // Quoted member access, e.g. `a.'b'`.
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token(); }
                    // Any other token: parse the member as a subexpression
                    // and flatten nested compound/identifier results.
                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
                        Expr::CompoundFieldAccess { root, access_chain } => {
                            chain.push(AccessExpr::Dot(*root));
                            chain.extend(access_chain);
                        }
                        Expr::CompoundIdentifier(parts) => chain
                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
                        expr => {
                            chain.push(AccessExpr::Dot(expr));
                        }
                    },
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // Subscript access, possibly multi-dimensional: `a[1][2]`.
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `.*` is only valid after a pure identifier chain.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected("an identifier or a '*' after '.'", self.peek_token());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `(+)` likewise requires a plain column reference before it.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
1806
    /// Combine `root` and its `access_chain` into the most specific AST node:
    /// a pure identifier chain becomes `CompoundIdentifier`, identifiers
    /// ending in a call become a qualified `Function`, a sole trailing `(+)`
    /// becomes `OuterJoin`, and anything else `CompoundFieldAccess`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // Identifiers followed by a function call, e.g. `db.schema.func(...)`:
        // fold the leading identifiers into the function's object name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root + intermediate identifiers to the function name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // A single trailing `(+)` marker, e.g. `t.col (+)`: merge it into one
        // OuterJoin node over the full compound identifier.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
1899
1900 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1901 match k {
1902 Keyword::LOCAL => Some(ContextModifier::Local),
1903 Keyword::GLOBAL => Some(ContextModifier::Global),
1904 Keyword::SESSION => Some(ContextModifier::Session),
1905 _ => None,
1906 }
1907 }
1908
1909 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1911 if !matches!(root, Expr::Identifier(_)) {
1912 return false;
1913 }
1914 fields
1915 .iter()
1916 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1917 }
1918
1919 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1921 let mut idents = vec![];
1922 if let Expr::Identifier(root) = root {
1923 idents.push(root);
1924 for x in fields {
1925 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1926 idents.push(ident);
1927 } else {
1928 return parser_err!(
1929 format!("Expected identifier, found: {}", x),
1930 x.span().start
1931 );
1932 }
1933 }
1934 Ok(idents)
1935 } else {
1936 parser_err!(
1937 format!("Expected identifier, found: {}", root),
1938 root.span().start
1939 )
1940 }
1941 }
1942
1943 fn peek_outer_join_operator(&mut self) -> bool {
1945 if !self.dialect.supports_outer_join_operator() {
1946 return false;
1947 }
1948
1949 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
1950 Token::LParen == maybe_lparen.token
1951 && Token::Plus == maybe_plus.token
1952 && Token::RParen == maybe_rparen.token
1953 }
1954
1955 fn maybe_parse_outer_join_operator(&mut self) -> bool {
1958 self.dialect.supports_outer_join_operator()
1959 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
1960 }
1961
1962 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
1963 self.expect_token(&Token::LParen)?;
1964 let options = self.parse_comma_separated(Self::parse_utility_option)?;
1965 self.expect_token(&Token::RParen)?;
1966
1967 Ok(options)
1968 }
1969
1970 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
1971 let name = self.parse_identifier()?;
1972
1973 let next_token = self.peek_token();
1974 if next_token == Token::Comma || next_token == Token::RParen {
1975 return Ok(UtilityOption { name, arg: None });
1976 }
1977 let arg = self.parse_expr()?;
1978
1979 Ok(UtilityOption {
1980 name,
1981 arg: Some(arg),
1982 })
1983 }
1984
1985 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
1986 if !self.peek_sub_query() {
1987 return Ok(None);
1988 }
1989
1990 Ok(Some(Expr::Subquery(self.parse_query()?)))
1991 }
1992
1993 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
1994 if !self.dialect.supports_lambda_functions() {
1995 return Ok(None);
1996 }
1997 self.maybe_parse(|p| {
1998 let params = p.parse_comma_separated(|p| p.parse_identifier())?;
1999 p.expect_token(&Token::RParen)?;
2000 p.expect_token(&Token::Arrow)?;
2001 let expr = p.parse_expr()?;
2002 Ok(Expr::Lambda(LambdaFunction {
2003 params: OneOrManyWithParens::Many(params),
2004 body: Box::new(expr),
2005 }))
2006 })
2007 }
2008
2009 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2018 self.maybe_parse(|p| {
2019 p.expect_keyword(Keyword::FN)?;
2020 let fn_name = p.parse_object_name(false)?;
2021 let mut fn_call = p.parse_function_call(fn_name)?;
2022 fn_call.uses_odbc_syntax = true;
2023 Ok(Expr::Function(fn_call))
2024 })
2025 }
2026
2027 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2028 self.parse_function_call(name).map(Expr::Function)
2029 }
2030
    /// Parse a function call body for `name`, starting at the opening `(`.
    ///
    /// Handles, in order: Snowflake `fn(SELECT ...)` subquery arguments, the
    /// argument list, ClickHouse parameterized functions `fn(params)(args)`,
    /// `WITHIN GROUP (ORDER BY ...)`, `FILTER (WHERE ...)`,
    /// `IGNORE|RESPECT NULLS`, and a trailing `OVER` window.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Snowflake permits a subquery as the sole "argument".
        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse: a second parenthesized list means the first list was
        // parameters, e.g. `quantile(0.5)(x)`.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for a trailing null-treatment clause when one was not
        // already supplied inside the argument list.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2118
2119 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2121 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2122 Some(keyword) => {
2123 self.expect_keyword_is(Keyword::NULLS)?;
2124
2125 Ok(match keyword {
2126 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2127 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2128 _ => None,
2129 })
2130 }
2131 None => Ok(None),
2132 }
2133 }
2134
2135 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2136 let args = if self.consume_token(&Token::LParen) {
2137 FunctionArguments::List(self.parse_function_argument_list()?)
2138 } else {
2139 FunctionArguments::None
2140 };
2141 Ok(Expr::Function(Function {
2142 name,
2143 uses_odbc_syntax: false,
2144 parameters: FunctionArguments::None,
2145 args,
2146 filter: None,
2147 over: None,
2148 null_treatment: None,
2149 within_group: vec![],
2150 }))
2151 }
2152
2153 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2154 let next_token = self.next_token();
2155 match &next_token.token {
2156 Token::Word(w) => match w.keyword {
2157 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2158 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2159 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2160 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2161 },
2162 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2163 }
2164 }
2165
2166 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2167 let units = self.parse_window_frame_units()?;
2168 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2169 let start_bound = self.parse_window_frame_bound()?;
2170 self.expect_keyword_is(Keyword::AND)?;
2171 let end_bound = Some(self.parse_window_frame_bound()?);
2172 (start_bound, end_bound)
2173 } else {
2174 (self.parse_window_frame_bound()?, None)
2175 };
2176 Ok(WindowFrame {
2177 units,
2178 start_bound,
2179 end_bound,
2180 })
2181 }
2182
2183 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2185 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2186 Ok(WindowFrameBound::CurrentRow)
2187 } else {
2188 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2189 None
2190 } else {
2191 Some(Box::new(match self.peek_token().token {
2192 Token::SingleQuotedString(_) => self.parse_interval()?,
2193 _ => self.parse_expr()?,
2194 }))
2195 };
2196 if self.parse_keyword(Keyword::PRECEDING) {
2197 Ok(WindowFrameBound::Preceding(rows))
2198 } else if self.parse_keyword(Keyword::FOLLOWING) {
2199 Ok(WindowFrameBound::Following(rows))
2200 } else {
2201 self.expected("PRECEDING or FOLLOWING", self.peek_token())
2202 }
2203 }
2204 }
2205
2206 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2208 if self.dialect.supports_group_by_expr() {
2209 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2210 self.expect_token(&Token::LParen)?;
2211 let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2212 self.expect_token(&Token::RParen)?;
2213 Ok(Expr::GroupingSets(result))
2214 } else if self.parse_keyword(Keyword::CUBE) {
2215 self.expect_token(&Token::LParen)?;
2216 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2217 self.expect_token(&Token::RParen)?;
2218 Ok(Expr::Cube(result))
2219 } else if self.parse_keyword(Keyword::ROLLUP) {
2220 self.expect_token(&Token::LParen)?;
2221 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2222 self.expect_token(&Token::RParen)?;
2223 Ok(Expr::Rollup(result))
2224 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2225 Ok(Expr::Tuple(vec![]))
2229 } else {
2230 self.parse_expr()
2231 }
2232 } else {
2233 self.parse_expr()
2235 }
2236 }
2237
2238 fn parse_tuple(
2242 &mut self,
2243 lift_singleton: bool,
2244 allow_empty: bool,
2245 ) -> Result<Vec<Expr>, ParserError> {
2246 if lift_singleton {
2247 if self.consume_token(&Token::LParen) {
2248 let result = if allow_empty && self.consume_token(&Token::RParen) {
2249 vec![]
2250 } else {
2251 let result = self.parse_comma_separated(Parser::parse_expr)?;
2252 self.expect_token(&Token::RParen)?;
2253 result
2254 };
2255 Ok(result)
2256 } else {
2257 Ok(vec![self.parse_expr()?])
2258 }
2259 } else {
2260 self.expect_token(&Token::LParen)?;
2261 let result = if allow_empty && self.consume_token(&Token::RParen) {
2262 vec![]
2263 } else {
2264 let result = self.parse_comma_separated(Parser::parse_expr)?;
2265 self.expect_token(&Token::RParen)?;
2266 result
2267 };
2268 Ok(result)
2269 }
2270 }
2271
2272 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2273 let mut operand = None;
2274 if !self.parse_keyword(Keyword::WHEN) {
2275 operand = Some(Box::new(self.parse_expr()?));
2276 self.expect_keyword_is(Keyword::WHEN)?;
2277 }
2278 let mut conditions = vec![];
2279 loop {
2280 let condition = self.parse_expr()?;
2281 self.expect_keyword_is(Keyword::THEN)?;
2282 let result = self.parse_expr()?;
2283 conditions.push(CaseWhen { condition, result });
2284 if !self.parse_keyword(Keyword::WHEN) {
2285 break;
2286 }
2287 }
2288 let else_result = if self.parse_keyword(Keyword::ELSE) {
2289 Some(Box::new(self.parse_expr()?))
2290 } else {
2291 None
2292 };
2293 self.expect_keyword_is(Keyword::END)?;
2294 Ok(Expr::Case {
2295 operand,
2296 conditions,
2297 else_result,
2298 })
2299 }
2300
2301 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2302 if self.parse_keyword(Keyword::FORMAT) {
2303 let value = self.parse_value()?.value;
2304 match self.parse_optional_time_zone()? {
2305 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2306 None => Ok(Some(CastFormat::Value(value))),
2307 }
2308 } else {
2309 Ok(None)
2310 }
2311 }
2312
2313 pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2314 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2315 self.parse_value().map(|v| Some(v.value))
2316 } else {
2317 Ok(None)
2318 }
2319 }
2320
2321 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2323 self.expect_token(&Token::LParen)?;
2324 let data_type = self.parse_data_type()?;
2325 self.expect_token(&Token::Comma)?;
2326 let expr = self.parse_expr()?;
2327 let styles = if self.consume_token(&Token::Comma) {
2328 self.parse_comma_separated(Parser::parse_expr)?
2329 } else {
2330 Default::default()
2331 };
2332 self.expect_token(&Token::RParen)?;
2333 Ok(Expr::Convert {
2334 is_try,
2335 expr: Box::new(expr),
2336 data_type: Some(data_type),
2337 charset: None,
2338 target_before_value: true,
2339 styles,
2340 })
2341 }
2342
2343 pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2348 if self.dialect.convert_type_before_value() {
2349 return self.parse_mssql_convert(is_try);
2350 }
2351 self.expect_token(&Token::LParen)?;
2352 let expr = self.parse_expr()?;
2353 if self.parse_keyword(Keyword::USING) {
2354 let charset = self.parse_object_name(false)?;
2355 self.expect_token(&Token::RParen)?;
2356 return Ok(Expr::Convert {
2357 is_try,
2358 expr: Box::new(expr),
2359 data_type: None,
2360 charset: Some(charset),
2361 target_before_value: false,
2362 styles: vec![],
2363 });
2364 }
2365 self.expect_token(&Token::Comma)?;
2366 let data_type = self.parse_data_type()?;
2367 let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2368 Some(self.parse_object_name(false)?)
2369 } else {
2370 None
2371 };
2372 self.expect_token(&Token::RParen)?;
2373 Ok(Expr::Convert {
2374 is_try,
2375 expr: Box::new(expr),
2376 data_type: Some(data_type),
2377 charset,
2378 target_before_value: false,
2379 styles: vec![],
2380 })
2381 }
2382
2383 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2385 self.expect_token(&Token::LParen)?;
2386 let expr = self.parse_expr()?;
2387 self.expect_keyword_is(Keyword::AS)?;
2388 let data_type = self.parse_data_type()?;
2389 let format = self.parse_optional_cast_format()?;
2390 self.expect_token(&Token::RParen)?;
2391 Ok(Expr::Cast {
2392 kind,
2393 expr: Box::new(expr),
2394 data_type,
2395 format,
2396 })
2397 }
2398
2399 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2401 self.expect_token(&Token::LParen)?;
2402 let exists_node = Expr::Exists {
2403 negated,
2404 subquery: self.parse_query()?,
2405 };
2406 self.expect_token(&Token::RParen)?;
2407 Ok(exists_node)
2408 }
2409
2410 pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2411 self.expect_token(&Token::LParen)?;
2412 let field = self.parse_date_time_field()?;
2413
2414 let syntax = if self.parse_keyword(Keyword::FROM) {
2415 ExtractSyntax::From
2416 } else if self.consume_token(&Token::Comma)
2417 && dialect_of!(self is SnowflakeDialect | GenericDialect)
2418 {
2419 ExtractSyntax::Comma
2420 } else {
2421 return Err(ParserError::ParserError(
2422 "Expected 'FROM' or ','".to_string(),
2423 ));
2424 };
2425
2426 let expr = self.parse_expr()?;
2427 self.expect_token(&Token::RParen)?;
2428 Ok(Expr::Extract {
2429 field,
2430 expr: Box::new(expr),
2431 syntax,
2432 })
2433 }
2434
2435 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2436 self.expect_token(&Token::LParen)?;
2437 let expr = self.parse_expr()?;
2438 let field = if self.parse_keyword(Keyword::TO) {
2440 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2442 } else if self.consume_token(&Token::Comma) {
2443 match self.parse_value()?.value {
2445 Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2446 _ => {
2447 return Err(ParserError::ParserError(
2448 "Scale field can only be of number type".to_string(),
2449 ))
2450 }
2451 }
2452 } else {
2453 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2454 };
2455 self.expect_token(&Token::RParen)?;
2456 if is_ceil {
2457 Ok(Expr::Ceil {
2458 expr: Box::new(expr),
2459 field,
2460 })
2461 } else {
2462 Ok(Expr::Floor {
2463 expr: Box::new(expr),
2464 field,
2465 })
2466 }
2467 }
2468
2469 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2470 let between_prec = self.dialect.prec_value(Precedence::Between);
2471 let position_expr = self.maybe_parse(|p| {
2472 p.expect_token(&Token::LParen)?;
2474
2475 let expr = p.parse_subexpr(between_prec)?;
2477 p.expect_keyword_is(Keyword::IN)?;
2478 let from = p.parse_expr()?;
2479 p.expect_token(&Token::RParen)?;
2480 Ok(Expr::Position {
2481 expr: Box::new(expr),
2482 r#in: Box::new(from),
2483 })
2484 })?;
2485 match position_expr {
2486 Some(expr) => Ok(expr),
2487 None => self.parse_function(ObjectName::from(vec![ident])),
2490 }
2491 }
2492
2493 pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2495 let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2496 Keyword::SUBSTR => true,
2497 Keyword::SUBSTRING => false,
2498 _ => {
2499 self.prev_token();
2500 return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2501 }
2502 };
2503 self.expect_token(&Token::LParen)?;
2504 let expr = self.parse_expr()?;
2505 let mut from_expr = None;
2506 let special = self.consume_token(&Token::Comma);
2507 if special || self.parse_keyword(Keyword::FROM) {
2508 from_expr = Some(self.parse_expr()?);
2509 }
2510
2511 let mut to_expr = None;
2512 if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2513 to_expr = Some(self.parse_expr()?);
2514 }
2515 self.expect_token(&Token::RParen)?;
2516
2517 Ok(Expr::Substring {
2518 expr: Box::new(expr),
2519 substring_from: from_expr.map(Box::new),
2520 substring_for: to_expr.map(Box::new),
2521 special,
2522 shorthand,
2523 })
2524 }
2525
2526 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2527 self.expect_token(&Token::LParen)?;
2529 let expr = self.parse_expr()?;
2530 self.expect_keyword_is(Keyword::PLACING)?;
2531 let what_expr = self.parse_expr()?;
2532 self.expect_keyword_is(Keyword::FROM)?;
2533 let from_expr = self.parse_expr()?;
2534 let mut for_expr = None;
2535 if self.parse_keyword(Keyword::FOR) {
2536 for_expr = Some(self.parse_expr()?);
2537 }
2538 self.expect_token(&Token::RParen)?;
2539
2540 Ok(Expr::Overlay {
2541 expr: Box::new(expr),
2542 overlay_what: Box::new(what_expr),
2543 overlay_from: Box::new(from_expr),
2544 overlay_for: for_expr.map(Box::new),
2545 })
2546 }
2547
    /// Parse a TRIM call after the TRIM keyword has been consumed.
    ///
    /// Accepted forms:
    /// - `TRIM([BOTH | LEADING | TRAILING] <expr> [FROM <expr>])`
    /// - `TRIM(<expr>)`
    /// - `TRIM(<expr>, <chars>, ...)` (Snowflake/BigQuery/Generic only)
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional BOTH/LEADING/TRAILING specifier before the first expression.
        if let Token::Word(word) = self.peek_token().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING]
                .iter()
                .any(|d| word.keyword == *d)
            {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // `TRIM(<what> FROM <expr>)`: the first expression is the set of
            // characters to strip, the second is the string operand.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | BigQueryDialect | GenericDialect)
        {
            // NOTE(review): the comma is consumed even when the dialect check
            // then fails, so for other dialects the closing-paren check in the
            // else branch below will error out — confirm this is intended.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
2596
2597 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2598 let next_token = self.next_token();
2599 match &next_token.token {
2600 Token::Word(w) => match w.keyword {
2601 Keyword::BOTH => Ok(TrimWhereField::Both),
2602 Keyword::LEADING => Ok(TrimWhereField::Leading),
2603 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2604 _ => self.expected("trim_where field", next_token)?,
2605 },
2606 _ => self.expected("trim_where field", next_token),
2607 }
2608 }
2609
2610 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2613 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2614 self.expect_token(&Token::RBracket)?;
2615 Ok(Expr::Array(Array { elem: exprs, named }))
2616 }
2617
    /// Parse the optional `ON OVERFLOW` clause of LISTAGG.
    ///
    /// Returns `None` when the clause is absent; otherwise either
    /// `ON OVERFLOW ERROR` or `ON OVERFLOW TRUNCATE [filler] WITH|WITHOUT COUNT`.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler literal is optional: WITH/WITHOUT directly after
                // TRUNCATE means it was omitted.
                let filler = match self.peek_token().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    // Any string-literal token kind is accepted as the filler.
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token(),
                    )?,
                };
                // Exactly one of WITH COUNT / WITHOUT COUNT must follow.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
2651
    /// Parse a date/time unit keyword (as used in EXTRACT, CEIL/FLOOR TO,
    /// INTERVAL qualifiers, etc.) into a [`DateTimeField`].
    ///
    /// Dialects may additionally allow arbitrary custom identifiers or
    /// single-quoted strings as units.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery allows a weekday argument: WEEK(MONDAY).
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the common misspelling and the correct spelling are
                // distinct keywords, kept separate so input round-trips.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Unknown word: accept it as a custom unit if the dialect
                // allows, re-parsing it as an identifier.
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
2728
2729 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2730 match self.peek_token().token {
2731 Token::Word(w) => match w.keyword {
2732 Keyword::EXISTS => {
2733 let negated = true;
2734 let _ = self.parse_keyword(Keyword::EXISTS);
2735 self.parse_exists_expr(negated)
2736 }
2737 _ => Ok(Expr::UnaryOp {
2738 op: UnaryOperator::Not,
2739 expr: Box::new(
2740 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2741 ),
2742 }),
2743 },
2744 _ => Ok(Expr::UnaryOp {
2745 op: UnaryOperator::Not,
2746 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2747 }),
2748 }
2749 }
2750
2751 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2761 let token = self.expect_token(&Token::LBrace)?;
2762
2763 if let Some(fn_expr) = self.maybe_parse_odbc_fn_body()? {
2764 self.expect_token(&Token::RBrace)?;
2765 return Ok(fn_expr);
2766 }
2767
2768 if self.dialect.supports_dictionary_syntax() {
2769 self.prev_token(); return self.parse_duckdb_struct_literal();
2771 }
2772
2773 self.expected("an expression", token)
2774 }
2775
2776 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2782 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2783
2784 self.expect_keyword_is(Keyword::AGAINST)?;
2785
2786 self.expect_token(&Token::LParen)?;
2787
2788 let match_value = self.parse_value()?.value;
2790
2791 let in_natural_language_mode_keywords = &[
2792 Keyword::IN,
2793 Keyword::NATURAL,
2794 Keyword::LANGUAGE,
2795 Keyword::MODE,
2796 ];
2797
2798 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2799
2800 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2801
2802 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2803 if self.parse_keywords(with_query_expansion_keywords) {
2804 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2805 } else {
2806 Some(SearchModifier::InNaturalLanguageMode)
2807 }
2808 } else if self.parse_keywords(in_boolean_mode_keywords) {
2809 Some(SearchModifier::InBooleanMode)
2810 } else if self.parse_keywords(with_query_expansion_keywords) {
2811 Some(SearchModifier::WithQueryExpansion)
2812 } else {
2813 None
2814 };
2815
2816 self.expect_token(&Token::RParen)?;
2817
2818 Ok(Expr::MatchAgainst {
2819 columns,
2820 match_value,
2821 opt_search_modifier,
2822 })
2823 }
2824
    /// Parse an INTERVAL expression; the INTERVAL keyword is already consumed.
    ///
    /// Handles both the qualified form (`INTERVAL '1' DAY`,
    /// `INTERVAL '1:1' HOUR (5) TO MINUTE (5)`) and, for dialects that do not
    /// require a qualifier, a bare value (`INTERVAL '1 day'`).
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        let value = if self.dialect.require_interval_qualifier() {
            // A unit keyword must follow, so the value may safely be parsed
            // as a full expression.
            self.parse_expr()?
        } else {
            // Parse only a prefix expression so a following unit keyword is
            // not consumed as part of the value.
            self.parse_prefix()?
        };

        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token().span.start
            );
        } else {
            None
        };

        // SECOND takes `(leading precision, fractional seconds precision)` in
        // one parenthesized pair; other units take an optional leading
        // precision and an optional `TO <unit>` range whose SECOND end may
        // carry its own fractional-seconds precision.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
2907
2908 pub fn next_token_is_temporal_unit(&mut self) -> bool {
2911 if let Token::Word(word) = self.peek_token().token {
2912 matches!(
2913 word.keyword,
2914 Keyword::YEAR
2915 | Keyword::YEARS
2916 | Keyword::MONTH
2917 | Keyword::MONTHS
2918 | Keyword::WEEK
2919 | Keyword::WEEKS
2920 | Keyword::DAY
2921 | Keyword::DAYS
2922 | Keyword::HOUR
2923 | Keyword::HOURS
2924 | Keyword::MINUTE
2925 | Keyword::MINUTES
2926 | Keyword::SECOND
2927 | Keyword::SECONDS
2928 | Keyword::CENTURY
2929 | Keyword::DECADE
2930 | Keyword::DOW
2931 | Keyword::DOY
2932 | Keyword::EPOCH
2933 | Keyword::ISODOW
2934 | Keyword::ISOYEAR
2935 | Keyword::JULIAN
2936 | Keyword::MICROSECOND
2937 | Keyword::MICROSECONDS
2938 | Keyword::MILLENIUM
2939 | Keyword::MILLENNIUM
2940 | Keyword::MILLISECOND
2941 | Keyword::MILLISECONDS
2942 | Keyword::NANOSECOND
2943 | Keyword::NANOSECONDS
2944 | Keyword::QUARTER
2945 | Keyword::TIMEZONE
2946 | Keyword::TIMEZONE_HOUR
2947 | Keyword::TIMEZONE_MINUTE
2948 )
2949 } else {
2950 false
2951 }
2952 }
2953
2954 fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
2962 self.prev_token();
2964 let (fields, trailing_bracket) =
2965 self.parse_struct_type_def(Self::parse_struct_field_def)?;
2966 if trailing_bracket.0 {
2967 return parser_err!(
2968 "unmatched > in STRUCT literal",
2969 self.peek_token().span.start
2970 );
2971 }
2972
2973 self.expect_token(&Token::LParen)?;
2975 let values = self
2976 .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
2977 self.expect_token(&Token::RParen)?;
2978
2979 Ok(Expr::Struct { values, fields })
2980 }
2981
2982 fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
2996 let expr = self.parse_expr()?;
2997 if self.parse_keyword(Keyword::AS) {
2998 if typed_syntax {
2999 return parser_err!("Typed syntax does not allow AS", {
3000 self.prev_token();
3001 self.peek_token().span.start
3002 });
3003 }
3004 let field_name = self.parse_identifier()?;
3005 Ok(Expr::Named {
3006 expr: expr.into(),
3007 name: field_name,
3008 })
3009 } else {
3010 Ok(expr)
3011 }
3012 }
3013
    /// Parse `STRUCT` followed by an optional `<...>` field-definition list.
    ///
    /// `elem_parser` parses one field and reports whether it already consumed
    /// this level's closing `>` as half of a `>>` token (nested structs).
    /// Returns the fields plus whether an extra `>` was consumed that the
    /// caller still owns.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        let start_token = self.peek_token();
        self.expect_keyword_is(Keyword::STRUCT)?;

        // A bare STRUCT with no `<...>` yields an empty field list.
        if Token::Lt != self.peek_token() {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            if !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }

            // Only the last field may close the list via `>>`; a comma after
            // one that did is malformed.
            if trailing_bracket.0 {
                return parser_err!("unmatched > in STRUCT definition", start_token.span.start);
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3063
3064 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3066 self.expect_keyword_is(Keyword::STRUCT)?;
3067 self.expect_token(&Token::LParen)?;
3068 let struct_body = self.parse_comma_separated(|parser| {
3069 let field_name = parser.parse_identifier()?;
3070 let field_type = parser.parse_data_type()?;
3071
3072 Ok(StructField {
3073 field_name: Some(field_name),
3074 field_type,
3075 })
3076 });
3077 self.expect_token(&Token::RParen)?;
3078 struct_body
3079 }
3080
3081 fn parse_struct_field_def(
3091 &mut self,
3092 ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3093 let is_anonymous_field = !matches!(
3096 (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3097 (Token::Word(_), Token::Word(_))
3098 );
3099
3100 let field_name = if is_anonymous_field {
3101 None
3102 } else {
3103 Some(self.parse_identifier()?)
3104 };
3105
3106 let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3107
3108 Ok((
3109 StructField {
3110 field_name,
3111 field_type,
3112 },
3113 trailing_bracket,
3114 ))
3115 }
3116
3117 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3127 self.expect_keyword_is(Keyword::UNION)?;
3128
3129 self.expect_token(&Token::LParen)?;
3130
3131 let fields = self.parse_comma_separated(|p| {
3132 Ok(UnionField {
3133 field_name: p.parse_identifier()?,
3134 field_type: p.parse_data_type()?,
3135 })
3136 })?;
3137
3138 self.expect_token(&Token::RParen)?;
3139
3140 Ok(fields)
3141 }
3142
3143 fn parse_duckdb_struct_literal(&mut self) -> Result<Expr, ParserError> {
3153 self.expect_token(&Token::LBrace)?;
3154
3155 let fields =
3156 self.parse_comma_separated0(Self::parse_duckdb_dictionary_field, Token::RBrace)?;
3157
3158 self.expect_token(&Token::RBrace)?;
3159
3160 Ok(Expr::Dictionary(fields))
3161 }
3162
3163 fn parse_duckdb_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3173 let key = self.parse_identifier()?;
3174
3175 self.expect_token(&Token::Colon)?;
3176
3177 let expr = self.parse_expr()?;
3178
3179 Ok(DictionaryField {
3180 key,
3181 value: Box::new(expr),
3182 })
3183 }
3184
3185 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3195 self.expect_token(&Token::LBrace)?;
3196 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3197 self.expect_token(&Token::RBrace)?;
3198 Ok(Expr::Map(Map { entries: fields }))
3199 }
3200
3201 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3211 let key = self.parse_expr()?;
3212
3213 self.expect_token(&Token::Colon)?;
3214
3215 let value = self.parse_expr()?;
3216
3217 Ok(MapEntry {
3218 key: Box::new(key),
3219 value: Box::new(value),
3220 })
3221 }
3222
3223 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3233 self.expect_keyword_is(Keyword::MAP)?;
3234 self.expect_token(&Token::LParen)?;
3235 let key_data_type = self.parse_data_type()?;
3236 self.expect_token(&Token::Comma)?;
3237 let value_data_type = self.parse_data_type()?;
3238 self.expect_token(&Token::RParen)?;
3239
3240 Ok((key_data_type, value_data_type))
3241 }
3242
3243 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3253 self.expect_keyword_is(Keyword::TUPLE)?;
3254 self.expect_token(&Token::LParen)?;
3255 let mut field_defs = vec![];
3256 loop {
3257 let (def, _) = self.parse_struct_field_def()?;
3258 field_defs.push(def);
3259 if !self.consume_token(&Token::Comma) {
3260 break;
3261 }
3262 }
3263 self.expect_token(&Token::RParen)?;
3264
3265 Ok(field_defs)
3266 }
3267
3268 fn expect_closing_angle_bracket(
3273 &mut self,
3274 trailing_bracket: MatchedTrailingBracket,
3275 ) -> Result<MatchedTrailingBracket, ParserError> {
3276 let trailing_bracket = if !trailing_bracket.0 {
3277 match self.peek_token().token {
3278 Token::Gt => {
3279 self.next_token();
3280 false.into()
3281 }
3282 Token::ShiftRight => {
3283 self.next_token();
3284 true.into()
3285 }
3286 _ => return self.expected(">", self.peek_token()),
3287 }
3288 } else {
3289 false.into()
3290 };
3291
3292 Ok(trailing_bracket)
3293 }
3294
    /// Parse an operator appearing *after* an already-parsed expression
    /// (`expr`) together with its right-hand side, at the given precedence.
    ///
    /// Handles binary operators, `ANY`/`ALL`/`SOME` comparisons, `IS ...`,
    /// `AT TIME ZONE`, `[NOT] IN/BETWEEN/LIKE/ILIKE/SIMILAR/REGEXP/RLIKE`,
    /// `::` casts, postfix `!`, and JSON access — after first giving the
    /// dialect a chance to handle the token itself.
    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
        // Dialect hook: allow dialect-specific infix parsing to win.
        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
            return infix;
        }

        let dialect = self.dialect;

        self.advance_token();
        let tok = self.get_current_token();
        let tok_index = self.get_current_index();
        let span = tok.span;
        // First, map simple tokens to binary operators (dialect-gated where
        // the same token means different things in different dialects).
        let regular_binary_operator = match &tok.token {
            Token::Spaceship => Some(BinaryOperator::Spaceship),
            Token::DoubleEq => Some(BinaryOperator::Eq),
            Token::Assignment => Some(BinaryOperator::Assignment),
            Token::Eq => Some(BinaryOperator::Eq),
            Token::Neq => Some(BinaryOperator::NotEq),
            Token::Gt => Some(BinaryOperator::Gt),
            Token::GtEq => Some(BinaryOperator::GtEq),
            Token::Lt => Some(BinaryOperator::Lt),
            Token::LtEq => Some(BinaryOperator::LtEq),
            Token::Plus => Some(BinaryOperator::Plus),
            Token::Minus => Some(BinaryOperator::Minus),
            Token::Mul => Some(BinaryOperator::Multiply),
            Token::Mod => Some(BinaryOperator::Modulo),
            Token::StringConcat => Some(BinaryOperator::StringConcat),
            Token::Pipe => Some(BinaryOperator::BitwiseOr),
            Token::Caret => {
                // `^` is exponentiation in Postgres, bitwise XOR elsewhere.
                if dialect_is!(dialect is PostgreSqlDialect) {
                    Some(BinaryOperator::PGExp)
                } else {
                    Some(BinaryOperator::BitwiseXor)
                }
            }
            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
            Token::Div => Some(BinaryOperator::Divide),
            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                Some(BinaryOperator::DuckIntegerDivide)
            }
            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
                Some(BinaryOperator::PGBitwiseShiftLeft)
            }
            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
                Some(BinaryOperator::PGBitwiseShiftRight)
            }
            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
                Some(BinaryOperator::PGBitwiseXor)
            }
            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
                Some(BinaryOperator::PGOverlap)
            }
            // NOTE(review): this arm is reachable only for GenericDialect —
            // PostgreSqlDialect is already matched by the arm above.
            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGOverlap)
            }
            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGStartsWith)
            }
            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
            Token::Arrow => Some(BinaryOperator::Arrow),
            Token::LongArrow => Some(BinaryOperator::LongArrow),
            Token::HashArrow => Some(BinaryOperator::HashArrow),
            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
            Token::AtArrow => Some(BinaryOperator::AtArrow),
            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
            Token::HashMinus => Some(BinaryOperator::HashMinus),
            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
            Token::AtAt => Some(BinaryOperator::AtAt),
            Token::Question => Some(BinaryOperator::Question),
            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
            // Geometric operators (PostGIS-style), gated on dialect support.
            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::DoubleHash)
            }

            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::AndLt)
            }
            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::AndGt)
            }
            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::QuestionDash)
            }
            Token::AmpersandLeftAngleBracketVerticalBar
                if self.dialect.supports_geometric_types() =>
            {
                Some(BinaryOperator::AndLtPipe)
            }
            Token::VerticalBarAmpersandRightAngleBracket
                if self.dialect.supports_geometric_types() =>
            {
                Some(BinaryOperator::PipeAndGt)
            }
            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::LtDashGt)
            }
            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::LtCaret)
            }
            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::GtCaret)
            }
            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::QuestionHash)
            }
            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::QuestionDoublePipe)
            }
            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::QuestionDashPipe)
            }
            Token::TildeEqual if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::TildeEq)
            }
            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::LtLtPipe)
            }
            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::PipeGtGt)
            }
            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),

            Token::Word(w) => match w.keyword {
                Keyword::AND => Some(BinaryOperator::And),
                Keyword::OR => Some(BinaryOperator::Or),
                Keyword::XOR => Some(BinaryOperator::Xor),
                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
                // Postgres `OPERATOR(schema.op)` custom-operator call syntax.
                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    // Collect period-separated name parts as raw token text.
                    let mut idents = vec![];
                    loop {
                        self.advance_token();
                        idents.push(self.get_current_token().to_string());
                        if !self.consume_token(&Token::Period) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
                }
                _ => None,
            },
            _ => None,
        };

        // Re-fetch the operator token; the OPERATOR(...) arm above may have
        // advanced the parser past it.
        let tok = self.token_at(tok_index);
        if let Some(op) = regular_binary_operator {
            // `<op> ANY|ALL|SOME (<subquery or expr>)`.
            if let Some(keyword) =
                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
            {
                self.expect_token(&Token::LParen)?;
                let right = if self.peek_sub_query() {
                    // A subquery keeps its surrounding parens: rewind to the
                    // `(` and let the sub-expression parser consume them.
                    self.prev_token(); self.parse_subexpr(precedence)?
                } else {
                    let right = self.parse_subexpr(precedence)?;
                    self.expect_token(&Token::RParen)?;
                    right
                };

                // Outside Postgres only plain comparison operators may be
                // combined with ANY/ALL/SOME.
                if !dialect_of!(self is PostgreSqlDialect)
                    && !matches!(
                        op,
                        BinaryOperator::Gt
                            | BinaryOperator::Lt
                            | BinaryOperator::GtEq
                            | BinaryOperator::LtEq
                            | BinaryOperator::Eq
                            | BinaryOperator::NotEq
                    )
                {
                    return parser_err!(
                        format!(
                            "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}"
                        ),
                        span.start
                    );
                };

                Ok(match keyword {
                    Keyword::ALL => Expr::AllOp {
                        left: Box::new(expr),
                        compare_op: op,
                        right: Box::new(right),
                    },
                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
                        left: Box::new(expr),
                        compare_op: op,
                        right: Box::new(right),
                        is_some: keyword == Keyword::SOME,
                    },
                    _ => unreachable!(),
                })
            } else {
                Ok(Expr::BinaryOp {
                    left: Box::new(expr),
                    op,
                    right: Box::new(self.parse_subexpr(precedence)?),
                })
            }
        } else if let Token::Word(w) = &tok.token {
            match w.keyword {
                // `IS [NOT] NULL/TRUE/FALSE/UNKNOWN/DISTINCT FROM/NORMALIZED`.
                Keyword::IS => {
                    if self.parse_keyword(Keyword::NULL) {
                        Ok(Expr::IsNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
                        Ok(Expr::IsNotNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::TRUE]) {
                        Ok(Expr::IsTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
                        Ok(Expr::IsNotTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::FALSE]) {
                        Ok(Expr::IsFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
                        Ok(Expr::IsNotFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
                        Ok(Expr::IsUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
                        Ok(Expr::IsNotUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
                    {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
                        Ok(is_normalized)
                    } else {
                        self.expected(
                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
                            self.peek_token(),
                        )
                    }
                }
                Keyword::AT => {
                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                    Ok(Expr::AtTimeZone {
                        timestamp: Box::new(expr),
                        time_zone: Box::new(self.parse_subexpr(precedence)?),
                    })
                }
                // Rewind so a possible leading NOT can be re-read uniformly
                // for all of these pattern/membership operators.
                Keyword::NOT
                | Keyword::IN
                | Keyword::BETWEEN
                | Keyword::LIKE
                | Keyword::ILIKE
                | Keyword::SIMILAR
                | Keyword::REGEXP
                | Keyword::RLIKE => {
                    self.prev_token();
                    let negated = self.parse_keyword(Keyword::NOT);
                    let regexp = self.parse_keyword(Keyword::REGEXP);
                    let rlike = self.parse_keyword(Keyword::RLIKE);
                    if regexp || rlike {
                        Ok(Expr::RLike {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            regexp,
                        })
                    } else if self.parse_keyword(Keyword::IN) {
                        self.parse_in(expr, negated)
                    } else if self.parse_keyword(Keyword::BETWEEN) {
                        self.parse_between(expr, negated)
                    } else if self.parse_keyword(Keyword::LIKE) {
                        Ok(Expr::Like {
                            negated,
                            any: self.parse_keyword(Keyword::ANY),
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keyword(Keyword::ILIKE) {
                        Ok(Expr::ILike {
                            negated,
                            any: self.parse_keyword(Keyword::ANY),
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
                        Ok(Expr::SimilarTo {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else {
                        self.expected("IN or BETWEEN after NOT", self.peek_token())
                    }
                }
                _ => parser_err!(
                    format!("No infix parser for token {:?}", tok.token),
                    tok.span.start
                ),
            }
        } else if Token::DoubleColon == *tok {
            Ok(Expr::Cast {
                kind: CastKind::DoubleColon,
                expr: Box::new(expr),
                data_type: self.parse_data_type()?,
                format: None,
            })
        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
            Ok(Expr::UnaryOp {
                op: UnaryOperator::PGPostfixFactorial,
                expr: Box::new(expr),
            })
        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
        {
            // Subscript/JSON access: rewind so the access parser sees the
            // introducing token itself.
            self.prev_token();
            self.parse_json_access(expr)
        } else {
            parser_err!(
                format!("No infix parser for token {:?}", tok.token),
                tok.span.start
            )
        }
    }
3645
3646 pub fn parse_escape_char(&mut self) -> Result<Option<String>, ParserError> {
3648 if self.parse_keyword(Keyword::ESCAPE) {
3649 Ok(Some(self.parse_literal_string()?))
3650 } else {
3651 Ok(None)
3652 }
3653 }
3654
    /// Parse the interior of a `[...]` subscript (the opening `[` is already
    /// consumed): either a single index, or a `lower:upper:stride` slice in
    /// which every component is optional.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // `[expr]` is a plain index; `[:]` with nothing more is a fully
        // open slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // When a lower bound was parsed, the separating `:` has not been
        // consumed yet.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lower:]` — upper bound omitted.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_expr()?)
        };

        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // A second `:` introduces the optional stride.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // The `]` was already consumed above when the stride was omitted.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
3727
3728 pub fn parse_multi_dim_subscript(
3730 &mut self,
3731 chain: &mut Vec<AccessExpr>,
3732 ) -> Result<(), ParserError> {
3733 while self.consume_token(&Token::LBracket) {
3734 self.parse_subscript(chain)?;
3735 }
3736 Ok(())
3737 }
3738
3739 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3743 let subscript = self.parse_subscript_inner()?;
3744 chain.push(AccessExpr::Subscript(subscript));
3745 Ok(())
3746 }
3747
    /// Parses a single object key within a JSON path (after `:` or `.`):
    /// either a bare or `"`-quoted word, or a double-quoted string literal.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            // Accept unquoted words and words quoted with `"`; any other
            // quote style is rejected by the binding pattern.
            Token::Word(Word {
                value,
                quote_style: quote_style @ (Some('"') | None),
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
3771
3772 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3773 let path = self.parse_json_path()?;
3774 Ok(Expr::JsonAccess {
3775 value: Box::new(expr),
3776 path,
3777 })
3778 }
3779
    /// Parses a JSON path such as `:a.b[0]."c"`: a `:` may only begin the
    /// path, `.` separates subsequent keys, and `[expr]` is bracket access.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:` is only valid as the very first separator.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.` only separates keys after the first element.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Any other token ends the path; put it back for the caller.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers invoke this only after peeking a path-start token, so at
        // least one element must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
3806
    /// Parses the right-hand side of `expr [NOT] IN ...`: `IN UNNEST(expr)`
    /// (BigQuery-style), `IN (subquery)`, or `IN (item, ...)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // `IN UNNEST(array_expression)` form.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; `maybe_parse` rewinds on failure so we can
        // fall back to parsing an expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects permit an empty `IN ()` list.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
3841
3842 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3844 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3847 self.expect_keyword_is(Keyword::AND)?;
3848 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3849 Ok(Expr::Between {
3850 expr: Box::new(expr),
3851 negated,
3852 low: Box::new(low),
3853 high: Box::new(high),
3854 })
3855 }
3856
3857 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3859 Ok(Expr::Cast {
3860 kind: CastKind::DoubleColon,
3861 expr: Box::new(expr),
3862 data_type: self.parse_data_type()?,
3863 format: None,
3864 })
3865 }
3866
    /// Returns the precedence of the next (unconsumed) token, using the
    /// dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
3871
    /// Returns the token at absolute position `index` in the token stream,
    /// or the shared EOF token when out of bounds.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
3877
    /// Returns a clone of the next significant (non-whitespace) token
    /// without consuming it.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
3885
    /// Returns a reference to the next significant token without consuming
    /// it; prefer this over [`Self::peek_token`] to avoid the clone.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
3891
    /// Returns the next N significant tokens (cloned, spans discarded)
    /// without consuming them.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
3918
    /// Returns the next N significant tokens, with spans, without consuming
    /// them; positions past the end of the stream yield EOF tokens.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
        // `index` is shared across array slots so each slot receives the
        // next non-whitespace token after the previous slot's.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.cloned().unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            });
        })
    }
3941
    /// Like [`Self::peek_tokens_with_location`] but returns references,
    /// avoiding clones; exhausted positions yield the shared EOF token.
    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
        // Shared scan index: each slot continues where the last stopped.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.unwrap_or(&EOF_TOKEN);
        })
    }
3961
    /// Returns a clone of the nth (0-based) significant token ahead of the
    /// current position, without consuming anything.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
3966
    /// Returns a reference to the nth (0-based) significant token ahead of
    /// the current position, skipping whitespace, without consuming.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts toward `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        // Past the end of the stream this is None → EOF.
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
3986
    /// Returns the immediately next token, including whitespace, without
    /// consuming it.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
3992
    /// Returns the nth token ahead without skipping whitespace;
    /// out-of-range positions yield an EOF token with an empty span.
    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
        self.tokens
            .get(self.index + n)
            .cloned()
            .unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            })
    }
4003
4004 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4008 let index = self.index;
4009 let matched = self.parse_keywords(expected);
4010 self.index = index;
4011 matched
4012 }
4013
    /// Consumes and returns a clone of the next significant token (EOF once
    /// the stream is exhausted).
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4022
    /// Returns the absolute index of the most recently consumed token
    /// (saturating at 0 before anything has been consumed).
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4030
    /// Consumes and returns the immediately next token, including
    /// whitespace; returns None at end of stream.
    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
        self.index += 1;
        self.tokens.get(self.index - 1)
    }
4036
    /// Advances the position past the next significant token, skipping any
    /// whitespace before it. At the end of the stream this simply leaves
    /// the index one past the last token.
    pub fn advance_token(&mut self) {
        loop {
            self.index += 1;
            match self.tokens.get(self.index - 1) {
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                _ => break,
            }
        }
    }
4052
    /// Returns the most recently consumed token (the token at index-1;
    /// before anything is consumed this saturates to index 0).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4059
    /// Returns the token consumed just before the current one (index-2,
    /// saturating at 0).
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4066
    /// Returns the next unconsumed token without skipping whitespace and
    /// without advancing.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4073
    /// Steps the position back to just before the most recently consumed
    /// significant token, skipping over whitespace.
    ///
    /// Must not be called more times than tokens have been consumed;
    /// rewinding past the start of the stream trips the assert.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
4094
    /// Builds an `Expected: ..., found: ...` parse error located at the
    /// found token's span start.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4102
    /// Like [`Self::expected`] but borrows the found token instead of
    /// taking it by value.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4110
    /// Like [`Self::expected`] but reports the token at an absolute stream
    /// index (EOF when out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4119
4120 #[must_use]
4123 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4124 if self.peek_keyword(expected) {
4125 self.advance_token();
4126 true
4127 } else {
4128 false
4129 }
4130 }
4131
4132 #[must_use]
4133 pub fn peek_keyword(&self, expected: Keyword) -> bool {
4134 matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4135 }
4136
    /// Consumes the keyword `expected` followed by the exact token sequence
    /// `tokens`, returning true only when the whole sequence matches; on
    /// any mismatch nothing at all is consumed.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        match &self.peek_token_ref().token {
            Token::Word(w) if expected == w.keyword => {
                // Verify the entire sequence via lookahead before consuming
                // anything, so a partial match leaves the position intact.
                for (idx, token) in tokens.iter().enumerate() {
                    if self.peek_nth_token_ref(idx + 1).token != *token {
                        return false;
                    }
                }
                // Everything matched: consume the keyword and all tokens.
                for _ in 0..(tokens.len() + 1) {
                    self.advance_token();
                }
                true
            }
            _ => false,
        }
    }
4161
4162 #[must_use]
4166 pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4167 let index = self.index;
4168 for &keyword in keywords {
4169 if !self.parse_keyword(keyword) {
4170 self.index = index;
4173 return false;
4174 }
4175 }
4176 true
4177 }
4178
4179 #[must_use]
4182 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4183 for keyword in keywords {
4184 if self.peek_keyword(*keyword) {
4185 return Some(*keyword);
4186 }
4187 }
4188 None
4189 }
4190
4191 #[must_use]
4195 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4196 match &self.peek_token_ref().token {
4197 Token::Word(w) => {
4198 keywords
4199 .iter()
4200 .find(|keyword| **keyword == w.keyword)
4201 .map(|keyword| {
4202 self.advance_token();
4203 *keyword
4204 })
4205 }
4206 _ => None,
4207 }
4208 }
4209
4210 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4213 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4214 Ok(keyword)
4215 } else {
4216 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4217 self.expected_ref(
4218 &format!("one of {}", keywords.join(" or ")),
4219 self.peek_token_ref(),
4220 )
4221 }
4222 }
4223
4224 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4229 if self.parse_keyword(expected) {
4230 Ok(self.get_current_token().clone())
4231 } else {
4232 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4233 }
4234 }
4235
4236 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4242 if self.parse_keyword(expected) {
4243 Ok(())
4244 } else {
4245 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4246 }
4247 }
4248
4249 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4252 for &kw in expected {
4253 self.expect_keyword_is(kw)?;
4254 }
4255 Ok(())
4256 }
4257
4258 #[must_use]
4262 pub fn consume_token(&mut self, expected: &Token) -> bool {
4263 if self.peek_token_ref() == expected {
4264 self.advance_token();
4265 true
4266 } else {
4267 false
4268 }
4269 }
4270
4271 #[must_use]
4275 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4276 let index = self.index;
4277 for token in tokens {
4278 if !self.consume_token(token) {
4279 self.index = index;
4280 return false;
4281 }
4282 }
4283 true
4284 }
4285
4286 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4288 if self.peek_token_ref() == expected {
4289 Ok(self.next_token())
4290 } else {
4291 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4292 }
4293 }
4294
    /// Parses a string into any `FromStr` type, mapping the failure into a
    /// `ParserError` that names the target type and the source location.
    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
    where
        <T as FromStr>::Err: Display,
    {
        s.parse::<T>().map_err(|e| {
            ParserError::ParserError(format!(
                "Could not parse '{s}' as {}: {e}{loc}",
                core::any::type_name::<T>()
            ))
        })
    }
4306
4307 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4309 let trailing_commas =
4315 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4316
4317 self.parse_comma_separated_with_trailing_commas(
4318 |p| p.parse_select_item(),
4319 trailing_commas,
4320 Self::is_reserved_for_column_alias,
4321 )
4322 }
4323
    /// Parses the comma-separated list of permissions in a GRANT statement,
    /// tolerating a trailing comma when the parser option allows it.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // After a comma, a token that cannot start another
                // permission means the comma was trailing: end the list.
                match self.peek_token().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4346
4347 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4349 let trailing_commas = self.dialect.supports_from_trailing_commas();
4350
4351 self.parse_comma_separated_with_trailing_commas(
4352 Parser::parse_table_and_joins,
4353 trailing_commas,
4354 |kw, _parser| {
4355 self.dialect
4356 .get_reserved_keywords_for_table_factor()
4357 .contains(kw)
4358 },
4359 )
4360 }
4361
    /// Decides whether a comma-separated list ends at the current position:
    /// no comma at all ends the list; with `trailing_commas` enabled, a
    /// comma followed by a reserved keyword or a closing delimiter also
    /// ends it (the trailing comma stays consumed, the terminator does
    /// not).
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Inspect the token after the comma, then rewind so the
            // terminator itself remains unconsumed for the caller.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4394
    /// Like [`Self::is_parse_comma_separated_end_with_trailing_commas`]
    /// but using the parser's configured trailing-comma option and the
    /// default column-alias reserved-keyword check.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4403
    /// Parses one or more `f`-items separated by commas, honoring the
    /// parser's trailing-comma option.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4415
    /// Parses one or more `f`-items separated by commas. When
    /// `trailing_commas` is set, a comma followed by a terminator (closing
    /// delimiter, or a keyword for which `is_reserved_keyword` returns
    /// true) ends the list instead of requiring another item.
    fn parse_comma_separated_with_trailing_commas<T, F, R>(
        &mut self,
        mut f: F,
        trailing_commas: bool,
        is_reserved_keyword: R,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        let mut values = vec![];
        loop {
            values.push(f(self)?);
            if self.is_parse_comma_separated_end_with_trailing_commas(
                trailing_commas,
                &is_reserved_keyword,
            ) {
                break;
            }
        }
        Ok(values)
    }
4442
4443 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4445 where
4446 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4447 {
4448 let mut values = vec![];
4449 loop {
4450 values.push(f(self)?);
4451 if !self.consume_token(&Token::Period) {
4452 break;
4453 }
4454 }
4455 Ok(values)
4456 }
4457
4458 pub fn parse_keyword_separated<T, F>(
4460 &mut self,
4461 keyword: Keyword,
4462 mut f: F,
4463 ) -> Result<Vec<T>, ParserError>
4464 where
4465 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4466 {
4467 let mut values = vec![];
4468 loop {
4469 values.push(f(self)?);
4470 if !self.parse_keyword(keyword) {
4471 break;
4472 }
4473 }
4474 Ok(values)
4475 }
4476
4477 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4478 where
4479 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4480 {
4481 self.expect_token(&Token::LParen)?;
4482 let res = f(self)?;
4483 self.expect_token(&Token::RParen)?;
4484 Ok(res)
4485 }
4486
    /// Parses a possibly-empty comma-separated list: returns an empty Vec
    /// when `end_token` (or, with trailing commas enabled, a lone comma
    /// just before `end_token`) comes next; otherwise defers to
    /// [`Self::parse_comma_separated`]. `end_token` itself is never
    /// consumed.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // Empty list: the closing token is already next.
        if self.peek_token().token == end_token {
            return Ok(vec![]);
        }

        // `(,)`-style: a single trailing comma in an otherwise empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
4508
    /// Parses semicolon-terminated statements until EOF or until one of
    /// `terminal_keywords` (unquoted) is next; the terminator itself is not
    /// consumed. Every parsed statement must be followed by a semicolon.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // Quoted words are identifiers, never terminators.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
4533
    /// Default end-of-list check for comma-separated parsing: a keyword
    /// terminates the list when the dialect does not permit it as a column
    /// alias.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
4540
4541 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4545 where
4546 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4547 {
4548 match self.try_parse(f) {
4549 Ok(t) => Ok(Some(t)),
4550 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4551 _ => Ok(None),
4552 }
4553 }
4554
4555 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4557 where
4558 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4559 {
4560 let index = self.index;
4561 match f(self) {
4562 Ok(t) => Ok(t),
4563 Err(e) => {
4564 self.index = index;
4566 Err(e)
4567 }
4568 }
4569 }
4570
    /// Parses an optional `ALL`, `DISTINCT`, or `DISTINCT ON (<exprs>)`
    /// after SELECT. Returns `None` for plain/`ALL` selects; specifying
    /// both ALL and DISTINCT is an error.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember the location up front for the ALL+DISTINCT error.
        let loc = self.peek_token().span.start;
        let all = self.parse_keyword(Keyword::ALL);
        let distinct = self.parse_keyword(Keyword::DISTINCT);
        if !distinct {
            return Ok(None);
        }
        if all {
            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
        }
        let on = self.parse_keyword(Keyword::ON);
        if !on {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // `DISTINCT ON ()` is allowed: probe for an immediate `)`, then
        // step back so the expect_token below still consumes it.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
4598
    /// Parses everything after the CREATE keyword, dispatching on the
    /// object type (TABLE, VIEW, FUNCTION, INDEX, ...). Leading modifiers
    /// (OR REPLACE, OR ALTER, LOCAL/GLOBAL, TRANSIENT, TEMP/TEMPORARY,
    /// PERSISTENT) are consumed first and threaded into the sub-parsers.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // GLOBAL/LOCAL collapse into a tri-state: Some(true) = GLOBAL,
        // Some(false) = LOCAL, None = neither.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is only recognized for DuckDB (used by CREATE SECRET).
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
        } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
            // Step back so the view parser sees MATERIALIZED/VIEW itself.
            self.prev_token();
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy()
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain()
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(or_alter, or_replace, false)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(or_alter, or_replace, true)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if or_replace {
            // OR REPLACE combines only with the object types listed above.
            self.expected(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension()
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role()
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector()
        } else {
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
4671
    /// Parses the body of CREATE SECRET (everything after the SECRET
    /// keyword): an optional name and/or `IN <storage>`, then a
    /// parenthesized `TYPE <t>[, key value, ...]` option list.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token() != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN <storage>`.
            if storage_specifier.is_none()
                && self.peek_token() != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive; absence of both
        // leaves the storage class unspecified (None).
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
4730
4731 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4733 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4734 if self.parse_keyword(Keyword::TABLE) {
4735 let table_name = self.parse_object_name(false)?;
4736 if self.peek_token().token != Token::EOF {
4737 if let Token::Word(word) = self.peek_token().token {
4738 if word.keyword == Keyword::OPTIONS {
4739 options = self.parse_options(Keyword::OPTIONS)?
4740 }
4741 };
4742
4743 if self.peek_token().token != Token::EOF {
4744 let (a, q) = self.parse_as_query()?;
4745 has_as = a;
4746 query = Some(q);
4747 }
4748
4749 Ok(Statement::Cache {
4750 table_flag,
4751 table_name,
4752 has_as,
4753 options,
4754 query,
4755 })
4756 } else {
4757 Ok(Statement::Cache {
4758 table_flag,
4759 table_name,
4760 has_as,
4761 options,
4762 query,
4763 })
4764 }
4765 } else {
4766 table_flag = Some(self.parse_object_name(false)?);
4767 if self.parse_keyword(Keyword::TABLE) {
4768 let table_name = self.parse_object_name(false)?;
4769 if self.peek_token() != Token::EOF {
4770 if let Token::Word(word) = self.peek_token().token {
4771 if word.keyword == Keyword::OPTIONS {
4772 options = self.parse_options(Keyword::OPTIONS)?
4773 }
4774 };
4775
4776 if self.peek_token() != Token::EOF {
4777 let (a, q) = self.parse_as_query()?;
4778 has_as = a;
4779 query = Some(q);
4780 }
4781
4782 Ok(Statement::Cache {
4783 table_flag,
4784 table_name,
4785 has_as,
4786 options,
4787 query,
4788 })
4789 } else {
4790 Ok(Statement::Cache {
4791 table_flag,
4792 table_name,
4793 has_as,
4794 options,
4795 query,
4796 })
4797 }
4798 } else {
4799 if self.peek_token() == Token::EOF {
4800 self.prev_token();
4801 }
4802 self.expected("a `TABLE` keyword", self.peek_token())
4803 }
4804 }
4805 }
4806
4807 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4809 match self.peek_token().token {
4810 Token::Word(word) => match word.keyword {
4811 Keyword::AS => {
4812 self.next_token();
4813 Ok((true, self.parse_query()?))
4814 }
4815 _ => Ok((false, self.parse_query()?)),
4816 },
4817 _ => self.expected("a QUERY statement", self.peek_token()),
4818 }
4819 }
4820
4821 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
4823 self.expect_keyword_is(Keyword::TABLE)?;
4824 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4825 let table_name = self.parse_object_name(false)?;
4826 Ok(Statement::UNCache {
4827 table_name,
4828 if_exists,
4829 })
4830 }
4831
4832 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
4834 self.expect_keyword_is(Keyword::TABLE)?;
4835 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4836 let table_name = self.parse_object_name(false)?;
4837 self.expect_keyword_is(Keyword::USING)?;
4838 let module_name = self.parse_identifier()?;
4839 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
4844 Ok(Statement::CreateVirtualTable {
4845 name: table_name,
4846 if_not_exists,
4847 module_name,
4848 module_args,
4849 })
4850 }
4851
    /// Parses the body of CREATE SCHEMA: the schema name (possibly with
    /// AUTHORIZATION), an optional `DEFAULT COLLATE <expr>`, and an
    /// optional OPTIONS(...) list.
    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let schema_name = self.parse_schema_name()?;

        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let options = if self.peek_keyword(Keyword::OPTIONS) {
            Some(self.parse_options(Keyword::OPTIONS)?)
        } else {
            None
        };

        Ok(Statement::CreateSchema {
            schema_name,
            if_not_exists,
            options,
            default_collate_spec,
        })
    }
4876
4877 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
4878 if self.parse_keyword(Keyword::AUTHORIZATION) {
4879 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
4880 } else {
4881 let name = self.parse_object_name(false)?;
4882
4883 if self.parse_keyword(Keyword::AUTHORIZATION) {
4884 Ok(SchemaName::NamedAuthorization(
4885 name,
4886 self.parse_identifier()?,
4887 ))
4888 } else {
4889 Ok(SchemaName::Simple(name))
4890 }
4891 }
4892 }
4893
4894 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
4895 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4896 let db_name = self.parse_object_name(false)?;
4897 let mut location = None;
4898 let mut managed_location = None;
4899 loop {
4900 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
4901 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
4902 Some(Keyword::MANAGEDLOCATION) => {
4903 managed_location = Some(self.parse_literal_string()?)
4904 }
4905 _ => break,
4906 }
4907 }
4908 Ok(Statement::CreateDatabase {
4909 db_name,
4910 if_not_exists: ine,
4911 location,
4912 managed_location,
4913 })
4914 }
4915
4916 pub fn parse_optional_create_function_using(
4917 &mut self,
4918 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
4919 if !self.parse_keyword(Keyword::USING) {
4920 return Ok(None);
4921 };
4922 let keyword =
4923 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
4924
4925 let uri = self.parse_literal_string()?;
4926
4927 match keyword {
4928 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
4929 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
4930 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
4931 _ => self.expected(
4932 "JAR, FILE or ARCHIVE, got {:?}",
4933 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
4934 ),
4935 }
4936 }
4937
    /// Parses CREATE FUNCTION after the FUNCTION keyword, delegating to the
    /// dialect-specific grammar; dialects without a CREATE FUNCTION form
    /// fall through to a generic "object type" error.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
        } else if dialect_of!(self is DuckDbDialect) {
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
        } else {
            // Rewind so the error points at the FUNCTION keyword itself.
            self.prev_token();
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
4959
    /// Parses a PostgreSQL `CREATE FUNCTION` statement.
    ///
    /// Assumes `CREATE [OR REPLACE] [TEMPORARY] FUNCTION` has already been
    /// consumed; `or_replace` / `temporary` carry what was seen.
    ///
    /// See <https://www.postgresql.org/docs/current/sql-createfunction.html>
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // An empty parameter list `()` is valid; only parse arguments when
        // the next token is not the closing paren.
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        // Optional `RETURNS <type>`.
        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Accumulates the function attributes, which Postgres allows in any
        // order but each at most once.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
        }
        let mut body = Body::default();
        loop {
            // Rejects a second occurrence of an attribute group with a
            // "<name> specified more than once" error.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
                    self.parse_create_function_body_string()?,
                ));
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                // IMMUTABLE / STABLE / VOLATILE share one slot: at most one
                // volatility marker is allowed.
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                // The three NULL-handling spellings also share one slot.
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::RETURN) {
                // `RETURN <expr>` and `AS ...` both occupy the body slot.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No recognized attribute keyword: the attribute list ends.
                break;
            }
        }

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        }))
    }
5086
5087 fn parse_hive_create_function(
5091 &mut self,
5092 or_replace: bool,
5093 temporary: bool,
5094 ) -> Result<Statement, ParserError> {
5095 let name = self.parse_object_name(false)?;
5096 self.expect_keyword_is(Keyword::AS)?;
5097
5098 let as_ = self.parse_create_function_body_string()?;
5099 let using = self.parse_optional_create_function_using()?;
5100
5101 Ok(Statement::CreateFunction(CreateFunction {
5102 or_alter: false,
5103 or_replace,
5104 temporary,
5105 name,
5106 function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
5107 using,
5108 if_not_exists: false,
5109 args: None,
5110 return_type: None,
5111 behavior: None,
5112 called_on_null: None,
5113 parallel: None,
5114 language: None,
5115 determinism_specifier: None,
5116 options: None,
5117 remote_connection: None,
5118 }))
5119 }
5120
    /// Parses a BigQuery `CREATE FUNCTION` statement.
    ///
    /// Assumes `CREATE [OR REPLACE] [TEMPORARY] FUNCTION` has already been
    /// consumed; `or_replace` / `temporary` carry what was seen.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        // Optional `RETURNS <type>`.
        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Optional `DETERMINISTIC` / `NOT DETERMINISTIC`.
        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        // Optional `LANGUAGE <lang>` (e.g. `js` for JavaScript UDFs).
        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        // Optional `REMOTE WITH CONNECTION <name>` for remote functions.
        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // `OPTIONS(...)` may appear either before or after the `AS` body;
        // try the "before AS" position first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions have no body; otherwise `AS <expr>` is required.
        // Which side of the body OPTIONS appeared on is recorded in the
        // variant (AsBeforeOptions / AsAfterOptions) so the statement can be
        // round-tripped faithfully.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions(expr))
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
        }))
    }
5195
5196 fn parse_mssql_create_function(
5200 &mut self,
5201 or_alter: bool,
5202 or_replace: bool,
5203 temporary: bool,
5204 ) -> Result<Statement, ParserError> {
5205 let (name, args) = self.parse_create_function_name_and_params()?;
5206
5207 self.expect_keyword(Keyword::RETURNS)?;
5208 let return_type = Some(self.parse_data_type()?);
5209
5210 self.expect_keyword_is(Keyword::AS)?;
5211
5212 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5213 let statements = self.parse_statement_list(&[Keyword::END])?;
5214 let end_token = self.expect_keyword(Keyword::END)?;
5215
5216 let function_body = Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5217 begin_token: AttachedToken(begin_token),
5218 statements,
5219 end_token: AttachedToken(end_token),
5220 }));
5221
5222 Ok(Statement::CreateFunction(CreateFunction {
5223 or_alter,
5224 or_replace,
5225 temporary,
5226 if_not_exists: false,
5227 name,
5228 args: Some(args),
5229 return_type,
5230 function_body,
5231 language: None,
5232 determinism_specifier: None,
5233 options: None,
5234 remote_connection: None,
5235 using: None,
5236 behavior: None,
5237 called_on_null: None,
5238 parallel: None,
5239 }))
5240 }
5241
5242 fn parse_create_function_name_and_params(
5243 &mut self,
5244 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5245 let name = self.parse_object_name(false)?;
5246 let parse_function_param =
5247 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5248 let name = parser.parse_identifier()?;
5249 let data_type = parser.parse_data_type()?;
5250 Ok(OperateFunctionArg {
5251 mode: None,
5252 name: Some(name),
5253 data_type,
5254 default_expr: None,
5255 })
5256 };
5257 self.expect_token(&Token::LParen)?;
5258 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5259 self.expect_token(&Token::RParen)?;
5260 Ok((name, args))
5261 }
5262
    /// Parses a single function argument in a Postgres-style `CREATE FUNCTION`
    /// parameter list: `[IN | OUT | INOUT] [name] type [DEFAULT expr | = expr]`.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        // Optional argument mode.
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else {
            None
        };

        // The argument name is optional, so first parse a data type; if it
        // came back as a custom (unrecognized) type, that token was really
        // the argument name and the actual data type follows it.
        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        if let DataType::Custom(n, _) = &data_type {
            // NOTE(review): assumes a custom type's ObjectName always has at
            // least one part — `n.0[0]` would panic otherwise; confirm
            // parse_data_type never yields an empty name.
            match n.0[0].clone() {
                ObjectNamePart::Identifier(ident) => name = Some(ident),
            }
            data_type = self.parse_data_type()?;
        }

        // Optional default value, introduced by either `DEFAULT` or `=`.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
5298
5299 pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5305 if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5306 self.prev_token();
5307 return self.expected("an object type after DROP", self.peek_token());
5308 }
5309 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5310 let trigger_name = self.parse_object_name(false)?;
5311 let table_name = if self.parse_keyword(Keyword::ON) {
5312 Some(self.parse_object_name(false)?)
5313 } else {
5314 None
5315 };
5316 let option = self
5317 .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
5318 .map(|keyword| match keyword {
5319 Keyword::CASCADE => ReferentialAction::Cascade,
5320 Keyword::RESTRICT => ReferentialAction::Restrict,
5321 _ => unreachable!(),
5322 });
5323 Ok(Statement::DropTrigger {
5324 if_exists,
5325 trigger_name,
5326 table_name,
5327 option,
5328 })
5329 }
5330
    /// Parses a `CREATE TRIGGER` statement.
    ///
    /// Assumes the leading `CREATE [OR ALTER] [OR REPLACE] [CONSTRAINT]
    /// TRIGGER` keywords have already been consumed; the flags carry what
    /// was seen. Supported only on Postgres, MySQL, MsSql and the generic
    /// dialect.
    pub fn parse_create_trigger(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        is_constraint: bool,
    ) -> Result<Statement, ParserError> {
        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
            self.prev_token();
            return self.expected("an object type after CREATE", self.peek_token());
        }

        let name = self.parse_object_name(false)?;
        // BEFORE | AFTER | INSTEAD OF | FOR
        let period = self.parse_trigger_period()?;

        // One or more events joined by OR, e.g. `INSERT OR UPDATE OF col`.
        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `FROM <referenced table>` (constraint triggers).
        // NOTE(review): a parse failure after FROM is silently turned into
        // "no referenced table" via `.ok()` — confirm that is intentional.
        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
            self.parse_object_name(true).ok()
        } else {
            None
        };

        let characteristics = self.parse_constraint_characteristics()?;

        // Optional `REFERENCING {OLD|NEW} TABLE [AS] name ...` entries,
        // collected until no further entry parses.
        let mut referencing = vec![];
        if self.parse_keyword(Keyword::REFERENCING) {
            while let Some(refer) = self.parse_trigger_referencing()? {
                referencing.push(refer);
            }
        }

        // `FOR [EACH] {ROW | STATEMENT}`.
        self.expect_keyword_is(Keyword::FOR)?;
        let include_each = self.parse_keyword(Keyword::EACH);
        let trigger_object =
            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
                Keyword::ROW => TriggerObject::Row,
                Keyword::STATEMENT => TriggerObject::Statement,
                _ => unreachable!(),
            };

        // Optional `WHEN <condition>`.
        let condition = self
            .parse_keyword(Keyword::WHEN)
            .then(|| self.parse_expr())
            .transpose()?;

        // `EXECUTE {FUNCTION | PROCEDURE} func(...)`.
        self.expect_keyword_is(Keyword::EXECUTE)?;

        let exec_body = self.parse_trigger_exec_body()?;

        Ok(Statement::CreateTrigger {
            or_alter,
            or_replace,
            is_constraint,
            name,
            period,
            events,
            table_name,
            referenced_table_name,
            referencing,
            trigger_object,
            include_each,
            condition,
            exec_body: Some(exec_body),
            statements: None,
            characteristics,
        })
    }
5400
5401 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5402 Ok(
5403 match self.expect_one_of_keywords(&[
5404 Keyword::FOR,
5405 Keyword::BEFORE,
5406 Keyword::AFTER,
5407 Keyword::INSTEAD,
5408 ])? {
5409 Keyword::FOR => TriggerPeriod::For,
5410 Keyword::BEFORE => TriggerPeriod::Before,
5411 Keyword::AFTER => TriggerPeriod::After,
5412 Keyword::INSTEAD => self
5413 .expect_keyword_is(Keyword::OF)
5414 .map(|_| TriggerPeriod::InsteadOf)?,
5415 _ => unreachable!(),
5416 },
5417 )
5418 }
5419
5420 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5421 Ok(
5422 match self.expect_one_of_keywords(&[
5423 Keyword::INSERT,
5424 Keyword::UPDATE,
5425 Keyword::DELETE,
5426 Keyword::TRUNCATE,
5427 ])? {
5428 Keyword::INSERT => TriggerEvent::Insert,
5429 Keyword::UPDATE => {
5430 if self.parse_keyword(Keyword::OF) {
5431 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5432 TriggerEvent::Update(cols)
5433 } else {
5434 TriggerEvent::Update(vec![])
5435 }
5436 }
5437 Keyword::DELETE => TriggerEvent::Delete,
5438 Keyword::TRUNCATE => TriggerEvent::Truncate,
5439 _ => unreachable!(),
5440 },
5441 )
5442 }
5443
5444 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5445 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5446 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5447 TriggerReferencingType::OldTable
5448 }
5449 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5450 TriggerReferencingType::NewTable
5451 }
5452 _ => {
5453 return Ok(None);
5454 }
5455 };
5456
5457 let is_as = self.parse_keyword(Keyword::AS);
5458 let transition_relation_name = self.parse_object_name(false)?;
5459 Ok(Some(TriggerReferencing {
5460 refer_type,
5461 is_as,
5462 transition_relation_name,
5463 }))
5464 }
5465
5466 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5467 Ok(TriggerExecBody {
5468 exec_type: match self
5469 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5470 {
5471 Keyword::FUNCTION => TriggerExecBodyType::Function,
5472 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5473 _ => unreachable!(),
5474 },
5475 func_desc: self.parse_function_desc()?,
5476 })
5477 }
5478
5479 pub fn parse_create_macro(
5480 &mut self,
5481 or_replace: bool,
5482 temporary: bool,
5483 ) -> Result<Statement, ParserError> {
5484 if dialect_of!(self is DuckDbDialect | GenericDialect) {
5485 let name = self.parse_object_name(false)?;
5486 self.expect_token(&Token::LParen)?;
5487 let args = if self.consume_token(&Token::RParen) {
5488 self.prev_token();
5489 None
5490 } else {
5491 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5492 };
5493
5494 self.expect_token(&Token::RParen)?;
5495 self.expect_keyword_is(Keyword::AS)?;
5496
5497 Ok(Statement::CreateMacro {
5498 or_replace,
5499 temporary,
5500 name,
5501 args,
5502 definition: if self.parse_keyword(Keyword::TABLE) {
5503 MacroDefinition::Table(self.parse_query()?)
5504 } else {
5505 MacroDefinition::Expr(self.parse_expr()?)
5506 },
5507 })
5508 } else {
5509 self.prev_token();
5510 self.expected("an object type after CREATE", self.peek_token())
5511 }
5512 }
5513
5514 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5515 let name = self.parse_identifier()?;
5516
5517 let default_expr =
5518 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5519 Some(self.parse_expr()?)
5520 } else {
5521 None
5522 };
5523 Ok(MacroArg { name, default_expr })
5524 }
5525
5526 pub fn parse_create_external_table(
5527 &mut self,
5528 or_replace: bool,
5529 ) -> Result<Statement, ParserError> {
5530 self.expect_keyword_is(Keyword::TABLE)?;
5531 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5532 let table_name = self.parse_object_name(false)?;
5533 let (columns, constraints) = self.parse_columns()?;
5534
5535 let hive_distribution = self.parse_hive_distribution()?;
5536 let hive_formats = self.parse_hive_formats()?;
5537
5538 let file_format = if let Some(ff) = &hive_formats.storage {
5539 match ff {
5540 HiveIOFormat::FileFormat { format } => Some(*format),
5541 _ => None,
5542 }
5543 } else {
5544 None
5545 };
5546 let location = hive_formats.location.clone();
5547 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5548 let table_options = if !table_properties.is_empty() {
5549 CreateTableOptions::TableProperties(table_properties)
5550 } else {
5551 CreateTableOptions::None
5552 };
5553 Ok(CreateTableBuilder::new(table_name)
5554 .columns(columns)
5555 .constraints(constraints)
5556 .hive_distribution(hive_distribution)
5557 .hive_formats(Some(hive_formats))
5558 .table_options(table_options)
5559 .or_replace(or_replace)
5560 .if_not_exists(if_not_exists)
5561 .external(true)
5562 .file_format(file_format)
5563 .location(location)
5564 .build())
5565 }
5566
5567 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5568 let next_token = self.next_token();
5569 match &next_token.token {
5570 Token::Word(w) => match w.keyword {
5571 Keyword::AVRO => Ok(FileFormat::AVRO),
5572 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5573 Keyword::ORC => Ok(FileFormat::ORC),
5574 Keyword::PARQUET => Ok(FileFormat::PARQUET),
5575 Keyword::RCFILE => Ok(FileFormat::RCFILE),
5576 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5577 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5578 _ => self.expected("fileformat", next_token),
5579 },
5580 _ => self.expected("fileformat", next_token),
5581 }
5582 }
5583
5584 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5585 let next_token = self.next_token();
5586 match &next_token.token {
5587 Token::Word(w) => match w.keyword {
5588 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5589 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5590 Keyword::JSON => Ok(AnalyzeFormat::JSON),
5591 _ => self.expected("fileformat", next_token),
5592 },
5593 _ => self.expected("fileformat", next_token),
5594 }
5595 }
5596
    /// Parses a `CREATE [MATERIALIZED] VIEW` statement.
    ///
    /// Assumes `CREATE [OR ALTER] [OR REPLACE] [TEMPORARY]` (and any MySQL
    /// view parameters, passed in via `create_view_params`) have already
    /// been consumed. Clause availability is dialect-gated below.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<Statement, ParserError> {
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // `IF NOT EXISTS` only in BigQuery / SQLite / generic.
        let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect)
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        // BigQuery allows unquoted hyphenated names (project-id style).
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let columns = self.parse_view_columns()?;
        // `options` holds at most one of WITH (...) or OPTIONS (...);
        // a later non-empty OPTIONS overwrites an earlier WITH.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        // Optional `CLUSTER BY (col, ...)`.
        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style `OPTIONS(...)`.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse `TO <table>` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // Snowflake `COMMENT = '...'`.
        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift `WITH NO SCHEMA BINDING` trails the query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(Statement::CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            to,
            params: create_view_params,
        })
    }
5680
    /// Parses the optional MySQL view parameters that sit between `CREATE`
    /// and `VIEW`: `[ALGORITHM = {UNDEFINED|MERGE|TEMPTABLE}]
    /// [DEFINER = user] [SQL SECURITY {DEFINER|INVOKER}]`.
    ///
    /// Returns `Ok(None)` when none of the three clauses is present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Defensive arm: expect_one_of_keywords only returns the
                    // listed keywords, but report a precise error if not.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Defensive arm, as above.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only materialize the params struct when at least one clause
        // actually appeared.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
5738
    /// Parses a `CREATE ROLE` statement (`CREATE ROLE` already consumed).
    ///
    /// The set of accepted role options depends on the dialect: MsSql only
    /// accepts `AUTHORIZATION`, Postgres accepts the full option list, and
    /// other dialects accept none. Each option may appear at most once;
    /// duplicates produce a parser error carrying the token location.
    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // An optional `WITH` may precede the option list; it is consumed
        // and discarded.
        let _ = self.parse_keyword(Keyword::WITH);
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Option accumulators. `Option<bool>` fields are None until the
        // option (or its NO-prefixed negation) is seen; Vec fields stay
        // empty until their list option is seen.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, used in duplicate-option
            // error messages; falls back to 0:0 if the index is out of range.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    // Only valid as `CONNECTION LIMIT <n>`.
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    // Only valid as `VALID UNTIL <timestamp>`.
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN ROLE ...` and `IN GROUP ...` are tracked separately.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected("ROLE or GROUP after IN", self.peek_token())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // parse_one_of_keywords only yields keywords from the list
                // above, so any other value ends the loop.
                _ => break,
            }?
        }

        Ok(Statement::CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
5962
5963 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
5964 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
5965 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
5966 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
5967 Some(Keyword::SESSION_USER) => Owner::SessionUser,
5968 Some(_) => unreachable!(),
5969 None => {
5970 match self.parse_identifier() {
5971 Ok(ident) => Owner::Ident(ident),
5972 Err(e) => {
5973 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
5974 }
5975 }
5976 }
5977 };
5978 Ok(owner)
5979 }
5980
5981 fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
5983 let name = self.parse_object_name(false)?;
5984 self.expect_keyword_is(Keyword::AS)?;
5985 let data_type = self.parse_data_type()?;
5986 let collation = if self.parse_keyword(Keyword::COLLATE) {
5987 Some(self.parse_identifier()?)
5988 } else {
5989 None
5990 };
5991 let default = if self.parse_keyword(Keyword::DEFAULT) {
5992 Some(self.parse_expr()?)
5993 } else {
5994 None
5995 };
5996 let mut constraints = Vec::new();
5997 while let Some(constraint) = self.parse_optional_table_constraint()? {
5998 constraints.push(constraint);
5999 }
6000
6001 Ok(Statement::CreateDomain(CreateDomain {
6002 name,
6003 data_type,
6004 collation,
6005 default,
6006 constraints,
6007 }))
6008 }
6009
    /// Parses a Postgres `CREATE POLICY` statement
    /// (`CREATE POLICY` already consumed):
    /// `name ON table [AS {PERMISSIVE|RESTRICTIVE}]
    ///  [FOR {ALL|SELECT|INSERT|UPDATE|DELETE}] [TO role, ...]
    ///  [USING (expr)] [WITH CHECK (expr)]`.
    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `AS PERMISSIVE | RESTRICTIVE`.
        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                _ => unreachable!(),
            })
        } else {
            None
        };

        // Optional `FOR <command>`.
        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                _ => unreachable!(),
            })
        } else {
            None
        };

        // Optional `TO role, ...` (role names or the special owner forms).
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // Optional `USING (expr)` — the visibility predicate.
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // Optional `WITH CHECK (expr)` — the write-check predicate.
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
6090
6091 pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6101 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6102 let name = self.parse_identifier()?;
6103
6104 let connector_type = if self.parse_keyword(Keyword::TYPE) {
6105 Some(self.parse_literal_string()?)
6106 } else {
6107 None
6108 };
6109
6110 let url = if self.parse_keyword(Keyword::URL) {
6111 Some(self.parse_literal_string()?)
6112 } else {
6113 None
6114 };
6115
6116 let comment = self.parse_optional_inline_comment()?;
6117
6118 let with_dcproperties =
6119 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6120 properties if !properties.is_empty() => Some(properties),
6121 _ => None,
6122 };
6123
6124 Ok(Statement::CreateConnector(CreateConnector {
6125 name,
6126 if_not_exists,
6127 connector_type,
6128 url,
6129 comment,
6130 with_dcproperties,
6131 }))
6132 }
6133
    /// Parse a `DROP` statement. The `DROP` keyword itself has already been
    /// consumed; this dispatches on the object-type keyword that follows.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // MySQL/DuckDB-style `DROP TEMPORARY ...`; DuckDB additionally
        // allows `DROP PERSISTENT SECRET ...`.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::FUNCTION) {
            // The object kinds below have their own grammar (argument
            // lists, `ON <table>`, `FROM <storage>`, ...) and build their
            // own AST nodes, so they are handled by dedicated sub-parsers.
            return self.parse_drop_function();
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy();
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain();
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger();
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else {
            return self.expected(
                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, or MATERIALIZED VIEW after DROP",
                self.peek_token(),
            );
        };
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        // Several objects may be dropped at once: `DROP TABLE a, b`.
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Remember where the trailing options start, for error reporting.
        let loc = self.peek_token().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
        })
    }
6211
6212 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6213 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6214 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6215 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6216 _ => None,
6217 }
6218 }
6219
6220 fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6225 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6226 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6227 let drop_behavior = self.parse_optional_drop_behavior();
6228 Ok(Statement::DropFunction {
6229 if_exists,
6230 func_desc,
6231 drop_behavior,
6232 })
6233 }
6234
6235 fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6241 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6242 let name = self.parse_identifier()?;
6243 self.expect_keyword_is(Keyword::ON)?;
6244 let table_name = self.parse_object_name(false)?;
6245 let drop_behavior = self.parse_optional_drop_behavior();
6246 Ok(Statement::DropPolicy {
6247 if_exists,
6248 name,
6249 table_name,
6250 drop_behavior,
6251 })
6252 }
6253 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6259 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6260 let name = self.parse_identifier()?;
6261 Ok(Statement::DropConnector { if_exists, name })
6262 }
6263
6264 fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6268 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6269 let name = self.parse_object_name(false)?;
6270 let drop_behavior = self.parse_optional_drop_behavior();
6271 Ok(Statement::DropDomain(DropDomain {
6272 if_exists,
6273 name,
6274 drop_behavior,
6275 }))
6276 }
6277
6278 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6283 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6284 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6285 let drop_behavior = self.parse_optional_drop_behavior();
6286 Ok(Statement::DropProcedure {
6287 if_exists,
6288 proc_desc,
6289 drop_behavior,
6290 })
6291 }
6292
6293 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6294 let name = self.parse_object_name(false)?;
6295
6296 let args = if self.consume_token(&Token::LParen) {
6297 if self.consume_token(&Token::RParen) {
6298 None
6299 } else {
6300 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6301 self.expect_token(&Token::RParen)?;
6302 Some(args)
6303 }
6304 } else {
6305 None
6306 };
6307
6308 Ok(FunctionDesc { name, args })
6309 }
6310
    /// Parse a DuckDB-style `DROP ... SECRET` statement (the `SECRET`
    /// keyword has already been consumed). `temporary` / `persistent`
    /// report which qualifier, if any, preceded `SECRET` in the caller.
    fn parse_drop_secret(
        &mut self,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        // Optional `FROM <storage>` clause. NOTE(review): a failed
        // identifier parse after FROM is silently discarded (`.ok()`)
        // instead of reported — presumably deliberate best-effort parsing,
        // but worth confirming.
        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
            self.parse_identifier().ok()
        } else {
            None
        };
        // TEMPORARY and PERSISTENT are mutually exclusive; both set (the
        // `(true, true)` case) is rejected here.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
        };

        Ok(Statement::DropSecret {
            if_exists,
            temporary: temp,
            name,
            storage_specifier,
        })
    }
6338
    /// Parse a `DECLARE` statement.
    ///
    /// BigQuery, Snowflake and MsSql each have their own `DECLARE` grammar
    /// and are delegated to dedicated sub-parsers. The fallback below parses
    /// the standard cursor-declaration form:
    /// `DECLARE name [BINARY] [INSENSITIVE | ASENSITIVE] [[NO] SCROLL]
    ///  CURSOR [WITH HOLD | WITHOUT HOLD] FOR <query>`
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // For this form `binary` is always `Some(..)`: absence of the
        // keyword is recorded as `Some(false)`, not `None`.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD` clause.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    _ => unreachable!(),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
6411
    /// Parse a BigQuery `DECLARE` statement:
    /// `DECLARE <name>[, ...] [<data type>] [DEFAULT <expr>]`,
    /// where at least one of the data type and the `DEFAULT` clause must be
    /// present.
    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
        let names = self.parse_comma_separated(Parser::parse_identifier)?;

        // `DEFAULT` directly after the names means the data type was
        // omitted.
        let data_type = match self.peek_token().token {
            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
            _ => Some(self.parse_data_type()?),
        };

        let expr = if data_type.is_some() {
            // With an explicit data type the DEFAULT clause is optional.
            if self.parse_keyword(Keyword::DEFAULT) {
                Some(self.parse_expr()?)
            } else {
                None
            }
        } else {
            // Without a data type the DEFAULT clause is mandatory.
            self.expect_keyword_is(Keyword::DEFAULT)?;
            Some(self.parse_expr()?)
        };

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names,
                data_type,
                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
                declare_type: None,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query: None,
            }],
        })
    }
6454
    /// Parse a Snowflake scripting `DECLARE` statement, which may declare
    /// several items separated by `;`. Each item is one of:
    /// * a cursor:     `<name> CURSOR FOR <query or expr>`
    /// * a result set: `<name> RESULTSET [initializer]`
    /// * an exception: `<name> EXCEPTION [(...)]`
    /// * a variable:   `<name> [<type>] [initializer]`
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `FOR SELECT ...` binds a query; anything else is
                    // treated as an expression.
                    match self.peek_token().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        // Plain `<name> RESULTSET;` without an initializer.
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    let assigned_expr = if self.peek_token().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        // Plain `<name> EXCEPTION;` without a payload.
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Variable declaration: try an initializer first; else a
                    // word must be a data type, optionally followed by its
                    // own initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = self.peek_token().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match self.peek_token().token {
                    // A non-keyword word after `;` begins another
                    // declaration in the same DECLARE statement.
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        // The `;` ended the whole DECLARE statement:
                        // put it back for the caller.
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
6570
6571 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
6583 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
6584
6585 Ok(Statement::Declare { stmts })
6586 }
6587
6588 pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
6599 let name = {
6600 let ident = self.parse_identifier()?;
6601 if !ident.value.starts_with('@')
6602 && !matches!(
6603 self.peek_token().token,
6604 Token::Word(w) if w.keyword == Keyword::CURSOR
6605 )
6606 {
6607 Err(ParserError::TokenizerError(
6608 "Invalid MsSql variable declaration.".to_string(),
6609 ))
6610 } else {
6611 Ok(ident)
6612 }
6613 }?;
6614
6615 let (declare_type, data_type) = match self.peek_token().token {
6616 Token::Word(w) => match w.keyword {
6617 Keyword::CURSOR => {
6618 self.next_token();
6619 (Some(DeclareType::Cursor), None)
6620 }
6621 Keyword::AS => {
6622 self.next_token();
6623 (None, Some(self.parse_data_type()?))
6624 }
6625 _ => (None, Some(self.parse_data_type()?)),
6626 },
6627 _ => (None, Some(self.parse_data_type()?)),
6628 };
6629
6630 let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
6631 self.next_token();
6632 let query = Some(self.parse_query()?);
6633 (query, None)
6634 } else {
6635 let assignment = self.parse_mssql_variable_declaration_expression()?;
6636 (None, assignment)
6637 };
6638
6639 Ok(Declare {
6640 names: vec![name],
6641 data_type,
6642 assignment,
6643 declare_type,
6644 binary: None,
6645 sensitive: None,
6646 scroll: None,
6647 hold: None,
6648 for_query,
6649 })
6650 }
6651
6652 pub fn parse_snowflake_variable_declaration_expression(
6660 &mut self,
6661 ) -> Result<Option<DeclareAssignment>, ParserError> {
6662 Ok(match self.peek_token().token {
6663 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
6664 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
6666 }
6667 Token::Assignment => {
6668 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
6670 self.parse_expr()?,
6671 )))
6672 }
6673 _ => None,
6674 })
6675 }
6676
6677 pub fn parse_mssql_variable_declaration_expression(
6684 &mut self,
6685 ) -> Result<Option<DeclareAssignment>, ParserError> {
6686 Ok(match self.peek_token().token {
6687 Token::Eq => {
6688 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
6690 self.parse_expr()?,
6691 )))
6692 }
6693 _ => None,
6694 })
6695 }
6696
    /// Parse a `FETCH` statement:
    /// `FETCH [<direction>] FROM | IN <cursor> [INTO <target>]`.
    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
        // Optional direction; with no direction keyword a bare count is
        // parsed (`FETCH 5 FROM cur`).
        let direction = if self.parse_keyword(Keyword::NEXT) {
            FetchDirection::Next
        } else if self.parse_keyword(Keyword::PRIOR) {
            FetchDirection::Prior
        } else if self.parse_keyword(Keyword::FIRST) {
            FetchDirection::First
        } else if self.parse_keyword(Keyword::LAST) {
            FetchDirection::Last
        } else if self.parse_keyword(Keyword::ABSOLUTE) {
            FetchDirection::Absolute {
                limit: self.parse_number_value()?.value,
            }
        } else if self.parse_keyword(Keyword::RELATIVE) {
            FetchDirection::Relative {
                limit: self.parse_number_value()?.value,
            }
        } else if self.parse_keyword(Keyword::FORWARD) {
            // FORWARD/BACKWARD take either ALL or a mandatory count.
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::ForwardAll
            } else {
                FetchDirection::Forward {
                    limit: Some(self.parse_number_value()?.value),
                }
            }
        } else if self.parse_keyword(Keyword::BACKWARD) {
            if self.parse_keyword(Keyword::ALL) {
                FetchDirection::BackwardAll
            } else {
                FetchDirection::Backward {
                    limit: Some(self.parse_number_value()?.value),
                }
            }
        } else if self.parse_keyword(Keyword::ALL) {
            FetchDirection::All
        } else {
            FetchDirection::Count {
                limit: self.parse_number_value()?.value,
            }
        };

        // Either FROM or IN must introduce the cursor name.
        let position = if self.peek_keyword(Keyword::FROM) {
            self.expect_keyword(Keyword::FROM)?;
            FetchPosition::From
        } else if self.peek_keyword(Keyword::IN) {
            self.expect_keyword(Keyword::IN)?;
            FetchPosition::In
        } else {
            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
        };

        let name = self.parse_identifier()?;

        // Optional `INTO <target>` clause.
        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        Ok(Statement::Fetch {
            name,
            direction,
            position,
            into,
        })
    }
6766
6767 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
6768 let object_type = if self.parse_keyword(Keyword::ALL) {
6769 DiscardObject::ALL
6770 } else if self.parse_keyword(Keyword::PLANS) {
6771 DiscardObject::PLANS
6772 } else if self.parse_keyword(Keyword::SEQUENCES) {
6773 DiscardObject::SEQUENCES
6774 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
6775 DiscardObject::TEMP
6776 } else {
6777 return self.expected(
6778 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
6779 self.peek_token(),
6780 );
6781 };
6782 Ok(Statement::Discard { object_type })
6783 }
6784
    /// Parse a `CREATE [UNIQUE] INDEX` statement; `CREATE [UNIQUE] INDEX`
    /// has already been consumed and `unique` reports whether `UNIQUE`
    /// appeared.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        // The index name is optional (`CREATE INDEX ON t (...)`) unless
        // IF NOT EXISTS was given, in which case a name must follow.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };
        let table_name = self.parse_object_name(false)?;
        // Optional index method, e.g. `USING btree`.
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_index_type()?)
        } else {
            None
        };

        // Mandatory parenthesized list of indexed expressions.
        self.expect_token(&Token::LParen)?;
        let columns = self.parse_comma_separated(Parser::parse_create_index_expr)?;
        self.expect_token(&Token::RParen)?;

        // Optional `INCLUDE (col, ...)` clause (non-key columns).
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS [NOT] DISTINCT` → Some(true) for DISTINCT, Some(false)
        // for NOT DISTINCT, None when the clause is absent.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Dialect-gated `WITH (param, ...)` storage parameters.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Partial-index predicate: `WHERE <expr>`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::CreateIndex(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
        }))
    }
6854
6855 pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
6856 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6857 let name = self.parse_identifier()?;
6858
6859 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
6860 let schema = if self.parse_keyword(Keyword::SCHEMA) {
6861 Some(self.parse_identifier()?)
6862 } else {
6863 None
6864 };
6865
6866 let version = if self.parse_keyword(Keyword::VERSION) {
6867 Some(self.parse_identifier()?)
6868 } else {
6869 None
6870 };
6871
6872 let cascade = self.parse_keyword(Keyword::CASCADE);
6873
6874 (schema, version, cascade)
6875 } else {
6876 (None, None, false)
6877 };
6878
6879 Ok(Statement::CreateExtension {
6880 name,
6881 if_not_exists,
6882 schema,
6883 version,
6884 cascade,
6885 })
6886 }
6887
6888 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
6890 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6891 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
6892 let cascade_or_restrict =
6893 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
6894 Ok(Statement::DropExtension {
6895 names,
6896 if_exists,
6897 cascade_or_restrict: cascade_or_restrict
6898 .map(|k| match k {
6899 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
6900 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
6901 _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
6902 })
6903 .transpose()?,
6904 })
6905 }
6906
6907 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
6909 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
6910 self.expect_token(&Token::LParen)?;
6911 let columns = self.parse_comma_separated(Parser::parse_column_def)?;
6912 self.expect_token(&Token::RParen)?;
6913 Ok(HiveDistributionStyle::PARTITIONED { columns })
6914 } else {
6915 Ok(HiveDistributionStyle::NONE)
6916 }
6917 }
6918
    /// Parse Hive storage clauses (`ROW FORMAT`, `STORED AS`, `LOCATION`,
    /// `WITH SERDEPROPERTIES`), in any order, looping until no clause
    /// matches.
    pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
        let mut hive_format = HiveFormat::default();
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    hive_format.row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // `STORED AS INPUTFORMAT <expr> OUTPUTFORMAT <expr>`
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.storage = Some(HiveIOFormat::IOF {
                            input_format,
                            output_format,
                        });
                    } else {
                        // `STORED AS <file format>`
                        let format = self.parse_file_format()?;
                        hive_format.storage = Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.location = Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Rewind so the options helper can match the whole
                    // `WITH SERDEPROPERTIES (...)` sequence itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format.serde_properties = Some(properties);
                    } else {
                        // A WITH that wasn't SERDEPROPERTIES: stop and
                        // leave it for the caller.
                        break;
                    }
                }
                None => break,
                _ => break,
            }
        }

        Ok(hive_format)
    }
6966
    /// Parse a Hive row format, after the `ROW` keyword has been consumed:
    /// either `FORMAT SERDE '<class>'` or `FORMAT DELIMITED` followed by
    /// any number of delimiter clauses (`FIELDS TERMINATED BY`,
    /// `COLLECTION ITEMS TERMINATED BY`, `MAP KEYS TERMINATED BY`,
    /// `LINES TERMINATED BY`, `NULL DEFINED AS`).
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED (or bare FORMAT): collect delimiter clauses
                // until a keyword is not followed by its expected suffix.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                // FIELDS may also carry an `ESCAPED BY`
                                // delimiter.
                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
7062
7063 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7064 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7065 Ok(Some(self.parse_identifier()?))
7066 } else {
7067 Ok(None)
7068 }
7069 }
7070
    /// Parse the remainder of a `CREATE TABLE` statement, after the
    /// `CREATE [OR REPLACE] [TEMPORARY | GLOBAL | TRANSIENT] TABLE` prefix
    /// has been consumed; the prefix flags are passed in as arguments.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<Statement, ParserError> {
        // BigQuery allows unquoted hyphens in table names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // Optional `ON CLUSTER` clause (e.g. ClickHouse).
        let on_cluster = self.parse_optional_on_cluster()?;

        // `LIKE`/`ILIKE <table>`. NOTE(review): a failed name parse here is
        // silently discarded (`.ok()`) rather than reported — confirm this
        // is intentional before changing.
        let like = if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Snowflake-style `CLONE <table>`; same silent `.ok()` as LIKE.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Parenthesized column definitions and table constraints.
        let (columns, constraints) = self.parse_columns()?;
        // Hive allows a COMMENT after the closing paren of the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // `WITHOUT ROWID` (accepted regardless of dialect here).
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive-specific storage/distribution clauses.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse/Generic `PRIMARY KEY <expr>` after the column list.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // `ORDER BY`, either one bare expression or a (possibly empty)
        // parenthesized list.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token() != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        let strict = self.parse_keyword(Keyword::STRICT);

        // `AS <query>`, or a bare `SELECT ...` suffix on dialects that
        // support CREATE TABLE ... SELECT.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            // Rewind so the query parser sees the SELECT keyword itself.
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(Some(hive_formats))
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .build())
    }
7192
7193 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
7194 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
7195 Ok(OnCommit::DeleteRows)
7196 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
7197 Ok(OnCommit::PreserveRows)
7198 } else if self.parse_keywords(&[Keyword::DROP]) {
7199 Ok(OnCommit::Drop)
7200 } else {
7201 parser_err!(
7202 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
7203 self.peek_token()
7204 )
7205 }
7206 }
7207
    /// Parse the optional trailing configuration of `CREATE TABLE`:
    /// `INHERITS (...)`, option clauses (`WITH (...)`,
    /// `TBLPROPERTIES (...)`, `OPTIONS (...)`, or plain options), plus
    /// `PARTITION BY` and `CLUSTER BY`. When several option clauses are
    /// present, later ones overwrite earlier ones in `table_options`.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // PostgreSQL-style `INHERITS (parent, ...)`.
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // `WITH (...)` options (empty when the keyword is absent).
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive `TBLPROPERTIES (...)`; replaces any WITH options above.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                ));
            };

            // BigQuery `OPTIONS (...)`; replaces any options parsed above.
            if let Token::Word(word) = self.peek_token().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain `key = value` options (e.g. ENGINE=...) when
        // nothing above set the options; not applicable to Hive.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
7272
7273 fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
7274 if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
7277 return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
7278 }
7279
7280 if self.parse_keywords(&[Keyword::COMMENT]) {
7283 let has_eq = self.consume_token(&Token::Eq);
7284 let value = self.next_token();
7285
7286 let comment = match (has_eq, value.token) {
7287 (true, Token::SingleQuotedString(s)) => {
7288 Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
7289 }
7290 (false, Token::SingleQuotedString(s)) => {
7291 Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
7292 }
7293 (_, token) => {
7294 self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
7295 }
7296 };
7297 return comment;
7298 }
7299
7300 if self.parse_keywords(&[Keyword::ENGINE]) {
7303 let _ = self.consume_token(&Token::Eq);
7304 let value = self.next_token();
7305
7306 let engine = match value.token {
7307 Token::Word(w) => {
7308 let parameters = if self.peek_token() == Token::LParen {
7309 self.parse_parenthesized_identifiers()?
7310 } else {
7311 vec![]
7312 };
7313
7314 Ok(Some(SqlOption::NamedParenthesizedList(
7315 NamedParenthesizedList {
7316 key: Ident::new("ENGINE"),
7317 name: Some(Ident::new(w.value)),
7318 values: parameters,
7319 },
7320 )))
7321 }
7322 _ => {
7323 return self.expected("Token::Word", value)?;
7324 }
7325 };
7326
7327 return engine;
7328 }
7329
7330 if self.parse_keywords(&[Keyword::TABLESPACE]) {
7332 let _ = self.consume_token(&Token::Eq);
7333 let value = self.next_token();
7334
7335 let tablespace = match value.token {
7336 Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
7337 let storage = match self.parse_keyword(Keyword::STORAGE) {
7338 true => {
7339 let _ = self.consume_token(&Token::Eq);
7340 let storage_token = self.next_token();
7341 match &storage_token.token {
7342 Token::Word(w) => match w.value.to_uppercase().as_str() {
7343 "DISK" => Some(StorageType::Disk),
7344 "MEMORY" => Some(StorageType::Memory),
7345 _ => self
7346 .expected("Storage type (DISK or MEMORY)", storage_token)?,
7347 },
7348 _ => self.expected("Token::Word", storage_token)?,
7349 }
7350 }
7351 false => None,
7352 };
7353
7354 Ok(Some(SqlOption::TableSpace(TablespaceOption {
7355 name,
7356 storage,
7357 })))
7358 }
7359 _ => {
7360 return self.expected("Token::Word", value)?;
7361 }
7362 };
7363
7364 return tablespace;
7365 }
7366
7367 if self.parse_keyword(Keyword::UNION) {
7369 let _ = self.consume_token(&Token::Eq);
7370 let value = self.next_token();
7371
7372 match value.token {
7373 Token::LParen => {
7374 let tables: Vec<Ident> =
7375 self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
7376 self.expect_token(&Token::RParen)?;
7377
7378 return Ok(Some(SqlOption::NamedParenthesizedList(
7379 NamedParenthesizedList {
7380 key: Ident::new("UNION"),
7381 name: None,
7382 values: tables,
7383 },
7384 )));
7385 }
7386 _ => {
7387 return self.expected("Token::LParen", value)?;
7388 }
7389 }
7390 }
7391
7392 let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
7394 Ident::new("DEFAULT CHARSET")
7395 } else if self.parse_keyword(Keyword::CHARSET) {
7396 Ident::new("CHARSET")
7397 } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
7398 Ident::new("DEFAULT CHARACTER SET")
7399 } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7400 Ident::new("CHARACTER SET")
7401 } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
7402 Ident::new("DEFAULT COLLATE")
7403 } else if self.parse_keyword(Keyword::COLLATE) {
7404 Ident::new("COLLATE")
7405 } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
7406 Ident::new("DATA DIRECTORY")
7407 } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
7408 Ident::new("INDEX DIRECTORY")
7409 } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
7410 Ident::new("KEY_BLOCK_SIZE")
7411 } else if self.parse_keyword(Keyword::ROW_FORMAT) {
7412 Ident::new("ROW_FORMAT")
7413 } else if self.parse_keyword(Keyword::PACK_KEYS) {
7414 Ident::new("PACK_KEYS")
7415 } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
7416 Ident::new("STATS_AUTO_RECALC")
7417 } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
7418 Ident::new("STATS_PERSISTENT")
7419 } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
7420 Ident::new("STATS_SAMPLE_PAGES")
7421 } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
7422 Ident::new("DELAY_KEY_WRITE")
7423 } else if self.parse_keyword(Keyword::COMPRESSION) {
7424 Ident::new("COMPRESSION")
7425 } else if self.parse_keyword(Keyword::ENCRYPTION) {
7426 Ident::new("ENCRYPTION")
7427 } else if self.parse_keyword(Keyword::MAX_ROWS) {
7428 Ident::new("MAX_ROWS")
7429 } else if self.parse_keyword(Keyword::MIN_ROWS) {
7430 Ident::new("MIN_ROWS")
7431 } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
7432 Ident::new("AUTOEXTEND_SIZE")
7433 } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
7434 Ident::new("AVG_ROW_LENGTH")
7435 } else if self.parse_keyword(Keyword::CHECKSUM) {
7436 Ident::new("CHECKSUM")
7437 } else if self.parse_keyword(Keyword::CONNECTION) {
7438 Ident::new("CONNECTION")
7439 } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
7440 Ident::new("ENGINE_ATTRIBUTE")
7441 } else if self.parse_keyword(Keyword::PASSWORD) {
7442 Ident::new("PASSWORD")
7443 } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
7444 Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
7445 } else if self.parse_keyword(Keyword::INSERT_METHOD) {
7446 Ident::new("INSERT_METHOD")
7447 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
7448 Ident::new("AUTO_INCREMENT")
7449 } else {
7450 return Ok(None);
7451 };
7452
7453 let _ = self.consume_token(&Token::Eq);
7454
7455 let value = match self
7456 .maybe_parse(|parser| parser.parse_value())?
7457 .map(Expr::Value)
7458 {
7459 Some(expr) => expr,
7460 None => Expr::Identifier(self.parse_identifier()?),
7461 };
7462
7463 Ok(Some(SqlOption::KeyValue { key, value }))
7464 }
7465
7466 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
7467 let mut options = Vec::new();
7468
7469 while let Some(option) = self.parse_plain_option()? {
7470 options.push(option);
7471 }
7472
7473 Ok(options)
7474 }
7475
7476 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
7477 let comment = if self.parse_keyword(Keyword::COMMENT) {
7478 let has_eq = self.consume_token(&Token::Eq);
7479 let comment = self.parse_comment_value()?;
7480 Some(if has_eq {
7481 CommentDef::WithEq(comment)
7482 } else {
7483 CommentDef::WithoutEq(comment)
7484 })
7485 } else {
7486 None
7487 };
7488 Ok(comment)
7489 }
7490
7491 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
7492 let next_token = self.next_token();
7493 let value = match next_token.token {
7494 Token::SingleQuotedString(str) => str,
7495 Token::DollarQuotedString(str) => str.value,
7496 _ => self.expected("string literal", next_token)?,
7497 };
7498 Ok(value)
7499 }
7500
    /// Parses an optional parenthesized procedure parameter list.
    ///
    /// Returns `Ok(Some(vec![]))` both when no `(` follows and for an empty
    /// `()` list; otherwise parses comma-separated `name type` pairs.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            if let Token::Word(_) = self.peek_token().token {
                params.push(self.parse_procedure_param()?)
            }
            // A `)` ends the list whether or not a comma preceded it, so a
            // trailing comma is tolerated here.
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected("',' or ')' after parameter definition", self.peek_token());
            }
        }
        Ok(Some(params))
    }
7522
    /// Parses the parenthesized column / constraint list of a `CREATE TABLE`
    /// statement, returning the column definitions and table-level
    /// constraints in source order.
    ///
    /// An absent list or a bare `()` yields two empty vectors.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Constraints are tried first because they may start with
            // `CONSTRAINT <name>`, which would otherwise look like a column.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = self.peek_token().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected("column name or constraint definition", self.peek_token());
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token().token == Token::RParen;

            if !comma && !rparen {
                return self.expected("',' or ')' after column definition", self.peek_token());
            };

            // Accept the closing `)`; a comma directly before it (trailing
            // comma) is only allowed when the dialect or parser options
            // permit it.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
7558
7559 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
7560 let name = self.parse_identifier()?;
7561 let data_type = self.parse_data_type()?;
7562 Ok(ProcedureParam { name, data_type })
7563 }
7564
    /// Parses one column definition: identifier, data type (which SQLite may
    /// omit), and any number of column options.
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        let name = self.parse_identifier()?;
        // SQLite allows a column with no declared type; detect that case so
        // the following word is treated as an option instead of a type name.
        let data_type = if self.is_column_type_sqlite_unspecified() {
            DataType::Unspecified
        } else {
            self.parse_data_type()?
        };
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // Named option: `CONSTRAINT <name> <option>` — the option
                // body is then mandatory.
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                // No further option: the column definition ends here.
                break;
            };
        }
        Ok(ColumnDef {
            name,
            data_type,
            options,
        })
    }
7596
7597 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
7598 if dialect_of!(self is SQLiteDialect) {
7599 match self.peek_token().token {
7600 Token::Word(word) => matches!(
7601 word.keyword,
7602 Keyword::CONSTRAINT
7603 | Keyword::PRIMARY
7604 | Keyword::NOT
7605 | Keyword::UNIQUE
7606 | Keyword::CHECK
7607 | Keyword::DEFAULT
7608 | Keyword::COLLATE
7609 | Keyword::REFERENCES
7610 | Keyword::GENERATED
7611 | Keyword::AS
7612 ),
7613 _ => true, }
7615 } else {
7616 false
7617 }
7618 }
7619
7620 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7621 if let Some(option) = self.dialect.parse_column_option(self)? {
7622 return option;
7623 }
7624
7625 if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7626 Ok(Some(ColumnOption::CharacterSet(
7627 self.parse_object_name(false)?,
7628 )))
7629 } else if self.parse_keywords(&[Keyword::COLLATE]) {
7630 Ok(Some(ColumnOption::Collation(
7631 self.parse_object_name(false)?,
7632 )))
7633 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
7634 Ok(Some(ColumnOption::NotNull))
7635 } else if self.parse_keywords(&[Keyword::COMMENT]) {
7636 Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
7637 } else if self.parse_keyword(Keyword::NULL) {
7638 Ok(Some(ColumnOption::Null))
7639 } else if self.parse_keyword(Keyword::DEFAULT) {
7640 Ok(Some(ColumnOption::Default(self.parse_expr()?)))
7641 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
7642 && self.parse_keyword(Keyword::MATERIALIZED)
7643 {
7644 Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
7645 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
7646 && self.parse_keyword(Keyword::ALIAS)
7647 {
7648 Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
7649 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
7650 && self.parse_keyword(Keyword::EPHEMERAL)
7651 {
7652 if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
7655 Ok(Some(ColumnOption::Ephemeral(None)))
7656 } else {
7657 Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
7658 }
7659 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
7660 let characteristics = self.parse_constraint_characteristics()?;
7661 Ok(Some(ColumnOption::Unique {
7662 is_primary: true,
7663 characteristics,
7664 }))
7665 } else if self.parse_keyword(Keyword::UNIQUE) {
7666 let characteristics = self.parse_constraint_characteristics()?;
7667 Ok(Some(ColumnOption::Unique {
7668 is_primary: false,
7669 characteristics,
7670 }))
7671 } else if self.parse_keyword(Keyword::REFERENCES) {
7672 let foreign_table = self.parse_object_name(false)?;
7673 let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
7676 let mut on_delete = None;
7677 let mut on_update = None;
7678 loop {
7679 if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
7680 on_delete = Some(self.parse_referential_action()?);
7681 } else if on_update.is_none()
7682 && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7683 {
7684 on_update = Some(self.parse_referential_action()?);
7685 } else {
7686 break;
7687 }
7688 }
7689 let characteristics = self.parse_constraint_characteristics()?;
7690
7691 Ok(Some(ColumnOption::ForeignKey {
7692 foreign_table,
7693 referred_columns,
7694 on_delete,
7695 on_update,
7696 characteristics,
7697 }))
7698 } else if self.parse_keyword(Keyword::CHECK) {
7699 self.expect_token(&Token::LParen)?;
7700 let expr = self.parse_expr()?;
7701 self.expect_token(&Token::RParen)?;
7702 Ok(Some(ColumnOption::Check(expr)))
7703 } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
7704 && dialect_of!(self is MySqlDialect | GenericDialect)
7705 {
7706 Ok(Some(ColumnOption::DialectSpecific(vec![
7708 Token::make_keyword("AUTO_INCREMENT"),
7709 ])))
7710 } else if self.parse_keyword(Keyword::AUTOINCREMENT)
7711 && dialect_of!(self is SQLiteDialect | GenericDialect)
7712 {
7713 Ok(Some(ColumnOption::DialectSpecific(vec![
7715 Token::make_keyword("AUTOINCREMENT"),
7716 ])))
7717 } else if self.parse_keyword(Keyword::ASC)
7718 && self.dialect.supports_asc_desc_in_column_definition()
7719 {
7720 Ok(Some(ColumnOption::DialectSpecific(vec![
7722 Token::make_keyword("ASC"),
7723 ])))
7724 } else if self.parse_keyword(Keyword::DESC)
7725 && self.dialect.supports_asc_desc_in_column_definition()
7726 {
7727 Ok(Some(ColumnOption::DialectSpecific(vec![
7729 Token::make_keyword("DESC"),
7730 ])))
7731 } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7732 && dialect_of!(self is MySqlDialect | GenericDialect)
7733 {
7734 let expr = self.parse_expr()?;
7735 Ok(Some(ColumnOption::OnUpdate(expr)))
7736 } else if self.parse_keyword(Keyword::GENERATED) {
7737 self.parse_optional_column_option_generated()
7738 } else if dialect_of!(self is BigQueryDialect | GenericDialect)
7739 && self.parse_keyword(Keyword::OPTIONS)
7740 {
7741 self.prev_token();
7742 Ok(Some(ColumnOption::Options(
7743 self.parse_options(Keyword::OPTIONS)?,
7744 )))
7745 } else if self.parse_keyword(Keyword::AS)
7746 && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
7747 {
7748 self.parse_optional_column_option_as()
7749 } else if self.parse_keyword(Keyword::IDENTITY)
7750 && dialect_of!(self is MsSqlDialect | GenericDialect)
7751 {
7752 let parameters = if self.consume_token(&Token::LParen) {
7753 let seed = self.parse_number()?;
7754 self.expect_token(&Token::Comma)?;
7755 let increment = self.parse_number()?;
7756 self.expect_token(&Token::RParen)?;
7757
7758 Some(IdentityPropertyFormatKind::FunctionCall(
7759 IdentityParameters { seed, increment },
7760 ))
7761 } else {
7762 None
7763 };
7764 Ok(Some(ColumnOption::Identity(
7765 IdentityPropertyKind::Identity(IdentityProperty {
7766 parameters,
7767 order: None,
7768 }),
7769 )))
7770 } else if dialect_of!(self is SQLiteDialect | GenericDialect)
7771 && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
7772 {
7773 Ok(Some(ColumnOption::OnConflict(
7775 self.expect_one_of_keywords(&[
7776 Keyword::ROLLBACK,
7777 Keyword::ABORT,
7778 Keyword::FAIL,
7779 Keyword::IGNORE,
7780 Keyword::REPLACE,
7781 ])?,
7782 )))
7783 } else {
7784 Ok(None)
7785 }
7786 }
7787
7788 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
7789 let name = self.parse_identifier()?;
7790 self.expect_token(&Token::Eq)?;
7791 let value = self.parse_literal_string()?;
7792
7793 Ok(Tag::new(name, value))
7794 }
7795
    /// Parses the remainder of a generated-column option after the
    /// `GENERATED` keyword has been consumed: either an identity column
    /// (`ALWAYS AS IDENTITY` / `BY DEFAULT AS IDENTITY` with optional
    /// sequence options) or a computed column
    /// (`ALWAYS AS (expr) [STORED | VIRTUAL]`).
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            // Optional parenthesized sequence options, e.g. `(START WITH 1)`.
            // NOTE(review): `expect_token(..).is_ok()` is used as a soft
            // probe; it relies on expect_token not advancing on failure.
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                // STORED / VIRTUAL select the generation mode; PostgreSQL
                // only accepts STORED and errors on anything else.
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    self.expected("STORED", self.peek_token())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // `GENERATED ALWAYS AS` without `(` — not a generated column.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
7862
7863 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7864 self.expect_token(&Token::LParen)?;
7866 let expr = self.parse_expr()?;
7867 self.expect_token(&Token::RParen)?;
7868
7869 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
7870 (
7871 GeneratedAs::ExpStored,
7872 Some(GeneratedExpressionMode::Stored),
7873 )
7874 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
7875 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
7876 } else {
7877 (GeneratedAs::Always, None)
7878 };
7879
7880 Ok(Some(ColumnOption::Generated {
7881 generated_as: gen_as,
7882 sequence_options: None,
7883 generation_expr: Some(expr),
7884 generation_expr_mode: expr_mode,
7885 generated_keyword: false,
7886 }))
7887 }
7888
7889 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
7890 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
7891 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
7892 {
7893 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7894
7895 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
7896 self.expect_token(&Token::LParen)?;
7897 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
7898 self.expect_token(&Token::RParen)?;
7899 Some(sorted_by_columns)
7900 } else {
7901 None
7902 };
7903
7904 self.expect_keyword_is(Keyword::INTO)?;
7905 let num_buckets = self.parse_number_value()?.value;
7906 self.expect_keyword_is(Keyword::BUCKETS)?;
7907 Some(ClusteredBy {
7908 columns,
7909 sorted_by,
7910 num_buckets,
7911 })
7912 } else {
7913 None
7914 };
7915 Ok(clustered_by)
7916 }
7917
7918 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
7919 if self.parse_keyword(Keyword::RESTRICT) {
7920 Ok(ReferentialAction::Restrict)
7921 } else if self.parse_keyword(Keyword::CASCADE) {
7922 Ok(ReferentialAction::Cascade)
7923 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
7924 Ok(ReferentialAction::SetNull)
7925 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
7926 Ok(ReferentialAction::NoAction)
7927 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
7928 Ok(ReferentialAction::SetDefault)
7929 } else {
7930 self.expected(
7931 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
7932 self.peek_token(),
7933 )
7934 }
7935 }
7936
    /// Parses any combination of `[NOT] DEFERRABLE`,
    /// `INITIALLY DEFERRED|IMMEDIATE` and `[NOT] ENFORCED` — each at most
    /// once, in any order. Returns `Ok(None)` when none is present.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // Each clause is only accepted while its slot is still unset, so
            // a duplicate terminates the loop instead of overwriting.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    // `INITIALLY` must be followed by one of the two modes.
                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        // Only report characteristics when at least one clause was seen.
        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
7973
    /// Parses an optional table-level constraint: `UNIQUE`, `PRIMARY KEY`,
    /// `FOREIGN KEY`, `CHECK`, plus the MySQL `INDEX`/`KEY` and
    /// `FULLTEXT`/`SPATIAL` index definitions.
    ///
    /// Returns `Ok(None)` — restoring the token position — when the next
    /// token does not begin a constraint and no `CONSTRAINT <name>` prefix
    /// was consumed.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT <name>` prefix shared by most variants.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE [KEY|INDEX]` display is a MySQL extension; reject
                // the extra keyword elsewhere.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self
                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
                }

                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                // Optional index name and `USING <type>` before the columns.
                let index_name = self.parse_optional_indent()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(TableConstraint::Unique {
                    name,
                    index_name,
                    index_type_display,
                    index_type,
                    columns,
                    index_options,
                    characteristics,
                    nulls_distinct,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // After `PRIMARY` the `KEY` keyword is mandatory.
                self.expect_keyword_is(Keyword::KEY)?;

                let index_name = self.parse_optional_indent()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(TableConstraint::PrimaryKey {
                    name,
                    index_name,
                    index_type,
                    columns,
                    index_options,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                // ON DELETE / ON UPDATE may appear in either order, each at
                // most once.
                let mut on_delete = None;
                let mut on_update = None;
                loop {
                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(TableConstraint::ForeignKey {
                    name,
                    columns,
                    foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;
                Ok(Some(TableConstraint::Check { name, expr }))
            }
            // MySQL-style bare `INDEX`/`KEY` definition; no `CONSTRAINT`
            // prefix is allowed for these (hence `name.is_none()`).
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // An index name may follow unless the next word is `USING`.
                let name = match self.peek_token().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_indent()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

                Ok(Some(TableConstraint::Index {
                    display_as_key,
                    name,
                    index_type,
                    columns,
                }))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // MySQL forbids a `CONSTRAINT <name>` prefix on FULLTEXT and
                // SPATIAL index definitions.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_indent()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

                Ok(Some(TableConstraint::FulltextOrSpatial {
                    fulltext,
                    index_type_display,
                    opt_index_name,
                    columns,
                }))
            }
            _ => {
                if name.is_some() {
                    // `CONSTRAINT <name>` was consumed but no constraint
                    // body follows — that is an error.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint at all: put the token back so the
                    // caller can try a column definition instead.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
8133
8134 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
8135 Ok(if self.parse_keyword(Keyword::NULLS) {
8136 let not = self.parse_keyword(Keyword::NOT);
8137 self.expect_keyword_is(Keyword::DISTINCT)?;
8138 if not {
8139 NullsDistinctOption::NotDistinct
8140 } else {
8141 NullsDistinctOption::Distinct
8142 }
8143 } else {
8144 NullsDistinctOption::None
8145 })
8146 }
8147
8148 pub fn maybe_parse_options(
8149 &mut self,
8150 keyword: Keyword,
8151 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
8152 if let Token::Word(word) = self.peek_token().token {
8153 if word.keyword == keyword {
8154 return Ok(Some(self.parse_options(keyword)?));
8155 }
8156 };
8157 Ok(None)
8158 }
8159
8160 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
8161 if self.parse_keyword(keyword) {
8162 self.expect_token(&Token::LParen)?;
8163 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
8164 self.expect_token(&Token::RParen)?;
8165 Ok(options)
8166 } else {
8167 Ok(vec![])
8168 }
8169 }
8170
8171 pub fn parse_options_with_keywords(
8172 &mut self,
8173 keywords: &[Keyword],
8174 ) -> Result<Vec<SqlOption>, ParserError> {
8175 if self.parse_keywords(keywords) {
8176 self.expect_token(&Token::LParen)?;
8177 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8178 self.expect_token(&Token::RParen)?;
8179 Ok(options)
8180 } else {
8181 Ok(vec![])
8182 }
8183 }
8184
8185 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
8186 Ok(if self.parse_keyword(Keyword::BTREE) {
8187 IndexType::BTree
8188 } else if self.parse_keyword(Keyword::HASH) {
8189 IndexType::Hash
8190 } else if self.parse_keyword(Keyword::GIN) {
8191 IndexType::GIN
8192 } else if self.parse_keyword(Keyword::GIST) {
8193 IndexType::GiST
8194 } else if self.parse_keyword(Keyword::SPGIST) {
8195 IndexType::SPGiST
8196 } else if self.parse_keyword(Keyword::BRIN) {
8197 IndexType::BRIN
8198 } else if self.parse_keyword(Keyword::BLOOM) {
8199 IndexType::Bloom
8200 } else {
8201 IndexType::Custom(self.parse_identifier()?)
8202 })
8203 }
8204
8205 pub fn parse_optional_using_then_index_type(
8211 &mut self,
8212 ) -> Result<Option<IndexType>, ParserError> {
8213 if self.parse_keyword(Keyword::USING) {
8214 Ok(Some(self.parse_index_type()?))
8215 } else {
8216 Ok(None)
8217 }
8218 }
8219
8220 pub fn parse_optional_indent(&mut self) -> Result<Option<Ident>, ParserError> {
8223 self.maybe_parse(|parser| parser.parse_identifier())
8224 }
8225
8226 #[must_use]
8227 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
8228 if self.parse_keyword(Keyword::KEY) {
8229 KeyOrIndexDisplay::Key
8230 } else if self.parse_keyword(Keyword::INDEX) {
8231 KeyOrIndexDisplay::Index
8232 } else {
8233 KeyOrIndexDisplay::None
8234 }
8235 }
8236
8237 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
8238 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
8239 Ok(Some(IndexOption::Using(index_type)))
8240 } else if self.parse_keyword(Keyword::COMMENT) {
8241 let s = self.parse_literal_string()?;
8242 Ok(Some(IndexOption::Comment(s)))
8243 } else {
8244 Ok(None)
8245 }
8246 }
8247
8248 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
8249 let mut options = Vec::new();
8250
8251 loop {
8252 match self.parse_optional_index_option()? {
8253 Some(index_option) => options.push(index_option),
8254 None => return Ok(options),
8255 }
8256 }
8257 }
8258
    /// Parses one option inside an options list. MSSQL (and the generic
    /// dialect) get special handling for `HEAP`, `PARTITION (...)` and
    /// `CLUSTERED ...`; everything else is parsed as `key = value`.
    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);

        // Dispatch on the peeked word; each special form consumes its own
        // tokens inside the helper it delegates to.
        match self.peek_token().token {
            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
                Ok(SqlOption::Ident(self.parse_identifier()?))
            }
            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
                self.parse_option_partition()
            }
            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
                self.parse_option_clustered()
            }
            _ => {
                // Generic `name = expr` option; the `=` is mandatory here.
                let name = self.parse_identifier()?;
                self.expect_token(&Token::Eq)?;
                let value = self.parse_expr()?;

                Ok(SqlOption::KeyValue { key: name, value })
            }
        }
    }
8281
    /// Parses the MSSQL `CLUSTERED ...` table options. Longer keyword
    /// sequences are tried first so `CLUSTERED COLUMNSTORE INDEX ORDER` is
    /// not mis-parsed as the shorter `CLUSTERED COLUMNSTORE INDEX` form
    /// (`parse_keywords` rolls the position back on a partial match).
    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
        if self.parse_keywords(&[
            Keyword::CLUSTERED,
            Keyword::COLUMNSTORE,
            Keyword::INDEX,
            Keyword::ORDER,
        ]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndexOrder(
                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                ),
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndex,
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
            self.expect_token(&Token::LParen)?;

            // Each entry is `column [ASC | DESC]`.
            let columns = self.parse_comma_separated(|p| {
                let name = p.parse_identifier()?;
                let asc = p.parse_asc_desc();

                Ok(ClusteredIndex { name, asc })
            })?;

            self.expect_token(&Token::RParen)?;

            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
        } else {
            // `CLUSTERED` followed by none of the recognized continuations.
            Err(ParserError::ParserError(
                "invalid CLUSTERED sequence".to_string(),
            ))
        }
    }
8317
8318 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
8319 self.expect_keyword_is(Keyword::PARTITION)?;
8320 self.expect_token(&Token::LParen)?;
8321 let column_name = self.parse_identifier()?;
8322
8323 self.expect_keyword_is(Keyword::RANGE)?;
8324 let range_direction = if self.parse_keyword(Keyword::LEFT) {
8325 Some(PartitionRangeDirection::Left)
8326 } else if self.parse_keyword(Keyword::RIGHT) {
8327 Some(PartitionRangeDirection::Right)
8328 } else {
8329 None
8330 };
8331
8332 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8333 self.expect_token(&Token::LParen)?;
8334
8335 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
8336
8337 self.expect_token(&Token::RParen)?;
8338 self.expect_token(&Token::RParen)?;
8339
8340 Ok(SqlOption::Partition {
8341 column_name,
8342 range_direction,
8343 for_values,
8344 })
8345 }
8346
8347 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
8348 self.expect_token(&Token::LParen)?;
8349 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8350 self.expect_token(&Token::RParen)?;
8351 Ok(Partition::Partitions(partitions))
8352 }
8353
8354 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
8355 self.expect_token(&Token::LParen)?;
8356 self.expect_keyword_is(Keyword::SELECT)?;
8357 let projection = self.parse_projection()?;
8358 let group_by = self.parse_optional_group_by()?;
8359 let order_by = self.parse_optional_order_by()?;
8360 self.expect_token(&Token::RParen)?;
8361 Ok(ProjectionSelect {
8362 projection,
8363 group_by,
8364 order_by,
8365 })
8366 }
8367 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
8368 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8369 let name = self.parse_identifier()?;
8370 let query = self.parse_projection_select()?;
8371 Ok(AlterTableOperation::AddProjection {
8372 if_not_exists,
8373 name,
8374 select: query,
8375 })
8376 }
8377
    /// Parses a single `ALTER TABLE` operation (`ADD`, `RENAME`, `DROP`,
    /// `ALTER COLUMN`, `CHANGE`, `MODIFY`, and several dialect-specific
    /// variants such as ClickHouse projections/partitions, Snowflake
    /// `SWAP WITH`, and MySQL `ALGORITHM`/`LOCK`/`AUTO_INCREMENT`).
    ///
    /// The keyword branches are order-sensitive: each `parse_keyword(s)`
    /// call consumes tokens on success, so more specific sequences are
    /// always tried before their prefixes.
    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <constraint> | ADD PROJECTION | ADD PARTITION... | ADD [COLUMN] <def>
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                AlterTableOperation::AddConstraint(constraint)
            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
                && self.parse_keyword(Keyword::PROJECTION)
            {
                return self.parse_alter_table_add_projection();
            } else {
                // `IF NOT EXISTS` may precede either PARTITION or COLUMN.
                let if_not_exists =
                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
                let mut new_partitions = vec![];
                loop {
                    if self.parse_keyword(Keyword::PARTITION) {
                        new_partitions.push(self.parse_partition()?);
                    } else {
                        break;
                    }
                }
                if !new_partitions.is_empty() {
                    AlterTableOperation::AddPartitions {
                        if_not_exists,
                        new_partitions,
                    }
                } else {
                    let column_keyword = self.parse_keyword(Keyword::COLUMN);

                    // Some dialects also accept `IF NOT EXISTS` after COLUMN;
                    // other dialects reject the clause entirely (forced false).
                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
                    {
                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
                            || if_not_exists
                    } else {
                        false
                    };

                    let column_def = self.parse_column_def()?;

                    // MySQL-style `FIRST` / `AFTER <col>` placement.
                    let column_position = self.parse_column_position()?;

                    AlterTableOperation::AddColumn {
                        column_keyword,
                        if_not_exists,
                        column_def,
                        column_position,
                    }
                }
            }
        } else if self.parse_keyword(Keyword::RENAME) {
            // RENAME CONSTRAINT (Postgres) | RENAME TO <table> | RENAME [COLUMN] <old> TO <new>
            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
                let old_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_name = self.parse_identifier()?;
                AlterTableOperation::RenameConstraint { old_name, new_name }
            } else if self.parse_keyword(Keyword::TO) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable { table_name }
            } else {
                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
                let old_column_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_column_name = self.parse_identifier()?;
                AlterTableOperation::RenameColumn {
                    old_column_name,
                    new_column_name,
                }
            }
        } else if self.parse_keyword(Keyword::DISABLE) {
            // Postgres DISABLE ROW LEVEL SECURITY / RULE / TRIGGER
            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::DisableRowLevelSecurity {}
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableTrigger { name }
            } else {
                return self.expected(
                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keyword(Keyword::ENABLE) {
            // Postgres ENABLE [ALWAYS | REPLICA] RULE/TRIGGER, or ROW LEVEL SECURITY
            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysRule { name }
            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysTrigger { name }
            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::EnableRowLevelSecurity {}
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaRule { name }
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaTrigger { name }
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableTrigger { name }
            } else {
                return self.expected(
                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse CLEAR PROJECTION [IF EXISTS] <name> [IN PARTITION <part>]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::ClearProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse MATERIALIZE PROJECTION [IF EXISTS] <name> [IN PARTITION <part>]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::MaterializeProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keyword(Keyword::DROP) {
            // DROP PARTITION | CONSTRAINT | PRIMARY/FOREIGN KEY | PROJECTION |
            // CLUSTERING KEY | [COLUMN] <name>
            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: true,
                }
            } else if self.parse_keyword(Keyword::PARTITION) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: false,
                }
            } else if self.parse_keyword(Keyword::CONSTRAINT) {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropConstraint {
                    if_exists,
                    name,
                    drop_behavior,
                }
            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
                AlterTableOperation::DropPrimaryKey
            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DropForeignKey { name }
            } else if self.parse_keyword(Keyword::PROJECTION)
                && dialect_of!(self is ClickHouseDialect|GenericDialect)
            {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                AlterTableOperation::DropProjection { if_exists, name }
            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
                AlterTableOperation::DropClusteringKey
            } else {
                // Fallback: DROP [COLUMN] [IF EXISTS] <name> [CASCADE|RESTRICT]
                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let column_name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropColumn {
                    column_name,
                    if_exists,
                    drop_behavior,
                }
            }
        } else if self.parse_keyword(Keyword::PARTITION) {
            // Hive PARTITION (<old>) RENAME TO PARTITION (<new>)
            self.expect_token(&Token::LParen)?;
            let before = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::RENAME)?;
            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
            self.expect_token(&Token::LParen)?;
            let renames = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::RenamePartitions {
                old_partitions: before,
                new_partitions: renames,
            }
        } else if self.parse_keyword(Keyword::CHANGE) {
            // MySQL CHANGE [COLUMN] <old> <new> <type> [options] [position]
            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
            let old_name = self.parse_identifier()?;
            let new_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ChangeColumn {
                old_name,
                new_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::MODIFY) {
            // MySQL MODIFY [COLUMN] <name> <type> [options] [position]
            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
            let col_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ModifyColumn {
                col_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::ALTER) {
            // ALTER [COLUMN] <name> SET/DROP NOT NULL | SET/DROP DEFAULT |
            // SET DATA TYPE | ADD GENERATED ... AS IDENTITY
            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
            let column_name = self.parse_identifier()?;
            let is_postgresql = dialect_of!(self is PostgreSqlDialect);

            let op: AlterColumnOperation = if self.parse_keywords(&[
                Keyword::SET,
                Keyword::NOT,
                Keyword::NULL,
            ]) {
                AlterColumnOperation::SetNotNull {}
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
                AlterColumnOperation::DropNotNull {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
                AlterColumnOperation::SetDefault {
                    value: self.parse_expr()?,
                }
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
                AlterColumnOperation::DropDefault {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE])
                || (is_postgresql && self.parse_keyword(Keyword::TYPE))
            {
                let data_type = self.parse_data_type()?;
                // Postgres allows `USING <expr>` to convert existing values.
                let using = if is_postgresql && self.parse_keyword(Keyword::USING) {
                    Some(self.parse_expr()?)
                } else {
                    None
                };
                AlterColumnOperation::SetDataType { data_type, using }
            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
                    Some(GeneratedAs::Always)
                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
                    Some(GeneratedAs::ByDefault)
                } else {
                    None
                };

                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;

                // Optional parenthesized sequence options for the identity.
                let mut sequence_options: Option<Vec<SequenceOptions>> = None;

                if self.peek_token().token == Token::LParen {
                    self.expect_token(&Token::LParen)?;
                    sequence_options = Some(self.parse_create_sequence_options()?);
                    self.expect_token(&Token::RParen)?;
                }

                AlterColumnOperation::AddGenerated {
                    generated_as,
                    sequence_options,
                }
            } else {
                // ADD GENERATED is only advertised for Postgres.
                let message = if is_postgresql {
                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
                } else {
                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
                };

                return self.expected(message, self.peek_token());
            };
            AlterTableOperation::AlterColumn { column_name, op }
        } else if self.parse_keyword(Keyword::SWAP) {
            // Snowflake SWAP WITH <table>
            self.expect_keyword_is(Keyword::WITH)?;
            let table_name = self.parse_object_name(false)?;
            AlterTableOperation::SwapWith { table_name }
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
        {
            let new_owner = self.parse_owner()?;
            AlterTableOperation::OwnerTo { new_owner }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::ATTACH)
        {
            AlterTableOperation::AttachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::DETACH)
        {
            AlterTableOperation::DetachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::FREEZE)
        {
            // ClickHouse FREEZE PART/PARTITION <expr> [WITH NAME <id>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::FreezePartition {
                partition,
                with_name,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::UNFREEZE)
        {
            // ClickHouse UNFREEZE PART/PARTITION <expr> [WITH NAME <id>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::UnfreezePartition {
                partition,
                with_name,
            }
        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::ClusterBy { exprs }
        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
            AlterTableOperation::SuspendRecluster
        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
            AlterTableOperation::ResumeRecluster
        } else if self.parse_keyword(Keyword::LOCK) {
            // MySQL LOCK [=] DEFAULT|EXCLUSIVE|NONE|SHARED
            let equals = self.consume_token(&Token::Eq);
            let lock = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::EXCLUSIVE,
                Keyword::NONE,
                Keyword::SHARED,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableLock::Default,
                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
                Some(Keyword::NONE) => AlterTableLock::None,
                Some(Keyword::SHARED) => AlterTableLock::Shared,
                _ => self.expected(
                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Lock { equals, lock }
        } else if self.parse_keyword(Keyword::ALGORITHM) {
            // MySQL ALGORITHM [=] DEFAULT|INSTANT|INPLACE|COPY
            let equals = self.consume_token(&Token::Eq);
            let algorithm = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::INSTANT,
                Keyword::INPLACE,
                Keyword::COPY,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
                _ => self.expected(
                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Algorithm { equals, algorithm }
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            // MySQL AUTO_INCREMENT [=] <number>
            let equals = self.consume_token(&Token::Eq);
            let value = self.parse_number_value()?;
            AlterTableOperation::AutoIncrement { equals, value }
        } else {
            // Last resort: SET TBLPROPERTIES (...) (Hive/Spark), otherwise
            // report what was expected.
            let options: Vec<SqlOption> =
                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
            if !options.is_empty() {
                AlterTableOperation::SetTblProperties {
                    table_properties: options,
                }
            } else {
                return self.expected(
                    "ADD, RENAME, PARTITION, SWAP, DROP, or SET TBLPROPERTIES after ALTER TABLE",
                    self.peek_token(),
                );
            }
        };
        Ok(operation)
    }
8791
8792 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
8793 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
8794 match keyword {
8795 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
8796 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
8797 _ => unreachable!(),
8799 }
8800 }
8801
    /// Parses an `ALTER` statement, dispatching on the object kind
    /// (`VIEW`, `TYPE`, `TABLE`, `INDEX`, `ROLE`, `POLICY`, `CONNECTOR`).
    ///
    /// The leading `ALTER` keyword has already been consumed by the caller.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
        ])?;
        match object_type {
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::TABLE => {
                // ALTER TABLE [IF EXISTS] [ONLY] <name> [ON CLUSTER ...] <op>, ...
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
                let table_name = self.parse_object_name(false)?;
                let on_cluster = self.parse_optional_on_cluster()?;
                let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;

                // Hive: trailing [SET] LOCATION <loc>; `has_set` records
                // whether the SET keyword was present.
                let mut location = None;
                if self.parse_keyword(Keyword::LOCATION) {
                    location = Some(HiveSetLocation {
                        has_set: false,
                        location: self.parse_identifier()?,
                    });
                } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
                    location = Some(HiveSetLocation {
                        has_set: true,
                        location: self.parse_identifier()?,
                    });
                }

                Ok(Statement::AlterTable {
                    name: table_name,
                    if_exists,
                    only,
                    operations,
                    location,
                    on_cluster,
                })
            }
            Keyword::INDEX => {
                // Only `ALTER INDEX <name> RENAME TO <new>` is supported.
                let index_name = self.parse_object_name(false)?;
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected("TO after RENAME", self.peek_token());
                    }
                } else {
                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy(),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            // `expect_one_of_keywords` only returns keywords from the list above.
            _ => unreachable!(),
        }
    }
8869
8870 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
8871 let name = self.parse_object_name(false)?;
8872 let columns = self.parse_parenthesized_column_list(Optional, false)?;
8873
8874 let with_options = self.parse_options(Keyword::WITH)?;
8875
8876 self.expect_keyword_is(Keyword::AS)?;
8877 let query = self.parse_query()?;
8878
8879 Ok(Statement::AlterView {
8880 name,
8881 columns,
8882 query,
8883 with_options,
8884 })
8885 }
8886
8887 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
8889 let name = self.parse_object_name(false)?;
8890
8891 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
8892 let new_name = self.parse_identifier()?;
8893 Ok(Statement::AlterType(AlterType {
8894 name,
8895 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
8896 }))
8897 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
8898 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8899 let new_enum_value = self.parse_identifier()?;
8900 let position = if self.parse_keyword(Keyword::BEFORE) {
8901 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
8902 } else if self.parse_keyword(Keyword::AFTER) {
8903 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
8904 } else {
8905 None
8906 };
8907
8908 Ok(Statement::AlterType(AlterType {
8909 name,
8910 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
8911 if_not_exists,
8912 value: new_enum_value,
8913 position,
8914 }),
8915 }))
8916 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
8917 let existing_enum_value = self.parse_identifier()?;
8918 self.expect_keyword(Keyword::TO)?;
8919 let new_enum_value = self.parse_identifier()?;
8920
8921 Ok(Statement::AlterType(AlterType {
8922 name,
8923 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
8924 from: existing_enum_value,
8925 to: new_enum_value,
8926 }),
8927 }))
8928 } else {
8929 return self.expected_ref(
8930 "{RENAME TO | { RENAME | ADD } VALUE}",
8931 self.peek_token_ref(),
8932 );
8933 }
8934 }
8935
8936 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
8939 let object_name = self.parse_object_name(false)?;
8940 if self.peek_token().token == Token::LParen {
8941 match self.parse_function(object_name)? {
8942 Expr::Function(f) => Ok(Statement::Call(f)),
8943 other => parser_err!(
8944 format!("Expected a simple procedure call but found: {other}"),
8945 self.peek_token().span.start
8946 ),
8947 }
8948 } else {
8949 Ok(Statement::Call(Function {
8950 name: object_name,
8951 uses_odbc_syntax: false,
8952 parameters: FunctionArguments::None,
8953 args: FunctionArguments::None,
8954 over: None,
8955 filter: None,
8956 null_treatment: None,
8957 within_group: vec![],
8958 }))
8959 }
8960 }
8961
8962 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
8964 let source;
8965 if self.consume_token(&Token::LParen) {
8966 source = CopySource::Query(self.parse_query()?);
8967 self.expect_token(&Token::RParen)?;
8968 } else {
8969 let table_name = self.parse_object_name(false)?;
8970 let columns = self.parse_parenthesized_column_list(Optional, false)?;
8971 source = CopySource::Table {
8972 table_name,
8973 columns,
8974 };
8975 }
8976 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
8977 Some(Keyword::FROM) => false,
8978 Some(Keyword::TO) => true,
8979 _ => self.expected("FROM or TO", self.peek_token())?,
8980 };
8981 if !to {
8982 if let CopySource::Query(_) = source {
8985 return Err(ParserError::ParserError(
8986 "COPY ... FROM does not support query as a source".to_string(),
8987 ));
8988 }
8989 }
8990 let target = if self.parse_keyword(Keyword::STDIN) {
8991 CopyTarget::Stdin
8992 } else if self.parse_keyword(Keyword::STDOUT) {
8993 CopyTarget::Stdout
8994 } else if self.parse_keyword(Keyword::PROGRAM) {
8995 CopyTarget::Program {
8996 command: self.parse_literal_string()?,
8997 }
8998 } else {
8999 CopyTarget::File {
9000 filename: self.parse_literal_string()?,
9001 }
9002 };
9003 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
9005 if self.consume_token(&Token::LParen) {
9006 options = self.parse_comma_separated(Parser::parse_copy_option)?;
9007 self.expect_token(&Token::RParen)?;
9008 }
9009 let mut legacy_options = vec![];
9010 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
9011 legacy_options.push(opt);
9012 }
9013 let values = if let CopyTarget::Stdin = target {
9014 self.expect_token(&Token::SemiColon)?;
9015 self.parse_tsv()
9016 } else {
9017 vec![]
9018 };
9019 Ok(Statement::Copy {
9020 source,
9021 to,
9022 target,
9023 options,
9024 legacy_options,
9025 values,
9026 })
9027 }
9028
9029 fn parse_open(&mut self) -> Result<Statement, ParserError> {
9031 self.expect_keyword(Keyword::OPEN)?;
9032 Ok(Statement::Open(OpenStatement {
9033 cursor_name: self.parse_identifier()?,
9034 }))
9035 }
9036
9037 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
9038 let cursor = if self.parse_keyword(Keyword::ALL) {
9039 CloseCursor::All
9040 } else {
9041 let name = self.parse_identifier()?;
9042
9043 CloseCursor::Specific { name }
9044 };
9045
9046 Ok(Statement::Close { cursor })
9047 }
9048
9049 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
9050 let ret = match self.parse_one_of_keywords(&[
9051 Keyword::FORMAT,
9052 Keyword::FREEZE,
9053 Keyword::DELIMITER,
9054 Keyword::NULL,
9055 Keyword::HEADER,
9056 Keyword::QUOTE,
9057 Keyword::ESCAPE,
9058 Keyword::FORCE_QUOTE,
9059 Keyword::FORCE_NOT_NULL,
9060 Keyword::FORCE_NULL,
9061 Keyword::ENCODING,
9062 ]) {
9063 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
9064 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
9065 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
9066 Some(Keyword::FALSE)
9067 )),
9068 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
9069 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
9070 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
9071 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
9072 Some(Keyword::FALSE)
9073 )),
9074 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
9075 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
9076 Some(Keyword::FORCE_QUOTE) => {
9077 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
9078 }
9079 Some(Keyword::FORCE_NOT_NULL) => {
9080 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
9081 }
9082 Some(Keyword::FORCE_NULL) => {
9083 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
9084 }
9085 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
9086 _ => self.expected("option", self.peek_token())?,
9087 };
9088 Ok(ret)
9089 }
9090
9091 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
9092 let ret = match self.parse_one_of_keywords(&[
9093 Keyword::BINARY,
9094 Keyword::DELIMITER,
9095 Keyword::NULL,
9096 Keyword::CSV,
9097 ]) {
9098 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
9099 Some(Keyword::DELIMITER) => {
9100 let _ = self.parse_keyword(Keyword::AS); CopyLegacyOption::Delimiter(self.parse_literal_char()?)
9102 }
9103 Some(Keyword::NULL) => {
9104 let _ = self.parse_keyword(Keyword::AS); CopyLegacyOption::Null(self.parse_literal_string()?)
9106 }
9107 Some(Keyword::CSV) => CopyLegacyOption::Csv({
9108 let mut opts = vec![];
9109 while let Some(opt) =
9110 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
9111 {
9112 opts.push(opt);
9113 }
9114 opts
9115 }),
9116 _ => self.expected("option", self.peek_token())?,
9117 };
9118 Ok(ret)
9119 }
9120
9121 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
9122 let ret = match self.parse_one_of_keywords(&[
9123 Keyword::HEADER,
9124 Keyword::QUOTE,
9125 Keyword::ESCAPE,
9126 Keyword::FORCE,
9127 ]) {
9128 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
9129 Some(Keyword::QUOTE) => {
9130 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
9132 }
9133 Some(Keyword::ESCAPE) => {
9134 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
9136 }
9137 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
9138 CopyLegacyCsvOption::ForceNotNull(
9139 self.parse_comma_separated(|p| p.parse_identifier())?,
9140 )
9141 }
9142 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
9143 CopyLegacyCsvOption::ForceQuote(
9144 self.parse_comma_separated(|p| p.parse_identifier())?,
9145 )
9146 }
9147 _ => self.expected("csv option", self.peek_token())?,
9148 };
9149 Ok(ret)
9150 }
9151
9152 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
9153 let s = self.parse_literal_string()?;
9154 if s.len() != 1 {
9155 let loc = self
9156 .tokens
9157 .get(self.index - 1)
9158 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
9159 return parser_err!(format!("Expect a char, found {s:?}"), loc);
9160 }
9161 Ok(s.chars().next().unwrap())
9162 }
9163
    /// Parses tab-separated data rows following `COPY ... FROM STDIN ;`.
    ///
    /// Thin alias for [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
9169
9170 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
9171 let mut values = vec![];
9172 let mut content = String::from("");
9173 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
9174 match t {
9175 Token::Whitespace(Whitespace::Tab) => {
9176 values.push(Some(content.to_string()));
9177 content.clear();
9178 }
9179 Token::Whitespace(Whitespace::Newline) => {
9180 values.push(Some(content.to_string()));
9181 content.clear();
9182 }
9183 Token::Backslash => {
9184 if self.consume_token(&Token::Period) {
9185 return values;
9186 }
9187 if let Token::Word(w) = self.next_token().token {
9188 if w.value == "N" {
9189 values.push(None);
9190 }
9191 }
9192 }
9193 _ => {
9194 content.push_str(&t.to_string());
9195 }
9196 }
9197 }
9198 values
9199 }
9200
    /// Parses a literal value (number, string in its many quoted flavors,
    /// boolean, `NULL`, or a `:`/`@`-prefixed placeholder) and returns it
    /// together with the span of the token it came from.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper attaching the consumed token's span to the parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are only literals in dialects that support them.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a quoted string.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(s.to_string())),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(s.to_string())),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name`/`@name` (and numeric variants) form named placeholders.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::new(w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                let placeholder = tok.to_string() + &ident.value;
                ok_value(Value::Placeholder(placeholder))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
9303
9304 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
9306 let value_wrapper = self.parse_value()?;
9307 match &value_wrapper.value {
9308 Value::Number(_, _) => Ok(value_wrapper),
9309 Value::Placeholder(_) => Ok(value_wrapper),
9310 _ => {
9311 self.prev_token();
9312 self.expected("literal number", self.peek_token())
9313 }
9314 }
9315 }
9316
9317 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
9320 let next_token = self.next_token();
9321 match next_token.token {
9322 Token::Plus => Ok(Expr::UnaryOp {
9323 op: UnaryOperator::Plus,
9324 expr: Box::new(Expr::Value(self.parse_number_value()?)),
9325 }),
9326 Token::Minus => Ok(Expr::UnaryOp {
9327 op: UnaryOperator::Minus,
9328 expr: Box::new(Expr::Value(self.parse_number_value()?)),
9329 }),
9330 _ => {
9331 self.prev_token();
9332 Ok(Expr::Value(self.parse_number_value()?))
9333 }
9334 }
9335 }
9336
9337 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
9338 let next_token = self.next_token();
9339 let span = next_token.span;
9340 match next_token.token {
9341 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
9342 Value::SingleQuotedString(s.to_string()).with_span(span),
9343 )),
9344 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
9345 Value::DoubleQuotedString(s.to_string()).with_span(span),
9346 )),
9347 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
9348 Value::HexStringLiteral(s.to_string()).with_span(span),
9349 )),
9350 unexpected => self.expected(
9351 "a string value",
9352 TokenWithSpan {
9353 token: unexpected,
9354 span,
9355 },
9356 ),
9357 }
9358 }
9359
9360 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
9362 let next_token = self.next_token();
9363 match next_token.token {
9364 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
9365 _ => self.expected("literal int", next_token),
9366 }
9367 }
9368
9369 fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
9372 let peek_token = self.peek_token();
9373 let span = peek_token.span;
9374 match peek_token.token {
9375 Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
9376 {
9377 self.next_token();
9378 Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
9379 }
9380 _ => Ok(Expr::Value(
9381 Value::SingleQuotedString(self.parse_literal_string()?).with_span(span),
9382 )),
9383 }
9384 }
9385
9386 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
9388 let next_token = self.next_token();
9389 match next_token.token {
9390 Token::Word(Word {
9391 value,
9392 keyword: Keyword::NoKeyword,
9393 ..
9394 }) => Ok(value),
9395 Token::SingleQuotedString(s) => Ok(s),
9396 Token::DoubleQuotedString(s) => Ok(s),
9397 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
9398 Ok(s)
9399 }
9400 Token::UnicodeStringLiteral(s) => Ok(s),
9401 _ => self.expected("literal string", next_token),
9402 }
9403 }
9404
9405 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
9407 let neg = self.parse_keyword(Keyword::NOT);
9408 let normalized_form = self.maybe_parse(|parser| {
9409 match parser.parse_one_of_keywords(&[
9410 Keyword::NFC,
9411 Keyword::NFD,
9412 Keyword::NFKC,
9413 Keyword::NFKD,
9414 ]) {
9415 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
9416 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
9417 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
9418 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
9419 _ => parser.expected("unicode normalization form", parser.peek_token()),
9420 }
9421 })?;
9422 if self.parse_keyword(Keyword::NORMALIZED) {
9423 return Ok(Expr::IsNormalized {
9424 expr: Box::new(expr),
9425 form: normalized_form,
9426 negated: neg,
9427 });
9428 }
9429 self.expected("unicode normalization form", self.peek_token())
9430 }
9431
9432 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
9433 self.expect_token(&Token::LParen)?;
9434 let values = self.parse_comma_separated(|parser| {
9435 let name = parser.parse_literal_string()?;
9436 let e = if parser.consume_token(&Token::Eq) {
9437 let value = parser.parse_number()?;
9438 EnumMember::NamedValue(name, value)
9439 } else {
9440 EnumMember::Name(name)
9441 };
9442 Ok(e)
9443 })?;
9444 self.expect_token(&Token::RParen)?;
9445
9446 Ok(values)
9447 }
9448
9449 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
9451 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
9452 if trailing_bracket.0 {
9453 return parser_err!(
9454 format!("unmatched > after parsing data type {ty}"),
9455 self.peek_token()
9456 );
9457 }
9458
9459 Ok(ty)
9460 }
9461
    /// Parse a data type, additionally reporting whether a trailing `>`
    /// was already consumed while parsing a nested angle-bracket type
    /// (e.g. the inner type of `ARRAY<...>` or `STRUCT<...>`), so the
    /// caller can detect an unmatched `>`.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        // Remember where the type name started for error reporting.
        let next_token_index = self.get_current_index();

        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                Keyword::FLOAT => Ok(DataType::Float(self.parse_optional_precision()?)),
                Keyword::REAL => Ok(DataType::Real),
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    if self.parse_keyword(Keyword::PRECISION) {
                        Ok(DataType::DoublePrecision)
                    } else {
                        Ok(DataType::Double(
                            self.parse_exact_number_optional_precision_scale()?,
                        ))
                    }
                }
                // Integer types: optional `(precision)` followed by an
                // optional MySQL-style UNSIGNED modifier. The precision
                // Result is deliberately held unevaluated (`?` deferred)
                // until after the UNSIGNED check so the token order is
                // precision-then-UNSIGNED.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // Character types, with optional length and the ANSI
                // `VARYING` / `LARGE OBJECT` variants.
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                // Binary types.
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                // Date/time types.
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // parse_datetime_64 re-reads the keyword, so rewind first.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => Ok(DataType::TimestampNtz),
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => Ok(DataType::Interval),
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    // ClickHouse FixedString(N): the length is mandatory.
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // Exact numeric types with optional (precision, scale).
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => Ok(DataType::Decimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DEC => Ok(DataType::Dec(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                // Composite/container types, dispatched per dialect.
                Keyword::ARRAY => {
                    if dialect_of!(self is SnowflakeDialect) {
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        // ARRAY<type>: recursing may consume the closing `>`
                        // (or part of a `>>`), tracked via trailing_bracket.
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                Keyword::TABLE => {
                    let columns = self.parse_returns_table_columns()?;
                    Ok(DataType::Table(columns))
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                // Anything else is treated as a custom (user-defined) type
                // name, optionally followed by type modifiers.
                _ => {
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Postgres-style `type[]` / `type[N]` array suffixes; each pair of
        // brackets wraps the type in another array level.
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
9811
9812 fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
9813 let name = self.parse_identifier()?;
9814 let data_type = self.parse_data_type()?;
9815 Ok(ColumnDef {
9816 name,
9817 data_type,
9818 options: Vec::new(), })
9820 }
9821
9822 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
9823 self.expect_token(&Token::LParen)?;
9824 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
9825 self.expect_token(&Token::RParen)?;
9826 Ok(columns)
9827 }
9828
9829 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
9830 self.expect_token(&Token::LParen)?;
9831 let mut values = Vec::new();
9832 loop {
9833 let next_token = self.next_token();
9834 match next_token.token {
9835 Token::SingleQuotedString(value) => values.push(value),
9836 _ => self.expected("a string", next_token)?,
9837 }
9838 let next_token = self.next_token();
9839 match next_token.token {
9840 Token::Comma => (),
9841 Token::RParen => break,
9842 _ => self.expected(", or }", next_token)?,
9843 }
9844 }
9845 Ok(values)
9846 }
9847
9848 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
9850 let ident = self.parse_identifier()?;
9851 self.expect_keyword_is(Keyword::AS)?;
9852 let alias = self.parse_identifier()?;
9853 Ok(IdentWithAlias { ident, alias })
9854 }
9855
9856 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
9858 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
9859 parser.dialect.is_select_item_alias(explicit, kw, parser)
9860 }
9861 self.parse_optional_alias_inner(None, validator)
9862 }
9863
9864 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
9868 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
9869 parser.dialect.is_table_factor_alias(explicit, kw, parser)
9870 }
9871 match self.parse_optional_alias_inner(None, validator)? {
9872 Some(name) => {
9873 let columns = self.parse_table_alias_column_defs()?;
9874 Ok(Some(TableAlias { name, columns }))
9875 }
9876 None => Ok(None),
9877 }
9878 }
9879
9880 fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
9881 let mut hints = vec![];
9882 while let Some(hint_type) =
9883 self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
9884 {
9885 let hint_type = match hint_type {
9886 Keyword::USE => TableIndexHintType::Use,
9887 Keyword::IGNORE => TableIndexHintType::Ignore,
9888 Keyword::FORCE => TableIndexHintType::Force,
9889 _ => {
9890 return self.expected(
9891 "expected to match USE/IGNORE/FORCE keyword",
9892 self.peek_token(),
9893 )
9894 }
9895 };
9896 let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
9897 Some(Keyword::INDEX) => TableIndexType::Index,
9898 Some(Keyword::KEY) => TableIndexType::Key,
9899 _ => {
9900 return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
9901 }
9902 };
9903 let for_clause = if self.parse_keyword(Keyword::FOR) {
9904 let clause = if self.parse_keyword(Keyword::JOIN) {
9905 TableIndexHintForClause::Join
9906 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
9907 TableIndexHintForClause::OrderBy
9908 } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
9909 TableIndexHintForClause::GroupBy
9910 } else {
9911 return self.expected(
9912 "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
9913 self.peek_token(),
9914 );
9915 };
9916 Some(clause)
9917 } else {
9918 None
9919 };
9920
9921 self.expect_token(&Token::LParen)?;
9922 let index_names = if self.peek_token().token != Token::RParen {
9923 self.parse_comma_separated(Parser::parse_identifier)?
9924 } else {
9925 vec![]
9926 };
9927 self.expect_token(&Token::RParen)?;
9928 hints.push(TableIndexHints {
9929 hint_type,
9930 index_type,
9931 for_clause,
9932 index_names,
9933 });
9934 }
9935 Ok(hints)
9936 }
9937
9938 pub fn parse_optional_alias(
9942 &mut self,
9943 reserved_kwds: &[Keyword],
9944 ) -> Result<Option<Ident>, ParserError> {
9945 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
9946 false
9947 }
9948 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
9949 }
9950
    /// Shared implementation for optional-alias parsing.
    ///
    /// Accepts an alias when one of the following holds, checked in order:
    /// 1. an explicit `AS` was consumed, or the next word's keyword is
    ///    absent from `reserved_kwds` (when that list is provided);
    /// 2. the dialect-specific `validator` approves the word's keyword;
    /// 3. the next token is a single- or double-quoted string.
    /// Otherwise rewinds and returns `Ok(None)` — except after an explicit
    /// `AS`, where a missing identifier is an error.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Arm order matters: the reserved-word check must run before
            // the dialect validator so explicit AS always wins.
            Token::Word(w)
                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                // Not an alias: put the token back and report no alias.
                self.prev_token();
                Ok(None)
            }
        }
    }
9994
    /// Optionally parse a `GROUP BY` clause.
    ///
    /// Handles `GROUP BY ALL` (expressions = None), a comma-separated
    /// expression list, dialect-specific `WITH ROLLUP/CUBE/TOTALS`
    /// modifiers, and a trailing `GROUPING SETS (...)` modifier.
    /// Returns `Ok(None)` when no GROUP BY is present.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `None` encodes GROUP BY ALL; `Some` the explicit list.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Multiple WITH modifiers may appear back to back.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords limits the result.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token().span.start
                            )
                        }
                    });
                }
            }
            // GROUPING SETS after the expression list is stored as a modifier.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
10044
10045 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
10046 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10047 let order_by =
10048 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
10049 let order_by_options = self.parse_order_by_options()?;
10050 OrderBy {
10051 kind: OrderByKind::All(order_by_options),
10052 interpolate: None,
10053 }
10054 } else {
10055 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
10056 let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
10057 self.parse_interpolations()?
10058 } else {
10059 None
10060 };
10061 OrderBy {
10062 kind: OrderByKind::Expressions(exprs),
10063 interpolate,
10064 }
10065 };
10066 Ok(Some(order_by))
10067 } else {
10068 Ok(None)
10069 }
10070 }
10071
    /// Optionally parse LIMIT/OFFSET in any of the accepted orders:
    /// `OFFSET ... LIMIT ...`, `LIMIT ... OFFSET ...`, the MySQL-style
    /// `LIMIT <offset>, <limit>` shorthand, and ClickHouse `LIMIT ... BY`.
    /// Returns `Ok(None)` when none of these clauses is present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may precede LIMIT.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        // `limit` is Option<Option<Expr>>: outer None = no LIMIT keyword,
        // inner None = LIMIT with no expression (e.g. LIMIT ALL).
        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL shorthand `LIMIT <offset>, <limit>` — only valid when
            // no OFFSET was already parsed and a limit expression exists.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // ClickHouse `LIMIT n BY expr, ...`.
            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::BY)
            {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT (but not appear twice).
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit == Some(None)` alone (bare LIMIT with no expression and no
        // offset/limit-by) does not produce a clause.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
10125
10126 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
10129 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
10130 let fn_name = self.parse_object_name(false)?;
10131 self.parse_function_call(fn_name)
10132 .map(TableObject::TableFunction)
10133 } else {
10134 self.parse_object_name(false).map(TableObject::TableName)
10135 }
10136 }
10137
    /// Parse a period-separated object name, optionally allowing `*` as a
    /// name part. Inside a BigQuery table clause, parts may be unquoted
    /// hyphenated identifiers. Supports the dialect-specific `db..table`
    /// double-dot notation by inserting an empty middle identifier.
    fn parse_object_name_with_wildcards(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut idents = vec![];

        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            loop {
                // `end_with_period` is set when the hyphenated-identifier
                // parser already consumed a trailing period.
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                idents.push(ident);
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                let ident = if allow_wildcards && self.peek_token().token == Token::Mul {
                    let span = self.next_token().span;
                    Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }
                } else {
                    // `db..table`: represent the skipped middle part as an
                    // empty identifier.
                    if self.dialect.supports_object_name_double_dot_notation()
                        && idents.len() == 1
                        && self.consume_token(&Token::Period)
                    {
                        idents.push(Ident::new(""));
                    }
                    self.parse_identifier()?
                };
                idents.push(ident);
                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }
        Ok(ObjectName::from(idents))
    }
10182
    /// Parse a period-separated object name (no wildcards). On BigQuery,
    /// a quoted part may itself contain periods (e.g. `` `a.b.c` ``); such
    /// parts are split into separate identifiers, each keeping the
    /// original part's quote style and span.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        let ObjectName(mut idents) =
            self.parse_object_name_with_wildcards(in_table_clause, false)?;

        // BigQuery: normalize dotted quoted parts into individual parts.
        if dialect_of!(self is BigQueryDialect)
            && idents.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            idents = idents
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(idents))
    }
10222
10223 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
10225 let mut idents = vec![];
10226 loop {
10227 match &self.peek_token_ref().token {
10228 Token::Word(w) => {
10229 idents.push(w.clone().into_ident(self.peek_token_ref().span));
10230 }
10231 Token::EOF | Token::Eq => break,
10232 _ => {}
10233 }
10234 self.advance_token();
10235 }
10236 Ok(idents)
10237 }
10238
10239 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
10279 let mut idents = vec![];
10280
10281 let next_token = self.next_token();
10283 match next_token.token {
10284 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
10285 Token::EOF => {
10286 return Err(ParserError::ParserError(
10287 "Empty input when parsing identifier".to_string(),
10288 ))?
10289 }
10290 token => {
10291 return Err(ParserError::ParserError(format!(
10292 "Unexpected token in identifier: {token}"
10293 )))?
10294 }
10295 };
10296
10297 loop {
10299 match self.next_token().token {
10300 Token::Period => {
10302 let next_token = self.next_token();
10303 match next_token.token {
10304 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
10305 Token::EOF => {
10306 return Err(ParserError::ParserError(
10307 "Trailing period in identifier".to_string(),
10308 ))?
10309 }
10310 token => {
10311 return Err(ParserError::ParserError(format!(
10312 "Unexpected token following period in identifier: {token}"
10313 )))?
10314 }
10315 }
10316 }
10317 Token::EOF => break,
10318 token => {
10319 return Err(ParserError::ParserError(format!(
10320 "Unexpected token in identifier: {token}"
10321 )))?
10322 }
10323 }
10324 }
10325
10326 Ok(idents)
10327 }
10328
10329 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
10331 let next_token = self.next_token();
10332 match next_token.token {
10333 Token::Word(w) => Ok(w.into_ident(next_token.span)),
10334 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
10335 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
10336 _ => self.expected("identifier", next_token),
10337 }
10338 }
10339
10340 fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
10351 match self.peek_token().token {
10352 Token::Word(w) => {
10353 let quote_style_is_none = w.quote_style.is_none();
10354 let mut requires_whitespace = false;
10355 let mut ident = w.into_ident(self.next_token().span);
10356 if quote_style_is_none {
10357 while matches!(self.peek_token_no_skip().token, Token::Minus) {
10358 self.next_token();
10359 ident.value.push('-');
10360
10361 let token = self
10362 .next_token_no_skip()
10363 .cloned()
10364 .unwrap_or(TokenWithSpan::wrap(Token::EOF));
10365 requires_whitespace = match token.token {
10366 Token::Word(next_word) if next_word.quote_style.is_none() => {
10367 ident.value.push_str(&next_word.value);
10368 false
10369 }
10370 Token::Number(s, false) => {
10371 if s.ends_with('.') {
10378 let Some(s) = s.split('.').next().filter(|s| {
10379 !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
10380 }) else {
10381 return self.expected(
10382 "continuation of hyphenated identifier",
10383 TokenWithSpan::new(Token::Number(s, false), token.span),
10384 );
10385 };
10386 ident.value.push_str(s);
10387 return Ok((ident, true));
10388 } else {
10389 ident.value.push_str(&s);
10390 }
10391 !matches!(self.peek_token().token, Token::Period)
10394 }
10395 _ => {
10396 return self
10397 .expected("continuation of hyphenated identifier", token);
10398 }
10399 }
10400 }
10401
10402 if requires_whitespace {
10405 let token = self.next_token();
10406 if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
10407 return self
10408 .expected("whitespace following hyphenated identifier", token);
10409 }
10410 }
10411 }
10412 Ok((ident, false))
10413 }
10414 _ => Ok((self.parse_identifier()?, false)),
10415 }
10416 }
10417
10418 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
10420 if self.consume_token(&Token::LParen) {
10421 if self.peek_token().token == Token::RParen {
10422 self.next_token();
10423 Ok(vec![])
10424 } else {
10425 let cols = self.parse_comma_separated_with_trailing_commas(
10426 Parser::parse_view_column,
10427 self.dialect.supports_column_definition_trailing_commas(),
10428 Self::is_reserved_for_column_alias,
10429 )?;
10430 self.expect_token(&Token::RParen)?;
10431 Ok(cols)
10432 }
10433 } else {
10434 Ok(vec![])
10435 }
10436 }
10437
10438 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
10440 let name = self.parse_identifier()?;
10441 let options = if (dialect_of!(self is BigQueryDialect | GenericDialect)
10442 && self.parse_keyword(Keyword::OPTIONS))
10443 || (dialect_of!(self is SnowflakeDialect | GenericDialect)
10444 && self.parse_keyword(Keyword::COMMENT))
10445 {
10446 self.prev_token();
10447 self.parse_optional_column_option()?
10448 .map(|option| vec![option])
10449 } else {
10450 None
10451 };
10452 let data_type = if dialect_of!(self is ClickHouseDialect) {
10453 Some(self.parse_data_type()?)
10454 } else {
10455 None
10456 };
10457 Ok(ViewColumnDef {
10458 name,
10459 data_type,
10460 options,
10461 })
10462 }
10463
10464 pub fn parse_parenthesized_column_list(
10467 &mut self,
10468 optional: IsOptional,
10469 allow_empty: bool,
10470 ) -> Result<Vec<Ident>, ParserError> {
10471 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
10472 }
10473
10474 pub fn parse_parenthesized_qualified_column_list(
10477 &mut self,
10478 optional: IsOptional,
10479 allow_empty: bool,
10480 ) -> Result<Vec<ObjectName>, ParserError> {
10481 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
10482 p.parse_object_name(true)
10483 })
10484 }
10485
10486 fn parse_parenthesized_column_list_inner<F, T>(
10489 &mut self,
10490 optional: IsOptional,
10491 allow_empty: bool,
10492 mut f: F,
10493 ) -> Result<Vec<T>, ParserError>
10494 where
10495 F: FnMut(&mut Parser) -> Result<T, ParserError>,
10496 {
10497 if self.consume_token(&Token::LParen) {
10498 if allow_empty && self.peek_token().token == Token::RParen {
10499 self.next_token();
10500 Ok(vec![])
10501 } else {
10502 let cols = self.parse_comma_separated(|p| f(p))?;
10503 self.expect_token(&Token::RParen)?;
10504 Ok(cols)
10505 }
10506 } else if optional == Optional {
10507 Ok(vec![])
10508 } else {
10509 self.expected("a list of columns in parentheses", self.peek_token())
10510 }
10511 }
10512
10513 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
10515 if self.consume_token(&Token::LParen) {
10516 let cols = self.parse_comma_separated(|p| {
10517 let name = p.parse_identifier()?;
10518 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
10519 Ok(TableAliasColumnDef { name, data_type })
10520 })?;
10521 self.expect_token(&Token::RParen)?;
10522 Ok(cols)
10523 } else {
10524 Ok(vec![])
10525 }
10526 }
10527
10528 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
10529 self.expect_token(&Token::LParen)?;
10530 let n = self.parse_literal_uint()?;
10531 self.expect_token(&Token::RParen)?;
10532 Ok(n)
10533 }
10534
10535 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
10536 if self.consume_token(&Token::LParen) {
10537 let n = self.parse_literal_uint()?;
10538 self.expect_token(&Token::RParen)?;
10539 Ok(Some(n))
10540 } else {
10541 Ok(None)
10542 }
10543 }
10544
10545 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
10553 self.expect_keyword_is(Keyword::DATETIME64)?;
10554 self.expect_token(&Token::LParen)?;
10555 let precision = self.parse_literal_uint()?;
10556 let time_zone = if self.consume_token(&Token::Comma) {
10557 Some(self.parse_literal_string()?)
10558 } else {
10559 None
10560 };
10561 self.expect_token(&Token::RParen)?;
10562 Ok((precision, time_zone))
10563 }
10564
10565 pub fn parse_optional_character_length(
10566 &mut self,
10567 ) -> Result<Option<CharacterLength>, ParserError> {
10568 if self.consume_token(&Token::LParen) {
10569 let character_length = self.parse_character_length()?;
10570 self.expect_token(&Token::RParen)?;
10571 Ok(Some(character_length))
10572 } else {
10573 Ok(None)
10574 }
10575 }
10576
10577 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
10578 if self.consume_token(&Token::LParen) {
10579 let binary_length = self.parse_binary_length()?;
10580 self.expect_token(&Token::RParen)?;
10581 Ok(Some(binary_length))
10582 } else {
10583 Ok(None)
10584 }
10585 }
10586
10587 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
10588 if self.parse_keyword(Keyword::MAX) {
10589 return Ok(CharacterLength::Max);
10590 }
10591 let length = self.parse_literal_uint()?;
10592 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
10593 Some(CharLengthUnits::Characters)
10594 } else if self.parse_keyword(Keyword::OCTETS) {
10595 Some(CharLengthUnits::Octets)
10596 } else {
10597 None
10598 };
10599 Ok(CharacterLength::IntegerLength { length, unit })
10600 }
10601
10602 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
10603 if self.parse_keyword(Keyword::MAX) {
10604 return Ok(BinaryLength::Max);
10605 }
10606 let length = self.parse_literal_uint()?;
10607 Ok(BinaryLength::IntegerLength { length })
10608 }
10609
10610 pub fn parse_optional_precision_scale(
10611 &mut self,
10612 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
10613 if self.consume_token(&Token::LParen) {
10614 let n = self.parse_literal_uint()?;
10615 let scale = if self.consume_token(&Token::Comma) {
10616 Some(self.parse_literal_uint()?)
10617 } else {
10618 None
10619 };
10620 self.expect_token(&Token::RParen)?;
10621 Ok((Some(n), scale))
10622 } else {
10623 Ok((None, None))
10624 }
10625 }
10626
10627 pub fn parse_exact_number_optional_precision_scale(
10628 &mut self,
10629 ) -> Result<ExactNumberInfo, ParserError> {
10630 if self.consume_token(&Token::LParen) {
10631 let precision = self.parse_literal_uint()?;
10632 let scale = if self.consume_token(&Token::Comma) {
10633 Some(self.parse_literal_uint()?)
10634 } else {
10635 None
10636 };
10637
10638 self.expect_token(&Token::RParen)?;
10639
10640 match scale {
10641 None => Ok(ExactNumberInfo::Precision(precision)),
10642 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
10643 }
10644 } else {
10645 Ok(ExactNumberInfo::None)
10646 }
10647 }
10648
10649 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
10650 if self.consume_token(&Token::LParen) {
10651 let mut modifiers = Vec::new();
10652 loop {
10653 let next_token = self.next_token();
10654 match next_token.token {
10655 Token::Word(w) => modifiers.push(w.to_string()),
10656 Token::Number(n, _) => modifiers.push(n),
10657 Token::SingleQuotedString(s) => modifiers.push(s),
10658
10659 Token::Comma => {
10660 continue;
10661 }
10662 Token::RParen => {
10663 break;
10664 }
10665 _ => self.expected("type modifiers", next_token)?,
10666 }
10667 }
10668
10669 Ok(Some(modifiers))
10670 } else {
10671 Ok(None)
10672 }
10673 }
10674
10675 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
10677 where
10678 F: FnOnce(Box<DataType>) -> DataType,
10679 {
10680 self.expect_token(&Token::LParen)?;
10681 let inside_type = self.parse_data_type()?;
10682 self.expect_token(&Token::RParen)?;
10683 Ok(parent_type(inside_type.into()))
10684 }
10685
10686 fn parse_delete_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
10690 Ok(Box::new(SetExpr::Delete(self.parse_delete()?)))
10691 }
10692
10693 pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
10694 let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
10695 if dialect_of!(self is BigQueryDialect | GenericDialect) {
10698 (vec![], false)
10699 } else {
10700 let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
10701 self.expect_keyword_is(Keyword::FROM)?;
10702 (tables, true)
10703 }
10704 } else {
10705 (vec![], true)
10706 };
10707
10708 let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
10709 let using = if self.parse_keyword(Keyword::USING) {
10710 Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
10711 } else {
10712 None
10713 };
10714 let selection = if self.parse_keyword(Keyword::WHERE) {
10715 Some(self.parse_expr()?)
10716 } else {
10717 None
10718 };
10719 let returning = if self.parse_keyword(Keyword::RETURNING) {
10720 Some(self.parse_comma_separated(Parser::parse_select_item)?)
10721 } else {
10722 None
10723 };
10724 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10725 self.parse_comma_separated(Parser::parse_order_by_expr)?
10726 } else {
10727 vec![]
10728 };
10729 let limit = if self.parse_keyword(Keyword::LIMIT) {
10730 self.parse_limit()?
10731 } else {
10732 None
10733 };
10734
10735 Ok(Statement::Delete(Delete {
10736 tables,
10737 from: if with_from_keyword {
10738 FromTable::WithFromKeyword(from)
10739 } else {
10740 FromTable::WithoutKeyword(from)
10741 },
10742 using,
10743 selection,
10744 returning,
10745 order_by,
10746 limit,
10747 }))
10748 }
10749
10750 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
10752 let modifier_keyword =
10753 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
10754
10755 let id = self.parse_literal_uint()?;
10756
10757 let modifier = match modifier_keyword {
10758 Some(Keyword::CONNECTION) => Some(KillType::Connection),
10759 Some(Keyword::QUERY) => Some(KillType::Query),
10760 Some(Keyword::MUTATION) => {
10761 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
10762 Some(KillType::Mutation)
10763 } else {
10764 self.expected(
10765 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
10766 self.peek_token(),
10767 )?
10768 }
10769 }
10770 _ => None,
10771 };
10772
10773 Ok(Statement::Kill { modifier, id })
10774 }
10775
10776 pub fn parse_explain(
10777 &mut self,
10778 describe_alias: DescribeAlias,
10779 ) -> Result<Statement, ParserError> {
10780 let mut analyze = false;
10781 let mut verbose = false;
10782 let mut query_plan = false;
10783 let mut estimate = false;
10784 let mut format = None;
10785 let mut options = None;
10786
10787 if describe_alias == DescribeAlias::Explain
10790 && self.dialect.supports_explain_with_utility_options()
10791 && self.peek_token().token == Token::LParen
10792 {
10793 options = Some(self.parse_utility_options()?)
10794 } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
10795 query_plan = true;
10796 } else if self.parse_keyword(Keyword::ESTIMATE) {
10797 estimate = true;
10798 } else {
10799 analyze = self.parse_keyword(Keyword::ANALYZE);
10800 verbose = self.parse_keyword(Keyword::VERBOSE);
10801 if self.parse_keyword(Keyword::FORMAT) {
10802 format = Some(self.parse_analyze_format()?);
10803 }
10804 }
10805
10806 match self.maybe_parse(|parser| parser.parse_statement())? {
10807 Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
10808 ParserError::ParserError("Explain must be root of the plan".to_string()),
10809 ),
10810 Some(statement) => Ok(Statement::Explain {
10811 describe_alias,
10812 analyze,
10813 verbose,
10814 query_plan,
10815 estimate,
10816 statement: Box::new(statement),
10817 format,
10818 options,
10819 }),
10820 _ => {
10821 let hive_format =
10822 match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
10823 Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
10824 Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
10825 _ => None,
10826 };
10827
10828 let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
10829 self.parse_keyword(Keyword::TABLE)
10831 } else {
10832 false
10833 };
10834
10835 let table_name = self.parse_object_name(false)?;
10836 Ok(Statement::ExplainTable {
10837 describe_alias,
10838 hive_format,
10839 has_table_keyword,
10840 table_name,
10841 })
10842 }
10843 }
10844 }
10845
10846 pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
10851 let _guard = self.recursion_counter.try_decrease()?;
10852 let with = if self.parse_keyword(Keyword::WITH) {
10853 let with_token = self.get_current_token();
10854 Some(With {
10855 with_token: with_token.clone().into(),
10856 recursive: self.parse_keyword(Keyword::RECURSIVE),
10857 cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
10858 })
10859 } else {
10860 None
10861 };
10862 if self.parse_keyword(Keyword::INSERT) {
10863 Ok(Query {
10864 with,
10865 body: self.parse_insert_setexpr_boxed()?,
10866 order_by: None,
10867 limit_clause: None,
10868 fetch: None,
10869 locks: vec![],
10870 for_clause: None,
10871 settings: None,
10872 format_clause: None,
10873 pipe_operators: vec![],
10874 }
10875 .into())
10876 } else if self.parse_keyword(Keyword::UPDATE) {
10877 Ok(Query {
10878 with,
10879 body: self.parse_update_setexpr_boxed()?,
10880 order_by: None,
10881 limit_clause: None,
10882 fetch: None,
10883 locks: vec![],
10884 for_clause: None,
10885 settings: None,
10886 format_clause: None,
10887 pipe_operators: vec![],
10888 }
10889 .into())
10890 } else if self.parse_keyword(Keyword::DELETE) {
10891 Ok(Query {
10892 with,
10893 body: self.parse_delete_setexpr_boxed()?,
10894 limit_clause: None,
10895 order_by: None,
10896 fetch: None,
10897 locks: vec![],
10898 for_clause: None,
10899 settings: None,
10900 format_clause: None,
10901 pipe_operators: vec![],
10902 }
10903 .into())
10904 } else {
10905 let body = self.parse_query_body(self.dialect.prec_unknown())?;
10906
10907 let order_by = self.parse_optional_order_by()?;
10908
10909 let limit_clause = self.parse_optional_limit_clause()?;
10910
10911 let settings = self.parse_settings()?;
10912
10913 let fetch = if self.parse_keyword(Keyword::FETCH) {
10914 Some(self.parse_fetch()?)
10915 } else {
10916 None
10917 };
10918
10919 let mut for_clause = None;
10920 let mut locks = Vec::new();
10921 while self.parse_keyword(Keyword::FOR) {
10922 if let Some(parsed_for_clause) = self.parse_for_clause()? {
10923 for_clause = Some(parsed_for_clause);
10924 break;
10925 } else {
10926 locks.push(self.parse_lock()?);
10927 }
10928 }
10929 let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
10930 && self.parse_keyword(Keyword::FORMAT)
10931 {
10932 if self.parse_keyword(Keyword::NULL) {
10933 Some(FormatClause::Null)
10934 } else {
10935 let ident = self.parse_identifier()?;
10936 Some(FormatClause::Identifier(ident))
10937 }
10938 } else {
10939 None
10940 };
10941
10942 let pipe_operators = if self.dialect.supports_pipe_operator() {
10943 self.parse_pipe_operators()?
10944 } else {
10945 Vec::new()
10946 };
10947
10948 Ok(Query {
10949 with,
10950 body,
10951 order_by,
10952 limit_clause,
10953 fetch,
10954 locks,
10955 for_clause,
10956 settings,
10957 format_clause,
10958 pipe_operators,
10959 }
10960 .into())
10961 }
10962 }
10963
10964 fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
10965 let mut pipe_operators = Vec::new();
10966
10967 while self.consume_token(&Token::VerticalBarRightAngleBracket) {
10968 let kw = self.expect_one_of_keywords(&[
10969 Keyword::SELECT,
10970 Keyword::EXTEND,
10971 Keyword::SET,
10972 Keyword::DROP,
10973 Keyword::AS,
10974 Keyword::WHERE,
10975 Keyword::LIMIT,
10976 Keyword::AGGREGATE,
10977 Keyword::ORDER,
10978 ])?;
10979 match kw {
10980 Keyword::SELECT => {
10981 let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
10982 pipe_operators.push(PipeOperator::Select { exprs })
10983 }
10984 Keyword::EXTEND => {
10985 let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
10986 pipe_operators.push(PipeOperator::Extend { exprs })
10987 }
10988 Keyword::SET => {
10989 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
10990 pipe_operators.push(PipeOperator::Set { assignments })
10991 }
10992 Keyword::DROP => {
10993 let columns = self.parse_identifiers()?;
10994 pipe_operators.push(PipeOperator::Drop { columns })
10995 }
10996 Keyword::AS => {
10997 let alias = self.parse_identifier()?;
10998 pipe_operators.push(PipeOperator::As { alias })
10999 }
11000 Keyword::WHERE => {
11001 let expr = self.parse_expr()?;
11002 pipe_operators.push(PipeOperator::Where { expr })
11003 }
11004 Keyword::LIMIT => {
11005 let expr = self.parse_expr()?;
11006 let offset = if self.parse_keyword(Keyword::OFFSET) {
11007 Some(self.parse_expr()?)
11008 } else {
11009 None
11010 };
11011 pipe_operators.push(PipeOperator::Limit { expr, offset })
11012 }
11013 Keyword::AGGREGATE => {
11014 let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
11015 vec![]
11016 } else {
11017 self.parse_comma_separated(|parser| {
11018 parser.parse_expr_with_alias_and_order_by()
11019 })?
11020 };
11021
11022 let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11023 self.parse_comma_separated(|parser| {
11024 parser.parse_expr_with_alias_and_order_by()
11025 })?
11026 } else {
11027 vec![]
11028 };
11029
11030 pipe_operators.push(PipeOperator::Aggregate {
11031 full_table_exprs,
11032 group_by_expr,
11033 })
11034 }
11035 Keyword::ORDER => {
11036 self.expect_one_of_keywords(&[Keyword::BY])?;
11037 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11038 pipe_operators.push(PipeOperator::OrderBy { exprs })
11039 }
11040 unhandled => {
11041 return Err(ParserError::ParserError(format!(
11042 "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
11043 )))
11044 }
11045 }
11046 }
11047 Ok(pipe_operators)
11048 }
11049
11050 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
11051 let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
11052 && self.parse_keyword(Keyword::SETTINGS)
11053 {
11054 let key_values = self.parse_comma_separated(|p| {
11055 let key = p.parse_identifier()?;
11056 p.expect_token(&Token::Eq)?;
11057 let value = p.parse_value()?.value;
11058 Ok(Setting { key, value })
11059 })?;
11060 Some(key_values)
11061 } else {
11062 None
11063 };
11064 Ok(settings)
11065 }
11066
11067 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
11069 if self.parse_keyword(Keyword::XML) {
11070 Ok(Some(self.parse_for_xml()?))
11071 } else if self.parse_keyword(Keyword::JSON) {
11072 Ok(Some(self.parse_for_json()?))
11073 } else if self.parse_keyword(Keyword::BROWSE) {
11074 Ok(Some(ForClause::Browse))
11075 } else {
11076 Ok(None)
11077 }
11078 }
11079
11080 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
11082 let for_xml = if self.parse_keyword(Keyword::RAW) {
11083 let mut element_name = None;
11084 if self.peek_token().token == Token::LParen {
11085 self.expect_token(&Token::LParen)?;
11086 element_name = Some(self.parse_literal_string()?);
11087 self.expect_token(&Token::RParen)?;
11088 }
11089 ForXml::Raw(element_name)
11090 } else if self.parse_keyword(Keyword::AUTO) {
11091 ForXml::Auto
11092 } else if self.parse_keyword(Keyword::EXPLICIT) {
11093 ForXml::Explicit
11094 } else if self.parse_keyword(Keyword::PATH) {
11095 let mut element_name = None;
11096 if self.peek_token().token == Token::LParen {
11097 self.expect_token(&Token::LParen)?;
11098 element_name = Some(self.parse_literal_string()?);
11099 self.expect_token(&Token::RParen)?;
11100 }
11101 ForXml::Path(element_name)
11102 } else {
11103 return Err(ParserError::ParserError(
11104 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
11105 ));
11106 };
11107 let mut elements = false;
11108 let mut binary_base64 = false;
11109 let mut root = None;
11110 let mut r#type = false;
11111 while self.peek_token().token == Token::Comma {
11112 self.next_token();
11113 if self.parse_keyword(Keyword::ELEMENTS) {
11114 elements = true;
11115 } else if self.parse_keyword(Keyword::BINARY) {
11116 self.expect_keyword_is(Keyword::BASE64)?;
11117 binary_base64 = true;
11118 } else if self.parse_keyword(Keyword::ROOT) {
11119 self.expect_token(&Token::LParen)?;
11120 root = Some(self.parse_literal_string()?);
11121 self.expect_token(&Token::RParen)?;
11122 } else if self.parse_keyword(Keyword::TYPE) {
11123 r#type = true;
11124 }
11125 }
11126 Ok(ForClause::Xml {
11127 for_xml,
11128 elements,
11129 binary_base64,
11130 root,
11131 r#type,
11132 })
11133 }
11134
11135 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
11137 let for_json = if self.parse_keyword(Keyword::AUTO) {
11138 ForJson::Auto
11139 } else if self.parse_keyword(Keyword::PATH) {
11140 ForJson::Path
11141 } else {
11142 return Err(ParserError::ParserError(
11143 "Expected FOR JSON [AUTO | PATH ]".to_string(),
11144 ));
11145 };
11146 let mut root = None;
11147 let mut include_null_values = false;
11148 let mut without_array_wrapper = false;
11149 while self.peek_token().token == Token::Comma {
11150 self.next_token();
11151 if self.parse_keyword(Keyword::ROOT) {
11152 self.expect_token(&Token::LParen)?;
11153 root = Some(self.parse_literal_string()?);
11154 self.expect_token(&Token::RParen)?;
11155 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
11156 include_null_values = true;
11157 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
11158 without_array_wrapper = true;
11159 }
11160 }
11161 Ok(ForClause::Json {
11162 for_json,
11163 root,
11164 include_null_values,
11165 without_array_wrapper,
11166 })
11167 }
11168
11169 pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
11171 let name = self.parse_identifier()?;
11172
11173 let mut cte = if self.parse_keyword(Keyword::AS) {
11174 let mut is_materialized = None;
11175 if dialect_of!(self is PostgreSqlDialect) {
11176 if self.parse_keyword(Keyword::MATERIALIZED) {
11177 is_materialized = Some(CteAsMaterialized::Materialized);
11178 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
11179 is_materialized = Some(CteAsMaterialized::NotMaterialized);
11180 }
11181 }
11182 self.expect_token(&Token::LParen)?;
11183
11184 let query = self.parse_query()?;
11185 let closing_paren_token = self.expect_token(&Token::RParen)?;
11186
11187 let alias = TableAlias {
11188 name,
11189 columns: vec![],
11190 };
11191 Cte {
11192 alias,
11193 query,
11194 from: None,
11195 materialized: is_materialized,
11196 closing_paren_token: closing_paren_token.into(),
11197 }
11198 } else {
11199 let columns = self.parse_table_alias_column_defs()?;
11200 self.expect_keyword_is(Keyword::AS)?;
11201 let mut is_materialized = None;
11202 if dialect_of!(self is PostgreSqlDialect) {
11203 if self.parse_keyword(Keyword::MATERIALIZED) {
11204 is_materialized = Some(CteAsMaterialized::Materialized);
11205 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
11206 is_materialized = Some(CteAsMaterialized::NotMaterialized);
11207 }
11208 }
11209 self.expect_token(&Token::LParen)?;
11210
11211 let query = self.parse_query()?;
11212 let closing_paren_token = self.expect_token(&Token::RParen)?;
11213
11214 let alias = TableAlias { name, columns };
11215 Cte {
11216 alias,
11217 query,
11218 from: None,
11219 materialized: is_materialized,
11220 closing_paren_token: closing_paren_token.into(),
11221 }
11222 };
11223 if self.parse_keyword(Keyword::FROM) {
11224 cte.from = Some(self.parse_identifier()?);
11225 }
11226 Ok(cte)
11227 }
11228
11229 pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
11238 let expr = if self.peek_keyword(Keyword::SELECT)
11241 || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
11242 {
11243 SetExpr::Select(self.parse_select().map(Box::new)?)
11244 } else if self.consume_token(&Token::LParen) {
11245 let subquery = self.parse_query()?;
11247 self.expect_token(&Token::RParen)?;
11248 SetExpr::Query(subquery)
11249 } else if self.parse_keyword(Keyword::VALUES) {
11250 let is_mysql = dialect_of!(self is MySqlDialect);
11251 SetExpr::Values(self.parse_values(is_mysql)?)
11252 } else if self.parse_keyword(Keyword::TABLE) {
11253 SetExpr::Table(Box::new(self.parse_as_table()?))
11254 } else {
11255 return self.expected(
11256 "SELECT, VALUES, or a subquery in the query body",
11257 self.peek_token(),
11258 );
11259 };
11260
11261 self.parse_remaining_set_exprs(expr, precedence)
11262 }
11263
11264 fn parse_remaining_set_exprs(
11268 &mut self,
11269 mut expr: SetExpr,
11270 precedence: u8,
11271 ) -> Result<Box<SetExpr>, ParserError> {
11272 loop {
11273 let op = self.parse_set_operator(&self.peek_token().token);
11275 let next_precedence = match op {
11276 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
11278 10
11279 }
11280 Some(SetOperator::Intersect) => 20,
11282 None => break,
11284 };
11285 if precedence >= next_precedence {
11286 break;
11287 }
11288 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
11290 expr = SetExpr::SetOperation {
11291 left: Box::new(expr),
11292 op: op.unwrap(),
11293 set_quantifier,
11294 right: self.parse_query_body(next_precedence)?,
11295 };
11296 }
11297
11298 Ok(expr.into())
11299 }
11300
11301 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
11302 match token {
11303 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
11304 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
11305 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
11306 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
11307 _ => None,
11308 }
11309 }
11310
11311 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
11312 match op {
11313 Some(
11314 SetOperator::Except
11315 | SetOperator::Intersect
11316 | SetOperator::Union
11317 | SetOperator::Minus,
11318 ) => {
11319 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
11320 SetQuantifier::DistinctByName
11321 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
11322 SetQuantifier::ByName
11323 } else if self.parse_keyword(Keyword::ALL) {
11324 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
11325 SetQuantifier::AllByName
11326 } else {
11327 SetQuantifier::All
11328 }
11329 } else if self.parse_keyword(Keyword::DISTINCT) {
11330 SetQuantifier::Distinct
11331 } else {
11332 SetQuantifier::None
11333 }
11334 }
11335 _ => SetQuantifier::None,
11336 }
11337 }
11338
11339 pub fn parse_select(&mut self) -> Result<Select, ParserError> {
11341 let mut from_first = None;
11342
11343 if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
11344 let from_token = self.expect_keyword(Keyword::FROM)?;
11345 let from = self.parse_table_with_joins()?;
11346 if !self.peek_keyword(Keyword::SELECT) {
11347 return Ok(Select {
11348 select_token: AttachedToken(from_token),
11349 distinct: None,
11350 top: None,
11351 top_before_distinct: false,
11352 projection: vec![],
11353 into: None,
11354 from,
11355 lateral_views: vec![],
11356 prewhere: None,
11357 selection: None,
11358 group_by: GroupByExpr::Expressions(vec![], vec![]),
11359 cluster_by: vec![],
11360 distribute_by: vec![],
11361 sort_by: vec![],
11362 having: None,
11363 named_window: vec![],
11364 window_before_qualify: false,
11365 qualify: None,
11366 value_table_mode: None,
11367 connect_by: None,
11368 flavor: SelectFlavor::FromFirstNoSelect,
11369 });
11370 }
11371 from_first = Some(from);
11372 }
11373
11374 let select_token = self.expect_keyword(Keyword::SELECT)?;
11375 let value_table_mode =
11376 if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) {
11377 if self.parse_keyword(Keyword::VALUE) {
11378 Some(ValueTableMode::AsValue)
11379 } else if self.parse_keyword(Keyword::STRUCT) {
11380 Some(ValueTableMode::AsStruct)
11381 } else {
11382 self.expected("VALUE or STRUCT", self.peek_token())?
11383 }
11384 } else {
11385 None
11386 };
11387
11388 let mut top_before_distinct = false;
11389 let mut top = None;
11390 if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
11391 top = Some(self.parse_top()?);
11392 top_before_distinct = true;
11393 }
11394 let distinct = self.parse_all_or_distinct()?;
11395 if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
11396 top = Some(self.parse_top()?);
11397 }
11398
11399 let projection =
11400 if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
11401 vec![]
11402 } else {
11403 self.parse_projection()?
11404 };
11405
11406 let into = if self.parse_keyword(Keyword::INTO) {
11407 Some(self.parse_select_into()?)
11408 } else {
11409 None
11410 };
11411
11412 let (from, from_first) = if let Some(from) = from_first.take() {
11418 (from, true)
11419 } else if self.parse_keyword(Keyword::FROM) {
11420 (self.parse_table_with_joins()?, false)
11421 } else {
11422 (vec![], false)
11423 };
11424
11425 let mut lateral_views = vec![];
11426 loop {
11427 if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
11428 let outer = self.parse_keyword(Keyword::OUTER);
11429 let lateral_view = self.parse_expr()?;
11430 let lateral_view_name = self.parse_object_name(false)?;
11431 let lateral_col_alias = self
11432 .parse_comma_separated(|parser| {
11433 parser.parse_optional_alias(&[
11434 Keyword::WHERE,
11435 Keyword::GROUP,
11436 Keyword::CLUSTER,
11437 Keyword::HAVING,
11438 Keyword::LATERAL,
11439 ]) })?
11441 .into_iter()
11442 .flatten()
11443 .collect();
11444
11445 lateral_views.push(LateralView {
11446 lateral_view,
11447 lateral_view_name,
11448 lateral_col_alias,
11449 outer,
11450 });
11451 } else {
11452 break;
11453 }
11454 }
11455
11456 let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
11457 && self.parse_keyword(Keyword::PREWHERE)
11458 {
11459 Some(self.parse_expr()?)
11460 } else {
11461 None
11462 };
11463
11464 let selection = if self.parse_keyword(Keyword::WHERE) {
11465 Some(self.parse_expr()?)
11466 } else {
11467 None
11468 };
11469
11470 let group_by = self
11471 .parse_optional_group_by()?
11472 .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
11473
11474 let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
11475 self.parse_comma_separated(Parser::parse_expr)?
11476 } else {
11477 vec![]
11478 };
11479
11480 let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
11481 self.parse_comma_separated(Parser::parse_expr)?
11482 } else {
11483 vec![]
11484 };
11485
11486 let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
11487 self.parse_comma_separated(Parser::parse_expr)?
11488 } else {
11489 vec![]
11490 };
11491
11492 let having = if self.parse_keyword(Keyword::HAVING) {
11493 Some(self.parse_expr()?)
11494 } else {
11495 None
11496 };
11497
11498 let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
11500 {
11501 let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
11502 if self.parse_keyword(Keyword::QUALIFY) {
11503 (named_windows, Some(self.parse_expr()?), true)
11504 } else {
11505 (named_windows, None, true)
11506 }
11507 } else if self.parse_keyword(Keyword::QUALIFY) {
11508 let qualify = Some(self.parse_expr()?);
11509 if self.parse_keyword(Keyword::WINDOW) {
11510 (
11511 self.parse_comma_separated(Parser::parse_named_window)?,
11512 qualify,
11513 false,
11514 )
11515 } else {
11516 (Default::default(), qualify, false)
11517 }
11518 } else {
11519 Default::default()
11520 };
11521
11522 let connect_by = if self.dialect.supports_connect_by()
11523 && self
11524 .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
11525 .is_some()
11526 {
11527 self.prev_token();
11528 Some(self.parse_connect_by()?)
11529 } else {
11530 None
11531 };
11532
11533 Ok(Select {
11534 select_token: AttachedToken(select_token),
11535 distinct,
11536 top,
11537 top_before_distinct,
11538 projection,
11539 into,
11540 from,
11541 lateral_views,
11542 prewhere,
11543 selection,
11544 group_by,
11545 cluster_by,
11546 distribute_by,
11547 sort_by,
11548 having,
11549 named_window: named_windows,
11550 window_before_qualify,
11551 qualify,
11552 value_table_mode,
11553 connect_by,
11554 flavor: if from_first {
11555 SelectFlavor::FromFirst
11556 } else {
11557 SelectFlavor::Standard
11558 },
11559 })
11560 }
11561
11562 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
11566 where
11567 F: FnMut(&mut Parser) -> Result<T, ParserError>,
11568 {
11569 let current_state = self.state;
11570 self.state = state;
11571 let res = f(self);
11572 self.state = current_state;
11573 res
11574 }
11575
11576 pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
11577 let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
11578 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
11579 parser.parse_comma_separated(Parser::parse_expr)
11580 })?;
11581 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
11582 let condition = self.parse_expr()?;
11583 (condition, relationships)
11584 } else {
11585 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
11586 let condition = self.parse_expr()?;
11587 self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
11588 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
11589 parser.parse_comma_separated(Parser::parse_expr)
11590 })?;
11591 (condition, relationships)
11592 };
11593 Ok(ConnectBy {
11594 condition,
11595 relationships,
11596 })
11597 }
11598
11599 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
11601 let token1 = self.next_token();
11602 let token2 = self.next_token();
11603 let token3 = self.next_token();
11604
11605 let table_name;
11606 let schema_name;
11607 if token2 == Token::Period {
11608 match token1.token {
11609 Token::Word(w) => {
11610 schema_name = w.value;
11611 }
11612 _ => {
11613 return self.expected("Schema name", token1);
11614 }
11615 }
11616 match token3.token {
11617 Token::Word(w) => {
11618 table_name = w.value;
11619 }
11620 _ => {
11621 return self.expected("Table name", token3);
11622 }
11623 }
11624 Ok(Table {
11625 table_name: Some(table_name),
11626 schema_name: Some(schema_name),
11627 })
11628 } else {
11629 match token1.token {
11630 Token::Word(w) => {
11631 table_name = w.value;
11632 }
11633 _ => {
11634 return self.expected("Table name", token1);
11635 }
11636 }
11637 Ok(Table {
11638 table_name: Some(table_name),
11639 schema_name: None,
11640 })
11641 }
11642 }
11643
11644 fn parse_set_role(
11646 &mut self,
11647 modifier: Option<ContextModifier>,
11648 ) -> Result<Statement, ParserError> {
11649 self.expect_keyword_is(Keyword::ROLE)?;
11650
11651 let role_name = if self.parse_keyword(Keyword::NONE) {
11652 None
11653 } else {
11654 Some(self.parse_identifier()?)
11655 };
11656 Ok(Statement::Set(Set::SetRole {
11657 context_modifier: modifier,
11658 role_name,
11659 }))
11660 }
11661
11662 fn parse_set_values(
11663 &mut self,
11664 parenthesized_assignment: bool,
11665 ) -> Result<Vec<Expr>, ParserError> {
11666 let mut values = vec![];
11667
11668 if parenthesized_assignment {
11669 self.expect_token(&Token::LParen)?;
11670 }
11671
11672 loop {
11673 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
11674 expr
11675 } else if let Ok(expr) = self.parse_expr() {
11676 expr
11677 } else {
11678 self.expected("variable value", self.peek_token())?
11679 };
11680
11681 values.push(value);
11682 if self.consume_token(&Token::Comma) {
11683 continue;
11684 }
11685
11686 if parenthesized_assignment {
11687 self.expect_token(&Token::RParen)?;
11688 }
11689 return Ok(values);
11690 }
11691 }
11692
11693 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
11694 let modifier =
11695 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
11696
11697 Self::keyword_to_modifier(modifier)
11698 }
11699
11700 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
11702 let scope = self.parse_context_modifier();
11703
11704 let name = if self.dialect.supports_parenthesized_set_variables()
11705 && self.consume_token(&Token::LParen)
11706 {
11707 self.expected("Unparenthesized assignment", self.peek_token())?
11711 } else {
11712 self.parse_object_name(false)?
11713 };
11714
11715 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
11716 return self.expected("assignment operator", self.peek_token());
11717 }
11718
11719 let value = self.parse_expr()?;
11720
11721 Ok(SetAssignment { scope, name, value })
11722 }
11723
    /// Parses the remainder of a `SET` statement (the `SET` keyword has
    /// already been consumed): role, time zone, names, transaction
    /// characteristics, session parameters, or plain variable assignments.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // A scope modifier (SESSION/LOCAL/GLOBAL) is only parsed when the
        // HIVEVAR form was not used.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        // The HIVEVAR form is `SET HIVEVAR:name = ...`.
        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET ROLE ...` is tried speculatively so we can fall through to the
        // other forms when it does not parse.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // With an assignment operator this is represented as an
                // ordinary assignment to the pseudo-variable TIMEZONE.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // No operator: `SET TIME ZONE <expr>` form.
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            // Optional `COLLATE <literal>` suffix.
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET CHARACTERISTICS AS TRANSACTION ...` — session-level modes.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?.value;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            if scope.is_some() {
                // parse_set_assignment parses its own scope modifier, so push
                // the single keyword token we already consumed back first.
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: unwrap it into the single-
                    // assignment representation.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // One variable name, or a parenthesized list of variables when the
        // dialect allows it.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // No operator at all: some dialects allow `SET <param> ON|OFF` style
        // session parameters. Back up over the name just parsed and re-parse.
        // NOTE(review): prev_token() rewinds one token — this assumes the
        // parsed name was a single token; confirm for multi-part names.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected("equals sign or TO", self.peek_token())
    }
11872
11873 pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
11874 if self.parse_keyword(Keyword::STATISTICS) {
11875 let topic = match self.parse_one_of_keywords(&[
11876 Keyword::IO,
11877 Keyword::PROFILE,
11878 Keyword::TIME,
11879 Keyword::XML,
11880 ]) {
11881 Some(Keyword::IO) => SessionParamStatsTopic::IO,
11882 Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
11883 Some(Keyword::TIME) => SessionParamStatsTopic::Time,
11884 Some(Keyword::XML) => SessionParamStatsTopic::Xml,
11885 _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
11886 };
11887 let value = self.parse_session_param_value()?;
11888 Ok(
11889 Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
11890 topic,
11891 value,
11892 }))
11893 .into(),
11894 )
11895 } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
11896 let obj = self.parse_object_name(false)?;
11897 let value = self.parse_session_param_value()?;
11898 Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
11899 SetSessionParamIdentityInsert { obj, value },
11900 ))
11901 .into())
11902 } else if self.parse_keyword(Keyword::OFFSETS) {
11903 let keywords = self.parse_comma_separated(|parser| {
11904 let next_token = parser.next_token();
11905 match &next_token.token {
11906 Token::Word(w) => Ok(w.to_string()),
11907 _ => parser.expected("SQL keyword", next_token),
11908 }
11909 })?;
11910 let value = self.parse_session_param_value()?;
11911 Ok(
11912 Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
11913 keywords,
11914 value,
11915 }))
11916 .into(),
11917 )
11918 } else {
11919 let names = self.parse_comma_separated(|parser| {
11920 let next_token = parser.next_token();
11921 match next_token.token {
11922 Token::Word(w) => Ok(w.to_string()),
11923 _ => parser.expected("Session param name", next_token),
11924 }
11925 })?;
11926 let value = self.parse_expr()?.to_string();
11927 Ok(
11928 Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
11929 names,
11930 value,
11931 }))
11932 .into(),
11933 )
11934 }
11935 }
11936
11937 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
11938 if self.parse_keyword(Keyword::ON) {
11939 Ok(SessionParamValue::On)
11940 } else if self.parse_keyword(Keyword::OFF) {
11941 Ok(SessionParamValue::Off)
11942 } else {
11943 self.expected("ON or OFF", self.peek_token())
11944 }
11945 }
11946
11947 pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
11948 let terse = self.parse_keyword(Keyword::TERSE);
11949 let extended = self.parse_keyword(Keyword::EXTENDED);
11950 let full = self.parse_keyword(Keyword::FULL);
11951 let session = self.parse_keyword(Keyword::SESSION);
11952 let global = self.parse_keyword(Keyword::GLOBAL);
11953 let external = self.parse_keyword(Keyword::EXTERNAL);
11954 if self
11955 .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
11956 .is_some()
11957 {
11958 Ok(self.parse_show_columns(extended, full)?)
11959 } else if self.parse_keyword(Keyword::TABLES) {
11960 Ok(self.parse_show_tables(terse, extended, full, external)?)
11961 } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
11962 Ok(self.parse_show_views(terse, true)?)
11963 } else if self.parse_keyword(Keyword::VIEWS) {
11964 Ok(self.parse_show_views(terse, false)?)
11965 } else if self.parse_keyword(Keyword::FUNCTIONS) {
11966 Ok(self.parse_show_functions()?)
11967 } else if extended || full {
11968 Err(ParserError::ParserError(
11969 "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
11970 ))
11971 } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
11972 Ok(self.parse_show_create()?)
11973 } else if self.parse_keyword(Keyword::COLLATION) {
11974 Ok(self.parse_show_collation()?)
11975 } else if self.parse_keyword(Keyword::VARIABLES)
11976 && dialect_of!(self is MySqlDialect | GenericDialect)
11977 {
11978 Ok(Statement::ShowVariables {
11979 filter: self.parse_show_statement_filter()?,
11980 session,
11981 global,
11982 })
11983 } else if self.parse_keyword(Keyword::STATUS)
11984 && dialect_of!(self is MySqlDialect | GenericDialect)
11985 {
11986 Ok(Statement::ShowStatus {
11987 filter: self.parse_show_statement_filter()?,
11988 session,
11989 global,
11990 })
11991 } else if self.parse_keyword(Keyword::DATABASES) {
11992 self.parse_show_databases(terse)
11993 } else if self.parse_keyword(Keyword::SCHEMAS) {
11994 self.parse_show_schemas(terse)
11995 } else {
11996 Ok(Statement::ShowVariable {
11997 variable: self.parse_identifiers()?,
11998 })
11999 }
12000 }
12001
12002 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
12003 let history = self.parse_keyword(Keyword::HISTORY);
12004 let show_options = self.parse_show_stmt_options()?;
12005 Ok(Statement::ShowDatabases {
12006 terse,
12007 history,
12008 show_options,
12009 })
12010 }
12011
12012 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
12013 let history = self.parse_keyword(Keyword::HISTORY);
12014 let show_options = self.parse_show_stmt_options()?;
12015 Ok(Statement::ShowSchemas {
12016 terse,
12017 history,
12018 show_options,
12019 })
12020 }
12021
12022 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
12023 let obj_type = match self.expect_one_of_keywords(&[
12024 Keyword::TABLE,
12025 Keyword::TRIGGER,
12026 Keyword::FUNCTION,
12027 Keyword::PROCEDURE,
12028 Keyword::EVENT,
12029 Keyword::VIEW,
12030 ])? {
12031 Keyword::TABLE => Ok(ShowCreateObject::Table),
12032 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
12033 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
12034 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
12035 Keyword::EVENT => Ok(ShowCreateObject::Event),
12036 Keyword::VIEW => Ok(ShowCreateObject::View),
12037 keyword => Err(ParserError::ParserError(format!(
12038 "Unable to map keyword to ShowCreateObject: {keyword:?}"
12039 ))),
12040 }?;
12041
12042 let obj_name = self.parse_object_name(false)?;
12043
12044 Ok(Statement::ShowCreate { obj_type, obj_name })
12045 }
12046
12047 pub fn parse_show_columns(
12048 &mut self,
12049 extended: bool,
12050 full: bool,
12051 ) -> Result<Statement, ParserError> {
12052 let show_options = self.parse_show_stmt_options()?;
12053 Ok(Statement::ShowColumns {
12054 extended,
12055 full,
12056 show_options,
12057 })
12058 }
12059
12060 fn parse_show_tables(
12061 &mut self,
12062 terse: bool,
12063 extended: bool,
12064 full: bool,
12065 external: bool,
12066 ) -> Result<Statement, ParserError> {
12067 let history = !external && self.parse_keyword(Keyword::HISTORY);
12068 let show_options = self.parse_show_stmt_options()?;
12069 Ok(Statement::ShowTables {
12070 terse,
12071 history,
12072 extended,
12073 full,
12074 external,
12075 show_options,
12076 })
12077 }
12078
12079 fn parse_show_views(
12080 &mut self,
12081 terse: bool,
12082 materialized: bool,
12083 ) -> Result<Statement, ParserError> {
12084 let show_options = self.parse_show_stmt_options()?;
12085 Ok(Statement::ShowViews {
12086 materialized,
12087 terse,
12088 show_options,
12089 })
12090 }
12091
12092 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
12093 let filter = self.parse_show_statement_filter()?;
12094 Ok(Statement::ShowFunctions { filter })
12095 }
12096
12097 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
12098 let filter = self.parse_show_statement_filter()?;
12099 Ok(Statement::ShowCollation { filter })
12100 }
12101
12102 pub fn parse_show_statement_filter(
12103 &mut self,
12104 ) -> Result<Option<ShowStatementFilter>, ParserError> {
12105 if self.parse_keyword(Keyword::LIKE) {
12106 Ok(Some(ShowStatementFilter::Like(
12107 self.parse_literal_string()?,
12108 )))
12109 } else if self.parse_keyword(Keyword::ILIKE) {
12110 Ok(Some(ShowStatementFilter::ILike(
12111 self.parse_literal_string()?,
12112 )))
12113 } else if self.parse_keyword(Keyword::WHERE) {
12114 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
12115 } else {
12116 self.maybe_parse(|parser| -> Result<String, ParserError> {
12117 parser.parse_literal_string()
12118 })?
12119 .map_or(Ok(None), |filter| {
12120 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
12121 })
12122 }
12123 }
12124
12125 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
12126 let parsed_keyword = if dialect_of!(self is HiveDialect) {
12128 if self.parse_keyword(Keyword::DEFAULT) {
12130 return Ok(Statement::Use(Use::Default));
12131 }
12132 None } else if dialect_of!(self is DatabricksDialect) {
12134 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
12135 } else if dialect_of!(self is SnowflakeDialect) {
12136 self.parse_one_of_keywords(&[
12137 Keyword::DATABASE,
12138 Keyword::SCHEMA,
12139 Keyword::WAREHOUSE,
12140 Keyword::ROLE,
12141 Keyword::SECONDARY,
12142 ])
12143 } else {
12144 None };
12146
12147 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
12148 self.parse_secondary_roles()?
12149 } else {
12150 let obj_name = self.parse_object_name(false)?;
12151 match parsed_keyword {
12152 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
12153 Some(Keyword::DATABASE) => Use::Database(obj_name),
12154 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
12155 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
12156 Some(Keyword::ROLE) => Use::Role(obj_name),
12157 _ => Use::Object(obj_name),
12158 }
12159 };
12160
12161 Ok(Statement::Use(result))
12162 }
12163
12164 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
12165 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
12166 if self.parse_keyword(Keyword::NONE) {
12167 Ok(Use::SecondaryRoles(SecondaryRoles::None))
12168 } else if self.parse_keyword(Keyword::ALL) {
12169 Ok(Use::SecondaryRoles(SecondaryRoles::All))
12170 } else {
12171 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
12172 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
12173 }
12174 }
12175
12176 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
12177 let relation = self.parse_table_factor()?;
12178 let joins = self.parse_joins()?;
12182 Ok(TableWithJoins { relation, joins })
12183 }
12184
    /// Parses zero or more join clauses following a table factor.
    ///
    /// Returns an empty vector when the next tokens do not begin a join.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix before the join keyword.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                // CROSS JOIN / CROSS APPLY take no ON/USING constraint.
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
                };
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // A bare leading OUTER only forms OUTER APPLY.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // ASOF JOIN <table> MATCH_CONDITION (<expr>) [constraint]
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) the keyword that selects the join type.
                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a JoinOperator variant *constructor*
                // (fn(JoinConstraint) -> JoinOperator); it is applied to the
                // parsed constraint at the bottom of this branch.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER); self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume LEFT/RIGHT, then an optional OUTER/SEMI/ANTI
                        // before the required JOIN keyword.
                        let _ = self.next_token(); let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // OUTER is optional: FULL [OUTER] JOIN.
                        let _ = self.next_token(); let _ = self.parse_keyword(Keyword::OUTER); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token(); JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self.expected("a join type after NATURAL", self.peek_token());
                    }
                    // Not a join keyword: no more joins to parse.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                if self.peek_parens_less_nested_join() {
                    // A join keyword directly after the relation (with no
                    // parentheses) nests to the right: the following joins
                    // are attached to this relation, which becomes a
                    // NestedJoin table factor.
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
12339
12340 fn peek_parens_less_nested_join(&self) -> bool {
12341 matches!(
12342 self.peek_token_ref().token,
12343 Token::Word(Word {
12344 keyword: Keyword::JOIN
12345 | Keyword::INNER
12346 | Keyword::LEFT
12347 | Keyword::RIGHT
12348 | Keyword::FULL,
12349 ..
12350 })
12351 )
12352 }
12353
12354 pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12356 if self.parse_keyword(Keyword::LATERAL) {
12357 if self.consume_token(&Token::LParen) {
12359 self.parse_derived_table_factor(Lateral)
12360 } else {
12361 let name = self.parse_object_name(false)?;
12362 self.expect_token(&Token::LParen)?;
12363 let args = self.parse_optional_args()?;
12364 let alias = self.maybe_parse_table_alias()?;
12365 Ok(TableFactor::Function {
12366 lateral: true,
12367 name,
12368 args,
12369 alias,
12370 })
12371 }
12372 } else if self.parse_keyword(Keyword::TABLE) {
12373 self.expect_token(&Token::LParen)?;
12375 let expr = self.parse_expr()?;
12376 self.expect_token(&Token::RParen)?;
12377 let alias = self.maybe_parse_table_alias()?;
12378 Ok(TableFactor::TableFunction { expr, alias })
12379 } else if self.consume_token(&Token::LParen) {
12380 if let Some(mut table) =
12402 self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
12403 {
12404 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
12405 {
12406 table = match kw {
12407 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
12408 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
12409 _ => unreachable!(),
12410 }
12411 }
12412 return Ok(table);
12413 }
12414
12415 let mut table_and_joins = self.parse_table_and_joins()?;
12422
12423 #[allow(clippy::if_same_then_else)]
12424 if !table_and_joins.joins.is_empty() {
12425 self.expect_token(&Token::RParen)?;
12426 let alias = self.maybe_parse_table_alias()?;
12427 Ok(TableFactor::NestedJoin {
12428 table_with_joins: Box::new(table_and_joins),
12429 alias,
12430 }) } else if let TableFactor::NestedJoin {
12432 table_with_joins: _,
12433 alias: _,
12434 } = &table_and_joins.relation
12435 {
12436 self.expect_token(&Token::RParen)?;
12439 let alias = self.maybe_parse_table_alias()?;
12440 Ok(TableFactor::NestedJoin {
12441 table_with_joins: Box::new(table_and_joins),
12442 alias,
12443 })
12444 } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
12445 self.expect_token(&Token::RParen)?;
12452
12453 if let Some(outer_alias) = self.maybe_parse_table_alias()? {
12454 match &mut table_and_joins.relation {
12457 TableFactor::Derived { alias, .. }
12458 | TableFactor::Table { alias, .. }
12459 | TableFactor::Function { alias, .. }
12460 | TableFactor::UNNEST { alias, .. }
12461 | TableFactor::JsonTable { alias, .. }
12462 | TableFactor::XmlTable { alias, .. }
12463 | TableFactor::OpenJsonTable { alias, .. }
12464 | TableFactor::TableFunction { alias, .. }
12465 | TableFactor::Pivot { alias, .. }
12466 | TableFactor::Unpivot { alias, .. }
12467 | TableFactor::MatchRecognize { alias, .. }
12468 | TableFactor::NestedJoin { alias, .. } => {
12469 if let Some(inner_alias) = alias {
12471 return Err(ParserError::ParserError(format!(
12472 "duplicate alias {inner_alias}"
12473 )));
12474 }
12475 alias.replace(outer_alias);
12479 }
12480 };
12481 }
12482 Ok(table_and_joins.relation)
12484 } else {
12485 self.expected("joined table", self.peek_token())
12488 }
12489 } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
12490 && matches!(
12491 self.peek_tokens(),
12492 [
12493 Token::Word(Word {
12494 keyword: Keyword::VALUES,
12495 ..
12496 }),
12497 Token::LParen
12498 ]
12499 )
12500 {
12501 self.expect_keyword_is(Keyword::VALUES)?;
12502
12503 let values = SetExpr::Values(self.parse_values(false)?);
12507 let alias = self.maybe_parse_table_alias()?;
12508 Ok(TableFactor::Derived {
12509 lateral: false,
12510 subquery: Box::new(Query {
12511 with: None,
12512 body: Box::new(values),
12513 order_by: None,
12514 limit_clause: None,
12515 fetch: None,
12516 locks: vec![],
12517 for_clause: None,
12518 settings: None,
12519 format_clause: None,
12520 pipe_operators: vec![],
12521 }),
12522 alias,
12523 })
12524 } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
12525 && self.parse_keyword(Keyword::UNNEST)
12526 {
12527 self.expect_token(&Token::LParen)?;
12528 let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
12529 self.expect_token(&Token::RParen)?;
12530
12531 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
12532 let alias = match self.maybe_parse_table_alias() {
12533 Ok(Some(alias)) => Some(alias),
12534 Ok(None) => None,
12535 Err(e) => return Err(e),
12536 };
12537
12538 let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
12539 Ok(()) => true,
12540 Err(_) => false,
12541 };
12542
12543 let with_offset_alias = if with_offset {
12544 match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
12545 Ok(Some(alias)) => Some(alias),
12546 Ok(None) => None,
12547 Err(e) => return Err(e),
12548 }
12549 } else {
12550 None
12551 };
12552
12553 Ok(TableFactor::UNNEST {
12554 alias,
12555 array_exprs,
12556 with_offset,
12557 with_offset_alias,
12558 with_ordinality,
12559 })
12560 } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
12561 let json_expr = self.parse_expr()?;
12562 self.expect_token(&Token::Comma)?;
12563 let json_path = self.parse_value()?.value;
12564 self.expect_keyword_is(Keyword::COLUMNS)?;
12565 self.expect_token(&Token::LParen)?;
12566 let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
12567 self.expect_token(&Token::RParen)?;
12568 self.expect_token(&Token::RParen)?;
12569 let alias = self.maybe_parse_table_alias()?;
12570 Ok(TableFactor::JsonTable {
12571 json_expr,
12572 json_path,
12573 columns,
12574 alias,
12575 })
12576 } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
12577 self.prev_token();
12578 self.parse_open_json_table_factor()
12579 } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
12580 self.prev_token();
12581 self.parse_xml_table_factor()
12582 } else {
12583 let name = self.parse_object_name(true)?;
12584
12585 let json_path = match self.peek_token().token {
12586 Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
12587 _ => None,
12588 };
12589
12590 let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
12591 && self.parse_keyword(Keyword::PARTITION)
12592 {
12593 self.parse_parenthesized_identifiers()?
12594 } else {
12595 vec![]
12596 };
12597
12598 let version = self.maybe_parse_table_version()?;
12600
12601 let args = if self.consume_token(&Token::LParen) {
12603 Some(self.parse_table_function_args()?)
12604 } else {
12605 None
12606 };
12607
12608 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
12609
12610 let mut sample = None;
12611 if self.dialect.supports_table_sample_before_alias() {
12612 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
12613 sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
12614 }
12615 }
12616
12617 let alias = self.maybe_parse_table_alias()?;
12618
12619 let index_hints = if self.dialect.supports_table_hints() {
12621 self.maybe_parse(|p| p.parse_table_index_hints())?
12622 .unwrap_or(vec![])
12623 } else {
12624 vec![]
12625 };
12626
12627 let mut with_hints = vec![];
12629 if self.parse_keyword(Keyword::WITH) {
12630 if self.consume_token(&Token::LParen) {
12631 with_hints = self.parse_comma_separated(Parser::parse_expr)?;
12632 self.expect_token(&Token::RParen)?;
12633 } else {
12634 self.prev_token();
12636 }
12637 };
12638
12639 if !self.dialect.supports_table_sample_before_alias() {
12640 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
12641 sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
12642 }
12643 }
12644
12645 let mut table = TableFactor::Table {
12646 name,
12647 alias,
12648 args,
12649 with_hints,
12650 version,
12651 partitions,
12652 with_ordinality,
12653 json_path,
12654 sample,
12655 index_hints,
12656 };
12657
12658 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
12659 table = match kw {
12660 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
12661 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
12662 _ => unreachable!(),
12663 }
12664 }
12665
12666 if self.dialect.supports_match_recognize()
12667 && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
12668 {
12669 table = self.parse_match_recognize(table)?;
12670 }
12671
12672 Ok(table)
12673 }
12674 }
12675
    /// Parses an optional `TABLESAMPLE` / `SAMPLE` clause following a table
    /// factor, returning `Ok(None)` (with no tokens consumed) when neither
    /// introducer keyword is present.
    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
        // The introducer keyword determines the modifier; bail out early
        // when the clause is absent.
        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
            TableSampleModifier::TableSample
        } else if self.parse_keyword(Keyword::SAMPLE) {
            TableSampleModifier::Sample
        } else {
            return Ok(None);
        };

        // Optional sampling method name, e.g. `TABLESAMPLE BERNOULLI (10)`.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // Remember whether the quantity/bucket is parenthesized so the
        // matching `)` can be required after it.
        let parenthesized = self.consume_token(&Token::LParen);

        // A `(BUCKET x OUT OF y [ON expr])` spec is mutually exclusive with
        // a plain quantity, hence the (quantity, bucket) pair: exactly one
        // side is `Some`.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?.value;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?.value;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // The quantity is normally an expression; a bare word (e.g. a
            // byte length like `100M` tokenized as a word) is kept as a
            // placeholder value instead of failing.
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token().span.start
                        );
                    }
                }
            };
            // Optional unit qualifier for the quantity.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic-sampling seed: `REPEATABLE (n)` or `SEED (n)`.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional `OFFSET <expr>` suffix (NOTE(review): presumably for
        // ClickHouse-style sampling offsets — confirm against callers).
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Some(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        })))
    }
12775
12776 fn parse_table_sample_seed(
12777 &mut self,
12778 modifier: TableSampleSeedModifier,
12779 ) -> Result<TableSampleSeed, ParserError> {
12780 self.expect_token(&Token::LParen)?;
12781 let value = self.parse_number_value()?.value;
12782 self.expect_token(&Token::RParen)?;
12783 Ok(TableSampleSeed { modifier, value })
12784 }
12785
12786 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12789 self.expect_token(&Token::LParen)?;
12790 let json_expr = self.parse_expr()?;
12791 let json_path = if self.consume_token(&Token::Comma) {
12792 Some(self.parse_value()?.value)
12793 } else {
12794 None
12795 };
12796 self.expect_token(&Token::RParen)?;
12797 let columns = if self.parse_keyword(Keyword::WITH) {
12798 self.expect_token(&Token::LParen)?;
12799 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
12800 self.expect_token(&Token::RParen)?;
12801 columns
12802 } else {
12803 Vec::new()
12804 };
12805 let alias = self.maybe_parse_table_alias()?;
12806 Ok(TableFactor::OpenJsonTable {
12807 json_expr,
12808 json_path,
12809 columns,
12810 alias,
12811 })
12812 }
12813
12814 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12815 self.expect_token(&Token::LParen)?;
12816 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
12817 self.expect_token(&Token::LParen)?;
12818 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
12819 self.expect_token(&Token::RParen)?;
12820 self.expect_token(&Token::Comma)?;
12821 namespaces
12822 } else {
12823 vec![]
12824 };
12825 let row_expression = self.parse_expr()?;
12826 let passing = self.parse_xml_passing_clause()?;
12827 self.expect_keyword_is(Keyword::COLUMNS)?;
12828 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
12829 self.expect_token(&Token::RParen)?;
12830 let alias = self.maybe_parse_table_alias()?;
12831 Ok(TableFactor::XmlTable {
12832 namespaces,
12833 row_expression,
12834 passing,
12835 columns,
12836 alias,
12837 })
12838 }
12839
12840 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
12841 let uri = self.parse_expr()?;
12842 self.expect_keyword_is(Keyword::AS)?;
12843 let name = self.parse_identifier()?;
12844 Ok(XmlNamespaceDefinition { uri, name })
12845 }
12846
12847 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
12848 let name = self.parse_identifier()?;
12849
12850 let option = if self.parse_keyword(Keyword::FOR) {
12851 self.expect_keyword(Keyword::ORDINALITY)?;
12852 XmlTableColumnOption::ForOrdinality
12853 } else {
12854 let r#type = self.parse_data_type()?;
12855 let mut path = None;
12856 let mut default = None;
12857
12858 if self.parse_keyword(Keyword::PATH) {
12859 path = Some(self.parse_expr()?);
12860 }
12861
12862 if self.parse_keyword(Keyword::DEFAULT) {
12863 default = Some(self.parse_expr()?);
12864 }
12865
12866 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
12867 if !not_null {
12868 let _ = self.parse_keyword(Keyword::NULL);
12870 }
12871
12872 XmlTableColumnOption::NamedInfo {
12873 r#type,
12874 path,
12875 default,
12876 nullable: !not_null,
12877 }
12878 };
12879 Ok(XmlTableColumn { name, option })
12880 }
12881
12882 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
12883 let mut arguments = vec![];
12884 if self.parse_keyword(Keyword::PASSING) {
12885 loop {
12886 let by_value =
12887 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
12888 let expr = self.parse_expr()?;
12889 let alias = if self.parse_keyword(Keyword::AS) {
12890 Some(self.parse_identifier()?)
12891 } else {
12892 None
12893 };
12894 arguments.push(XmlPassingArgument {
12895 expr,
12896 alias,
12897 by_value,
12898 });
12899 if !self.consume_token(&Token::Comma) {
12900 break;
12901 }
12902 }
12903 }
12904 Ok(XmlPassingClause { arguments })
12905 }
12906
    /// Parses the parenthesized body of a `MATCH_RECOGNIZE (...)` clause
    /// applied to `table` (the `MATCH_RECOGNIZE` keyword has already been
    /// consumed), followed by an optional table alias.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional `PARTITION BY <expr, ...>`.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional `ORDER BY <order expr, ...>`.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional `MEASURES expr [AS] alias, ...` — the AS keyword is
        // optional, but the alias itself is mandatory.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional `ONE ROW PER MATCH` or
        // `ALL ROWS PER MATCH [empty-match / unmatched-row mode]`.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional `AFTER MATCH SKIP <option>`; once the three introducer
        // keywords are consumed, one of the four skip options is mandatory.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory `PATTERN ( <pattern> )`.
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory `DEFINE symbol AS expr, ...`.
        self.expect_keyword_is(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
13011
    /// Parses an atomic MATCH_RECOGNIZE pattern element: a `^`/`$` anchor,
    /// an exclusion `{- symbol -}`, `PERMUTE(sym, ...)`, a parenthesized
    /// group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` — start-of-partition anchor.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` — end-of-partition anchor (the tokenizer produces it as a
            // placeholder token).
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}` — exclude the symbol's rows from the output.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // `PERMUTE(sym, ...)` — only recognized as an unquoted word.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // `( pattern )` — grouping.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: rewind the consumed token and try to read a
            // plain symbol name.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
13050
    /// Parses a base pattern followed by any number of repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, `{n,m}`.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        // Quantifiers may be stacked; keep wrapping the pattern until the
        // next token is not a quantifier.
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` reaches us as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    // Brace quantifier; `token` is shadowed by the token
                    // following the `{`.
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — the guard consumes the comma
                        // as a side effect when it matches.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact repetition count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: put the token back and stop.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
13105
13106 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13107 let mut patterns = vec![self.parse_repetition_pattern()?];
13108 while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
13109 patterns.push(self.parse_repetition_pattern()?);
13110 }
13111 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
13112 Ok([pattern]) => Ok(pattern),
13113 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
13114 }
13115 }
13116
13117 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13118 let pattern = self.parse_concat_pattern()?;
13119 if self.consume_token(&Token::Pipe) {
13120 match self.parse_pattern()? {
13121 MatchRecognizePattern::Alternation(mut patterns) => {
13123 patterns.insert(0, pattern);
13124 Ok(MatchRecognizePattern::Alternation(patterns))
13125 }
13126 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
13127 }
13128 } else {
13129 Ok(pattern)
13130 }
13131 }
13132
13133 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
13135 if self.dialect.supports_timestamp_versioning() {
13136 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
13137 {
13138 let expr = self.parse_expr()?;
13139 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
13140 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
13141 let func_name = self.parse_object_name(true)?;
13142 let func = self.parse_function(func_name)?;
13143 return Ok(Some(TableVersion::Function(func)));
13144 }
13145 }
13146 Ok(None)
13147 }
13148
    /// Parses one column definition inside a JSON_TABLE `COLUMNS` list:
    /// `NESTED [PATH] 'path' COLUMNS (...)`, `name FOR ORDINALITY`, or
    /// `name type [EXISTS] PATH 'path' [error-handling clauses]`.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        // `NESTED [PATH] '<path>' COLUMNS ( ... )` — recursive column set.
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional in the NESTED form.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?.value;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        // `<name> FOR ORDINALITY` — row-numbering column.
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?.value;
        // Zero or more `<handler> ON EMPTY` / `<handler> ON ERROR` clauses;
        // a later clause of the same kind overwrites an earlier one.
        let mut on_empty = None;
        let mut on_error = None;
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
13192
13193 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
13201 let name = self.parse_identifier()?;
13202 let r#type = self.parse_data_type()?;
13203 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
13204 self.next_token();
13205 Some(path)
13206 } else {
13207 None
13208 };
13209 let as_json = self.parse_keyword(Keyword::AS);
13210 if as_json {
13211 self.expect_keyword_is(Keyword::JSON)?;
13212 }
13213 Ok(OpenJsonTableColumn {
13214 name,
13215 r#type,
13216 path,
13217 as_json,
13218 })
13219 }
13220
13221 fn parse_json_table_column_error_handling(
13222 &mut self,
13223 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
13224 let res = if self.parse_keyword(Keyword::NULL) {
13225 JsonTableColumnErrorHandling::Null
13226 } else if self.parse_keyword(Keyword::ERROR) {
13227 JsonTableColumnErrorHandling::Error
13228 } else if self.parse_keyword(Keyword::DEFAULT) {
13229 JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
13230 } else {
13231 return Ok(None);
13232 };
13233 self.expect_keyword_is(Keyword::ON)?;
13234 Ok(Some(res))
13235 }
13236
13237 pub fn parse_derived_table_factor(
13238 &mut self,
13239 lateral: IsLateral,
13240 ) -> Result<TableFactor, ParserError> {
13241 let subquery = self.parse_query()?;
13242 self.expect_token(&Token::RParen)?;
13243 let alias = self.maybe_parse_table_alias()?;
13244 Ok(TableFactor::Derived {
13245 lateral: match lateral {
13246 Lateral => true,
13247 NotLateral => false,
13248 },
13249 subquery,
13250 alias,
13251 })
13252 }
13253
13254 fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
13255 let function_name = match self.next_token().token {
13256 Token::Word(w) => Ok(w.value),
13257 _ => self.expected("a function identifier", self.peek_token()),
13258 }?;
13259 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
13260 let alias = if self.parse_keyword(Keyword::AS) {
13261 Some(self.parse_identifier()?)
13262 } else {
13263 None
13264 };
13265
13266 Ok(ExprWithAlias { expr, alias })
13267 }
13268 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
13291 let expr = self.parse_expr()?;
13292 let alias = if self.parse_keyword(Keyword::AS) {
13293 Some(self.parse_identifier()?)
13294 } else {
13295 None
13296 };
13297
13298 Ok(ExprWithAlias { expr, alias })
13299 }
13300
    /// Parses the parenthesized body of a `PIVOT` clause applied to `table`:
    /// `(agg_fns FOR value_column IN (source) [DEFAULT ON NULL (expr)])`,
    /// followed by an optional table alias.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // The pivot column may be a dotted (period-separated) name.
        let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
        self.expect_keyword_is(Keyword::IN)?;

        // The IN list is one of: `ANY [ORDER BY ...]`, a subquery, or an
        // explicit list of (optionally aliased) expressions.
        self.expect_token(&Token::LParen)?;
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
        };
        self.expect_token(&Token::RParen)?;

        // Optional `DEFAULT ON NULL (expr)` fallback value.
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
13347
13348 pub fn parse_unpivot_table_factor(
13349 &mut self,
13350 table: TableFactor,
13351 ) -> Result<TableFactor, ParserError> {
13352 self.expect_token(&Token::LParen)?;
13353 let value = self.parse_identifier()?;
13354 self.expect_keyword_is(Keyword::FOR)?;
13355 let name = self.parse_identifier()?;
13356 self.expect_keyword_is(Keyword::IN)?;
13357 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
13358 self.expect_token(&Token::RParen)?;
13359 let alias = self.maybe_parse_table_alias()?;
13360 Ok(TableFactor::Unpivot {
13361 table: Box::new(table),
13362 value,
13363 name,
13364 columns,
13365 alias,
13366 })
13367 }
13368
13369 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
13370 if natural {
13371 Ok(JoinConstraint::Natural)
13372 } else if self.parse_keyword(Keyword::ON) {
13373 let constraint = self.parse_expr()?;
13374 Ok(JoinConstraint::On(constraint))
13375 } else if self.parse_keyword(Keyword::USING) {
13376 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
13377 Ok(JoinConstraint::Using(columns))
13378 } else {
13379 Ok(JoinConstraint::None)
13380 }
13382 }
13383
13384 pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
13386 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
13387
13388 self.expect_keyword_is(Keyword::TO)?;
13389 let grantees = self.parse_grantees()?;
13390
13391 let with_grant_option =
13392 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
13393
13394 let granted_by = self
13395 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
13396 .then(|| self.parse_identifier().unwrap());
13397
13398 Ok(Statement::Grant {
13399 privileges,
13400 objects,
13401 grantees,
13402 with_grant_option,
13403 granted_by,
13404 })
13405 }
13406
    /// Parses a comma-separated list of grantees for `GRANT`, each
    /// optionally prefixed with a grantee kind (ROLE, USER, SHARE, GROUP,
    /// PUBLIC, DATABASE ROLE, APPLICATION [ROLE]).
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            // When no kind keyword precedes this grantee, the kind from the
            // previous list element is carried over (the final `else` arm
            // keeps the current value).
            grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                grantee_type // no kind keyword: keep the previous kind
            };

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC carries no name.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                // `namespace:name` form — fold both parts back into a
                // single identifier joined by `:`.
                if self.consume_token(&Token::Colon) {
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{}:{}", namespace, ident),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
13464
13465 pub fn parse_grant_revoke_privileges_objects(
13466 &mut self,
13467 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
13468 let privileges = if self.parse_keyword(Keyword::ALL) {
13469 Privileges::All {
13470 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
13471 }
13472 } else {
13473 let actions = self.parse_actions_list()?;
13474 Privileges::Actions(actions)
13475 };
13476
13477 let objects = if self.parse_keyword(Keyword::ON) {
13478 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
13479 Some(GrantObjects::AllTablesInSchema {
13480 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13481 })
13482 } else if self.parse_keywords(&[
13483 Keyword::ALL,
13484 Keyword::SEQUENCES,
13485 Keyword::IN,
13486 Keyword::SCHEMA,
13487 ]) {
13488 Some(GrantObjects::AllSequencesInSchema {
13489 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13490 })
13491 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
13492 Some(GrantObjects::ResourceMonitors(self.parse_comma_separated(
13493 |p| p.parse_object_name_with_wildcards(false, true),
13494 )?))
13495 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
13496 Some(GrantObjects::ComputePools(self.parse_comma_separated(
13497 |p| p.parse_object_name_with_wildcards(false, true),
13498 )?))
13499 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
13500 Some(GrantObjects::FailoverGroup(self.parse_comma_separated(
13501 |p| p.parse_object_name_with_wildcards(false, true),
13502 )?))
13503 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
13504 Some(GrantObjects::ReplicationGroup(self.parse_comma_separated(
13505 |p| p.parse_object_name_with_wildcards(false, true),
13506 )?))
13507 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
13508 Some(GrantObjects::ExternalVolumes(self.parse_comma_separated(
13509 |p| p.parse_object_name_with_wildcards(false, true),
13510 )?))
13511 } else {
13512 let object_type = self.parse_one_of_keywords(&[
13513 Keyword::SEQUENCE,
13514 Keyword::DATABASE,
13515 Keyword::DATABASE,
13516 Keyword::SCHEMA,
13517 Keyword::TABLE,
13518 Keyword::VIEW,
13519 Keyword::WAREHOUSE,
13520 Keyword::INTEGRATION,
13521 Keyword::VIEW,
13522 Keyword::WAREHOUSE,
13523 Keyword::INTEGRATION,
13524 Keyword::USER,
13525 Keyword::CONNECTION,
13526 ]);
13527 let objects =
13528 self.parse_comma_separated(|p| p.parse_object_name_with_wildcards(false, true));
13529 match object_type {
13530 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
13531 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
13532 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
13533 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
13534 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
13535 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
13536 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
13537 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
13538 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
13539 _ => unreachable!(),
13540 }
13541 }
13542 } else {
13543 None
13544 };
13545
13546 Ok((privileges, objects))
13547 }
13548
13549 pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
13550 fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
13551 let columns = parser.parse_parenthesized_column_list(Optional, false)?;
13552 if columns.is_empty() {
13553 Ok(None)
13554 } else {
13555 Ok(Some(columns))
13556 }
13557 }
13558
13559 if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
13561 Ok(Action::ImportedPrivileges)
13562 } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
13563 Ok(Action::AddSearchOptimization)
13564 } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
13565 Ok(Action::AttachListing)
13566 } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
13567 Ok(Action::AttachPolicy)
13568 } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
13569 Ok(Action::BindServiceEndpoint)
13570 } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
13571 let role = self.parse_object_name(false)?;
13572 Ok(Action::DatabaseRole { role })
13573 } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
13574 Ok(Action::EvolveSchema)
13575 } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
13576 Ok(Action::ImportShare)
13577 } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
13578 Ok(Action::ManageVersions)
13579 } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
13580 Ok(Action::ManageReleases)
13581 } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
13582 Ok(Action::OverrideShareRestrictions)
13583 } else if self.parse_keywords(&[
13584 Keyword::PURCHASE,
13585 Keyword::DATA,
13586 Keyword::EXCHANGE,
13587 Keyword::LISTING,
13588 ]) {
13589 Ok(Action::PurchaseDataExchangeListing)
13590 } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
13591 Ok(Action::ResolveAll)
13592 } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
13593 Ok(Action::ReadSession)
13594
13595 } else if self.parse_keyword(Keyword::APPLY) {
13597 let apply_type = self.parse_action_apply_type()?;
13598 Ok(Action::Apply { apply_type })
13599 } else if self.parse_keyword(Keyword::APPLYBUDGET) {
13600 Ok(Action::ApplyBudget)
13601 } else if self.parse_keyword(Keyword::AUDIT) {
13602 Ok(Action::Audit)
13603 } else if self.parse_keyword(Keyword::CONNECT) {
13604 Ok(Action::Connect)
13605 } else if self.parse_keyword(Keyword::CREATE) {
13606 let obj_type = self.maybe_parse_action_create_object_type();
13607 Ok(Action::Create { obj_type })
13608 } else if self.parse_keyword(Keyword::DELETE) {
13609 Ok(Action::Delete)
13610 } else if self.parse_keyword(Keyword::EXECUTE) {
13611 let obj_type = self.maybe_parse_action_execute_obj_type();
13612 Ok(Action::Execute { obj_type })
13613 } else if self.parse_keyword(Keyword::FAILOVER) {
13614 Ok(Action::Failover)
13615 } else if self.parse_keyword(Keyword::INSERT) {
13616 Ok(Action::Insert {
13617 columns: parse_columns(self)?,
13618 })
13619 } else if self.parse_keyword(Keyword::MANAGE) {
13620 let manage_type = self.parse_action_manage_type()?;
13621 Ok(Action::Manage { manage_type })
13622 } else if self.parse_keyword(Keyword::MODIFY) {
13623 let modify_type = self.parse_action_modify_type();
13624 Ok(Action::Modify { modify_type })
13625 } else if self.parse_keyword(Keyword::MONITOR) {
13626 let monitor_type = self.parse_action_monitor_type();
13627 Ok(Action::Monitor { monitor_type })
13628 } else if self.parse_keyword(Keyword::OPERATE) {
13629 Ok(Action::Operate)
13630 } else if self.parse_keyword(Keyword::REFERENCES) {
13631 Ok(Action::References {
13632 columns: parse_columns(self)?,
13633 })
13634 } else if self.parse_keyword(Keyword::READ) {
13635 Ok(Action::Read)
13636 } else if self.parse_keyword(Keyword::REPLICATE) {
13637 Ok(Action::Replicate)
13638 } else if self.parse_keyword(Keyword::ROLE) {
13639 let role = self.parse_identifier()?;
13640 Ok(Action::Role { role })
13641 } else if self.parse_keyword(Keyword::SELECT) {
13642 Ok(Action::Select {
13643 columns: parse_columns(self)?,
13644 })
13645 } else if self.parse_keyword(Keyword::TEMPORARY) {
13646 Ok(Action::Temporary)
13647 } else if self.parse_keyword(Keyword::TRIGGER) {
13648 Ok(Action::Trigger)
13649 } else if self.parse_keyword(Keyword::TRUNCATE) {
13650 Ok(Action::Truncate)
13651 } else if self.parse_keyword(Keyword::UPDATE) {
13652 Ok(Action::Update {
13653 columns: parse_columns(self)?,
13654 })
13655 } else if self.parse_keyword(Keyword::USAGE) {
13656 Ok(Action::Usage)
13657 } else if self.parse_keyword(Keyword::OWNERSHIP) {
13658 Ok(Action::Ownership)
13659 } else {
13660 self.expected("a privilege keyword", self.peek_token())?
13661 }
13662 }
13663
13664 fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
13665 if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
13667 Some(ActionCreateObjectType::ApplicationPackage)
13668 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
13669 Some(ActionCreateObjectType::ComputePool)
13670 } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
13671 Some(ActionCreateObjectType::DataExchangeListing)
13672 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
13673 Some(ActionCreateObjectType::ExternalVolume)
13674 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
13675 Some(ActionCreateObjectType::FailoverGroup)
13676 } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
13677 Some(ActionCreateObjectType::NetworkPolicy)
13678 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
13679 Some(ActionCreateObjectType::OrganiationListing)
13680 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
13681 Some(ActionCreateObjectType::ReplicationGroup)
13682 }
13683 else if self.parse_keyword(Keyword::ACCOUNT) {
13685 Some(ActionCreateObjectType::Account)
13686 } else if self.parse_keyword(Keyword::APPLICATION) {
13687 Some(ActionCreateObjectType::Application)
13688 } else if self.parse_keyword(Keyword::DATABASE) {
13689 Some(ActionCreateObjectType::Database)
13690 } else if self.parse_keyword(Keyword::INTEGRATION) {
13691 Some(ActionCreateObjectType::Integration)
13692 } else if self.parse_keyword(Keyword::ROLE) {
13693 Some(ActionCreateObjectType::Role)
13694 } else if self.parse_keyword(Keyword::SHARE) {
13695 Some(ActionCreateObjectType::Share)
13696 } else if self.parse_keyword(Keyword::USER) {
13697 Some(ActionCreateObjectType::User)
13698 } else if self.parse_keyword(Keyword::WAREHOUSE) {
13699 Some(ActionCreateObjectType::Warehouse)
13700 } else {
13701 None
13702 }
13703 }
13704
13705 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
13706 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
13707 Ok(ActionApplyType::AggregationPolicy)
13708 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
13709 Ok(ActionApplyType::AuthenticationPolicy)
13710 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
13711 Ok(ActionApplyType::JoinPolicy)
13712 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
13713 Ok(ActionApplyType::MaskingPolicy)
13714 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
13715 Ok(ActionApplyType::PackagesPolicy)
13716 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
13717 Ok(ActionApplyType::PasswordPolicy)
13718 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
13719 Ok(ActionApplyType::ProjectionPolicy)
13720 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
13721 Ok(ActionApplyType::RowAccessPolicy)
13722 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
13723 Ok(ActionApplyType::SessionPolicy)
13724 } else if self.parse_keyword(Keyword::TAG) {
13725 Ok(ActionApplyType::Tag)
13726 } else {
13727 self.expected("GRANT APPLY type", self.peek_token())
13728 }
13729 }
13730
13731 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
13732 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
13733 Some(ActionExecuteObjectType::DataMetricFunction)
13734 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
13735 Some(ActionExecuteObjectType::ManagedAlert)
13736 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
13737 Some(ActionExecuteObjectType::ManagedTask)
13738 } else if self.parse_keyword(Keyword::ALERT) {
13739 Some(ActionExecuteObjectType::Alert)
13740 } else if self.parse_keyword(Keyword::TASK) {
13741 Some(ActionExecuteObjectType::Task)
13742 } else {
13743 None
13744 }
13745 }
13746
13747 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
13748 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
13749 Ok(ActionManageType::AccountSupportCases)
13750 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
13751 Ok(ActionManageType::EventSharing)
13752 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
13753 Ok(ActionManageType::ListingAutoFulfillment)
13754 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
13755 Ok(ActionManageType::OrganizationSupportCases)
13756 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
13757 Ok(ActionManageType::UserSupportCases)
13758 } else if self.parse_keyword(Keyword::GRANTS) {
13759 Ok(ActionManageType::Grants)
13760 } else if self.parse_keyword(Keyword::WAREHOUSES) {
13761 Ok(ActionManageType::Warehouses)
13762 } else {
13763 self.expected("GRANT MANAGE type", self.peek_token())
13764 }
13765 }
13766
13767 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
13768 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
13769 Some(ActionModifyType::LogLevel)
13770 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
13771 Some(ActionModifyType::TraceLevel)
13772 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
13773 Some(ActionModifyType::SessionLogLevel)
13774 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
13775 Some(ActionModifyType::SessionTraceLevel)
13776 } else {
13777 None
13778 }
13779 }
13780
13781 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
13782 if self.parse_keyword(Keyword::EXECUTION) {
13783 Some(ActionMonitorType::Execution)
13784 } else if self.parse_keyword(Keyword::SECURITY) {
13785 Some(ActionMonitorType::Security)
13786 } else if self.parse_keyword(Keyword::USAGE) {
13787 Some(ActionMonitorType::Usage)
13788 } else {
13789 None
13790 }
13791 }
13792
    /// Parse the name of a grantee, e.g. the `<name>` in `GRANT ... TO <name>`.
    ///
    /// For dialects that support `user@host` grantees, a single unqualified
    /// identifier immediately followed by `@` is parsed as
    /// [`GranteeName::UserHost`]; everything else becomes
    /// [`GranteeName::ObjectName`].
    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
        let mut name = self.parse_object_name(false)?;
        // NOTE: `consume_token` must stay last in this `&&` chain — it eats
        // the `@` token only when every preceding check has succeeded.
        if self.dialect.supports_user_host_grantee()
            && name.0.len() == 1
            && name.0[0].as_ident().is_some()
            && self.consume_token(&Token::AtSign)
        {
            // Both unwraps are safe: the length and `as_ident` checks above
            // guarantee exactly one identifier part.
            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
            let host = self.parse_identifier()?;
            Ok(GranteeName::UserHost { user, host })
        } else {
            Ok(GranteeName::ObjectName(name))
        }
    }
13807
13808 pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
13810 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
13811
13812 self.expect_keyword_is(Keyword::FROM)?;
13813 let grantees = self.parse_grantees()?;
13814
13815 let granted_by = self
13816 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
13817 .then(|| self.parse_identifier().unwrap());
13818
13819 let cascade = self.parse_cascade_option();
13820
13821 Ok(Statement::Revoke {
13822 privileges,
13823 objects,
13824 grantees,
13825 granted_by,
13826 cascade,
13827 })
13828 }
13829
13830 pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
13832 if !dialect_of!(self is MySqlDialect | GenericDialect) {
13833 return parser_err!(
13834 "Unsupported statement REPLACE",
13835 self.peek_token().span.start
13836 );
13837 }
13838
13839 let mut insert = self.parse_insert()?;
13840 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
13841 *replace_into = true;
13842 }
13843
13844 Ok(insert)
13845 }
13846
13847 fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
13851 Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
13852 }
13853
    /// Parse an `INSERT` statement (or the `INSERT ... DIRECTORY` form),
    /// returning either [`Statement::Insert`] or [`Statement::Directory`].
    ///
    /// Token consumption is strictly ordered; each clause below is optional
    /// and dialect-gated where noted.
    pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
        // SQLite-style `INSERT OR REPLACE/ROLLBACK/ABORT/FAIL/IGNORE`.
        let or = self.parse_conflict_clause();
        // MySQL-only priority modifier.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        // MySQL `INSERT IGNORE`.
        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Plain INSERT is never a REPLACE; `parse_replace` flips this flag on
        // the returned statement afterwards.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // `INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>' [STORED AS <fmt>] <query>`.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional `TABLE` keyword before the target, as in `INSERT INTO TABLE t`.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // PostgreSQL target-table alias: `INSERT INTO t AS a`.
            let table_alias =
                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT INTO t DEFAULT VALUES`: no columns, no source.
                (vec![], None, vec![], None, vec![])
            } else {
                // Only attempt a column list when the next tokens are not the
                // start of a parenthesized subquery `(SELECT ...)`.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after the PARTITION clause.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                // The data source: none when FORMAT/SETTINGS follows directly,
                // a `SET a = b, ...` assignment list, or a full query.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (columns, partitioned, after_columns, source, assignments)
            };

            // Dialect-specific `SETTINGS ...` and `FORMAT ...` clauses
            // (e.g. ClickHouse — gated by `supports_insert_format`).
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL row/column aliases: `INSERT ... AS new_row (a, b)`.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ... DO NOTHING/UPDATE` or `ON DUPLICATE KEY UPDATE`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Conflict target: `ON CONSTRAINT <name>`, a column list,
                    // or unspecified.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token() == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        // `DO UPDATE SET <assignments> [WHERE <expr>]`.
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    // MySQL `ON DUPLICATE KEY UPDATE <assignments>`.
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            // Optional `RETURNING <select items>`.
            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Statement::Insert(Insert {
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
            }))
        }
    }
14046
14047 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
14051 let ident = self.parse_identifier()?;
14052 let values = self
14053 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
14054 .unwrap_or_default();
14055
14056 Ok(InputFormatClause { ident, values })
14057 }
14058
14059 fn peek_subquery_start(&mut self) -> bool {
14062 let [maybe_lparen, maybe_select] = self.peek_tokens();
14063 Token::LParen == maybe_lparen
14064 && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
14065 }
14066
14067 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
14068 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
14069 Some(SqliteOnConflict::Replace)
14070 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
14071 Some(SqliteOnConflict::Rollback)
14072 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
14073 Some(SqliteOnConflict::Abort)
14074 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
14075 Some(SqliteOnConflict::Fail)
14076 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
14077 Some(SqliteOnConflict::Ignore)
14078 } else if self.parse_keyword(Keyword::REPLACE) {
14079 Some(SqliteOnConflict::Replace)
14080 } else {
14081 None
14082 }
14083 }
14084
14085 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
14086 if self.parse_keyword(Keyword::PARTITION) {
14087 self.expect_token(&Token::LParen)?;
14088 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
14089 self.expect_token(&Token::RParen)?;
14090 Ok(partition_cols)
14091 } else {
14092 Ok(None)
14093 }
14094 }
14095
14096 pub fn parse_load_data_table_format(
14097 &mut self,
14098 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
14099 if self.parse_keyword(Keyword::INPUTFORMAT) {
14100 let input_format = self.parse_expr()?;
14101 self.expect_keyword_is(Keyword::SERDE)?;
14102 let serde = self.parse_expr()?;
14103 Ok(Some(HiveLoadDataFormat {
14104 input_format,
14105 serde,
14106 }))
14107 } else {
14108 Ok(None)
14109 }
14110 }
14111
14112 fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
14116 Ok(Box::new(SetExpr::Update(self.parse_update()?)))
14117 }
14118
    /// Parse an `UPDATE` statement.
    pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
        // SQLite-style `UPDATE OR ...` conflict clause.
        let or = self.parse_conflict_clause();
        let table = self.parse_table_and_joins()?;
        // A FROM clause may appear before SET in some dialects; record which
        // position it was found in.
        let from_before_set = if self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::BeforeSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            None
        };
        self.expect_keyword(Keyword::SET)?;
        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
        // Only accept FROM after SET when it did not already appear before it.
        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
            Some(UpdateTableFromKind::AfterSet(
                self.parse_table_with_joins()?,
            ))
        } else {
            from_before_set
        };
        // Optional `WHERE <expr>`.
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        // Optional `RETURNING <select items>`.
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        Ok(Statement::Update {
            table,
            assignments,
            from,
            selection,
            returning,
            or,
        })
    }
14157
14158 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
14160 let target = self.parse_assignment_target()?;
14161 self.expect_token(&Token::Eq)?;
14162 let value = self.parse_expr()?;
14163 Ok(Assignment { target, value })
14164 }
14165
14166 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
14168 if self.consume_token(&Token::LParen) {
14169 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
14170 self.expect_token(&Token::RParen)?;
14171 Ok(AssignmentTarget::Tuple(columns))
14172 } else {
14173 let column = self.parse_object_name(false)?;
14174 Ok(AssignmentTarget::ColumnName(column))
14175 }
14176 }
14177
14178 pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
14179 let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
14180 self.maybe_parse(|p| {
14181 let name = p.parse_expr()?;
14182 let operator = p.parse_function_named_arg_operator()?;
14183 let arg = p.parse_wildcard_expr()?.into();
14184 Ok(FunctionArg::ExprNamed {
14185 name,
14186 arg,
14187 operator,
14188 })
14189 })?
14190 } else {
14191 self.maybe_parse(|p| {
14192 let name = p.parse_identifier()?;
14193 let operator = p.parse_function_named_arg_operator()?;
14194 let arg = p.parse_wildcard_expr()?.into();
14195 Ok(FunctionArg::Named {
14196 name,
14197 arg,
14198 operator,
14199 })
14200 })?
14201 };
14202 if let Some(arg) = arg {
14203 return Ok(arg);
14204 }
14205 Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
14206 }
14207
    /// Parse the operator separating a named function argument from its value:
    /// the `VALUE` keyword, `=>`, `=`, `:=`, or `:`, each gated on dialect
    /// support.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not a recognized operator: push the token back before
                // reporting, so speculative callers can rewind cleanly.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
14236
14237 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
14238 if self.consume_token(&Token::RParen) {
14239 Ok(vec![])
14240 } else {
14241 let args = self.parse_comma_separated(Parser::parse_function_args)?;
14242 self.expect_token(&Token::RParen)?;
14243 Ok(args)
14244 }
14245 }
14246
    /// Parse table-function arguments up to and including the closing `)`,
    /// with an optional trailing `SETTINGS` list.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // An immediate `)` means the list is empty.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        // Collect comma-separated arguments until either a SETTINGS clause or
        // the end of the comma-separated list is reached.
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
14267
    /// Parse a full function argument list (the opening `(` has already been
    /// consumed): optional ALL/DISTINCT, the arguments, and any trailing
    /// argument clauses (null treatment, ORDER BY, LIMIT, HAVING, SEPARATOR,
    /// ON OVERFLOW, JSON null clauses). The clause checks below run in a
    /// fixed order; each consumes tokens only when its clause is present.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // A JSON null clause may appear before the arguments...
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // `IGNORE NULLS` / `RESPECT NULLS` as a trailing argument clause.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // `HAVING MIN|MAX <expr>` (BigQuery/Generic only).
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                _ => unreachable!(),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // `SEPARATOR <value>` (MySQL/Generic only).
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // ...or after them.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
14346
14347 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
14349 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
14350 Some(JsonNullClause::AbsentOnNull)
14351 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
14352 Some(JsonNullClause::NullOnNull)
14353 } else {
14354 None
14355 }
14356 }
14357
14358 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
14359 let loc = self.peek_token().span.start;
14360 match (
14361 self.parse_keyword(Keyword::ALL),
14362 self.parse_keyword(Keyword::DISTINCT),
14363 ) {
14364 (true, false) => Ok(Some(DuplicateTreatment::All)),
14365 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
14366 (false, false) => Ok(None),
14367 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
14368 }
14369 }
14370
    /// Parse one projection (select item) of a SELECT list.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Dialect-reserved operator keyword that may prefix a select item;
        // it is re-applied to the expression via `maybe_prefixed_expr` below.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{:?}", keyword)));

        match self.parse_wildcard_expr()? {
            // `qualifier.*` and bare `*` wildcards, each with optional
            // ILIKE/EXCLUDE/EXCEPT/REPLACE/RENAME modifiers.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // A bare, unquoted `from` here almost certainly means the
            // expression before the FROM clause is missing — report it.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token().span.start
                )
            }
            // `alias = expr` assignment-style aliasing, for dialects that
            // support it.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The guard above guarantees `left` is an identifier.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `<expr>.*`: a qualified wildcard over an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // Plain expression, with an optional alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
14432
14433 pub fn parse_wildcard_additional_options(
14437 &mut self,
14438 wildcard_token: TokenWithSpan,
14439 ) -> Result<WildcardAdditionalOptions, ParserError> {
14440 let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
14441 self.parse_optional_select_item_ilike()?
14442 } else {
14443 None
14444 };
14445 let opt_exclude = if opt_ilike.is_none()
14446 && dialect_of!(self is GenericDialect | DuckDbDialect | SnowflakeDialect)
14447 {
14448 self.parse_optional_select_item_exclude()?
14449 } else {
14450 None
14451 };
14452 let opt_except = if self.dialect.supports_select_wildcard_except() {
14453 self.parse_optional_select_item_except()?
14454 } else {
14455 None
14456 };
14457 let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
14458 {
14459 self.parse_optional_select_item_replace()?
14460 } else {
14461 None
14462 };
14463 let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
14464 self.parse_optional_select_item_rename()?
14465 } else {
14466 None
14467 };
14468
14469 Ok(WildcardAdditionalOptions {
14470 wildcard_token: wildcard_token.into(),
14471 opt_ilike,
14472 opt_exclude,
14473 opt_except,
14474 opt_rename,
14475 opt_replace,
14476 })
14477 }
14478
14479 pub fn parse_optional_select_item_ilike(
14483 &mut self,
14484 ) -> Result<Option<IlikeSelectItem>, ParserError> {
14485 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
14486 let next_token = self.next_token();
14487 let pattern = match next_token.token {
14488 Token::SingleQuotedString(s) => s,
14489 _ => return self.expected("ilike pattern", next_token),
14490 };
14491 Some(IlikeSelectItem { pattern })
14492 } else {
14493 None
14494 };
14495 Ok(opt_ilike)
14496 }
14497
14498 pub fn parse_optional_select_item_exclude(
14502 &mut self,
14503 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
14504 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
14505 if self.consume_token(&Token::LParen) {
14506 let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14507 self.expect_token(&Token::RParen)?;
14508 Some(ExcludeSelectItem::Multiple(columns))
14509 } else {
14510 let column = self.parse_identifier()?;
14511 Some(ExcludeSelectItem::Single(column))
14512 }
14513 } else {
14514 None
14515 };
14516
14517 Ok(opt_exclude)
14518 }
14519
14520 pub fn parse_optional_select_item_except(
14524 &mut self,
14525 ) -> Result<Option<ExceptSelectItem>, ParserError> {
14526 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
14527 if self.peek_token().token == Token::LParen {
14528 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
14529 match &idents[..] {
14530 [] => {
14531 return self.expected(
14532 "at least one column should be parsed by the expect clause",
14533 self.peek_token(),
14534 )?;
14535 }
14536 [first, idents @ ..] => Some(ExceptSelectItem {
14537 first_element: first.clone(),
14538 additional_elements: idents.to_vec(),
14539 }),
14540 }
14541 } else {
14542 let ident = self.parse_identifier()?;
14544 Some(ExceptSelectItem {
14545 first_element: ident,
14546 additional_elements: vec![],
14547 })
14548 }
14549 } else {
14550 None
14551 };
14552
14553 Ok(opt_except)
14554 }
14555
14556 pub fn parse_optional_select_item_rename(
14558 &mut self,
14559 ) -> Result<Option<RenameSelectItem>, ParserError> {
14560 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
14561 if self.consume_token(&Token::LParen) {
14562 let idents =
14563 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
14564 self.expect_token(&Token::RParen)?;
14565 Some(RenameSelectItem::Multiple(idents))
14566 } else {
14567 let ident = self.parse_identifier_with_alias()?;
14568 Some(RenameSelectItem::Single(ident))
14569 }
14570 } else {
14571 None
14572 };
14573
14574 Ok(opt_rename)
14575 }
14576
14577 pub fn parse_optional_select_item_replace(
14579 &mut self,
14580 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
14581 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
14582 if self.consume_token(&Token::LParen) {
14583 let items = self.parse_comma_separated(|parser| {
14584 Ok(Box::new(parser.parse_replace_elements()?))
14585 })?;
14586 self.expect_token(&Token::RParen)?;
14587 Some(ReplaceSelectItem { items })
14588 } else {
14589 let tok = self.next_token();
14590 return self.expected("( after REPLACE but", tok);
14591 }
14592 } else {
14593 None
14594 };
14595
14596 Ok(opt_replace)
14597 }
14598 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
14599 let expr = self.parse_expr()?;
14600 let as_keyword = self.parse_keyword(Keyword::AS);
14601 let ident = self.parse_identifier()?;
14602 Ok(ReplaceSelectElement {
14603 expr,
14604 column_name: ident,
14605 as_keyword,
14606 })
14607 }
14608
14609 pub fn parse_asc_desc(&mut self) -> Option<bool> {
14612 if self.parse_keyword(Keyword::ASC) {
14613 Some(true)
14614 } else if self.parse_keyword(Keyword::DESC) {
14615 Some(false)
14616 } else {
14617 None
14618 }
14619 }
14620
14621 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
14623 self.parse_order_by_expr_inner(false)
14624 .map(|(order_by, _)| order_by)
14625 }
14626
14627 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
14629 self.parse_order_by_expr_inner(true)
14630 .map(|(column, operator_class)| IndexColumn {
14631 column,
14632 operator_class,
14633 })
14634 }
14635
    /// Shared implementation for ORDER BY list elements and CREATE INDEX
    /// columns. When `with_operator_class` is true, a bare identifier after
    /// the expression may be parsed as an operator class (second tuple slot).
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<Ident> = if with_operator_class {
            // An upcoming ASC/DESC/NULLS/WITH keyword means there is no
            // operator class; otherwise optimistically try an identifier.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_identifier())?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // ClickHouse-style `WITH FILL` is only accepted on the matching
        // dialects (and the generic one).
        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
14676
14677 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
14678 let asc = self.parse_asc_desc();
14679
14680 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
14681 Some(true)
14682 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
14683 Some(false)
14684 } else {
14685 None
14686 };
14687
14688 Ok(OrderByOptions { asc, nulls_first })
14689 }
14690
14691 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
14694 let from = if self.parse_keyword(Keyword::FROM) {
14695 Some(self.parse_expr()?)
14696 } else {
14697 None
14698 };
14699
14700 let to = if self.parse_keyword(Keyword::TO) {
14701 Some(self.parse_expr()?)
14702 } else {
14703 None
14704 };
14705
14706 let step = if self.parse_keyword(Keyword::STEP) {
14707 Some(self.parse_expr()?)
14708 } else {
14709 None
14710 };
14711
14712 Ok(WithFill { from, to, step })
14713 }
14714
14715 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
14718 if !self.parse_keyword(Keyword::INTERPOLATE) {
14719 return Ok(None);
14720 }
14721
14722 if self.consume_token(&Token::LParen) {
14723 let interpolations =
14724 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
14725 self.expect_token(&Token::RParen)?;
14726 return Ok(Some(Interpolate {
14728 exprs: Some(interpolations),
14729 }));
14730 }
14731
14732 Ok(Some(Interpolate { exprs: None }))
14734 }
14735
14736 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
14738 let column = self.parse_identifier()?;
14739 let expr = if self.parse_keyword(Keyword::AS) {
14740 Some(self.parse_expr()?)
14741 } else {
14742 None
14743 };
14744 Ok(InterpolateExpr { column, expr })
14745 }
14746
    /// Parses the argument of `TOP`: either a parenthesized expression or a
    /// bare integer literal, followed by optional PERCENT and WITH TIES
    /// (assumes the caller already consumed the TOP keyword).
    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
        let quantity = if self.consume_token(&Token::LParen) {
            // TOP (<expr>)
            let quantity = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(TopQuantity::Expr(quantity))
        } else {
            // TOP <n> — must be an integer literal.
            let next_token = self.next_token();
            let quantity = match next_token.token {
                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
                _ => self.expected("literal int", next_token)?,
            };
            Some(TopQuantity::Constant(quantity))
        };

        let percent = self.parse_keyword(Keyword::PERCENT);

        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);

        Ok(Top {
            with_ties,
            percent,
            quantity,
        })
    }
14773
14774 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
14776 if self.parse_keyword(Keyword::ALL) {
14777 Ok(None)
14778 } else {
14779 Ok(Some(self.parse_expr()?))
14780 }
14781 }
14782
14783 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
14785 let value = self.parse_expr()?;
14786 let rows = if self.parse_keyword(Keyword::ROW) {
14787 OffsetRows::Row
14788 } else if self.parse_keyword(Keyword::ROWS) {
14789 OffsetRows::Rows
14790 } else {
14791 OffsetRows::None
14792 };
14793 Ok(Offset { value, rows })
14794 }
14795
    /// Parses `{ FIRST | NEXT } [<count> [PERCENT]] { ROW | ROWS }
    /// { ONLY | WITH TIES }` (assumes the caller consumed FETCH).
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?;
        // A ROW/ROWS keyword right away means no explicit count was given.
        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?;
            (Some(quantity), percent)
        };
        // The terminating ONLY / WITH TIES keyword is mandatory.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
            true
        } else {
            return self.expected("one of ONLY or WITH TIES", self.peek_token());
        };
        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
14823
14824 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
14826 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
14827 Keyword::UPDATE => LockType::Update,
14828 Keyword::SHARE => LockType::Share,
14829 _ => unreachable!(),
14830 };
14831 let of = if self.parse_keyword(Keyword::OF) {
14832 Some(self.parse_object_name(false)?)
14833 } else {
14834 None
14835 };
14836 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
14837 Some(NonBlock::Nowait)
14838 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
14839 Some(NonBlock::SkipLocked)
14840 } else {
14841 None
14842 };
14843 Ok(LockClause {
14844 lock_type,
14845 of,
14846 nonblock,
14847 })
14848 }
14849
    /// Parses a VALUES body: comma-separated rows, each optionally prefixed
    /// with `ROW` and wrapped in parentheses. When `allow_empty` is true an
    /// empty tuple `()` is accepted as a row.
    pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
        // Set if any row used the explicit `ROW(...)` syntax.
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values { explicit_row, rows })
    }
14870
14871 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
14872 self.expect_keyword_is(Keyword::TRANSACTION)?;
14873 Ok(Statement::StartTransaction {
14874 modes: self.parse_transaction_modes()?,
14875 begin: false,
14876 transaction: Some(BeginTransactionKind::Transaction),
14877 modifier: None,
14878 statements: vec![],
14879 exception_statements: None,
14880 has_end_keyword: false,
14881 })
14882 }
14883
    /// Parses the tail of a BEGIN statement (assumes the caller consumed
    /// the BEGIN keyword).
    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
        // Optional dialect-specific modifier between BEGIN and
        // TRANSACTION/WORK; only checked when the dialect opts in.
        let modifier = if !self.dialect.supports_start_transaction_modifier() {
            None
        } else if self.parse_keyword(Keyword::DEFERRED) {
            Some(TransactionModifier::Deferred)
        } else if self.parse_keyword(Keyword::IMMEDIATE) {
            Some(TransactionModifier::Immediate)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Some(TransactionModifier::Exclusive)
        } else if self.parse_keyword(Keyword::TRY) {
            Some(TransactionModifier::Try)
        } else if self.parse_keyword(Keyword::CATCH) {
            Some(TransactionModifier::Catch)
        } else {
            None
        };
        // BEGIN may optionally be followed by TRANSACTION or WORK.
        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
            _ => None,
        };
        Ok(Statement::StartTransaction {
            modes: self.parse_transaction_modes()?,
            begin: true,
            transaction,
            modifier,
            statements: vec![],
            exception_statements: None,
            has_end_keyword: false,
        })
    }
14915
14916 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
14917 let modifier = if !self.dialect.supports_end_transaction_modifier() {
14918 None
14919 } else if self.parse_keyword(Keyword::TRY) {
14920 Some(TransactionModifier::Try)
14921 } else if self.parse_keyword(Keyword::CATCH) {
14922 Some(TransactionModifier::Catch)
14923 } else {
14924 None
14925 };
14926 Ok(Statement::Commit {
14927 chain: self.parse_commit_rollback_chain()?,
14928 end: true,
14929 modifier,
14930 })
14931 }
14932
    /// Parses a possibly empty list of transaction modes: isolation level
    /// and/or READ ONLY / READ WRITE, separated by commas.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        // The first mode is optional; after a comma another one is required.
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected("isolation level", self.peek_token())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected("transaction mode", self.peek_token())?
            } else {
                break;
            };
            modes.push(mode);
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
14970
14971 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
14972 Ok(Statement::Commit {
14973 chain: self.parse_commit_rollback_chain()?,
14974 end: false,
14975 modifier: None,
14976 })
14977 }
14978
14979 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
14980 let chain = self.parse_commit_rollback_chain()?;
14981 let savepoint = self.parse_rollback_savepoint()?;
14982
14983 Ok(Statement::Rollback { chain, savepoint })
14984 }
14985
14986 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
14987 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
14988 if self.parse_keyword(Keyword::AND) {
14989 let chain = !self.parse_keyword(Keyword::NO);
14990 self.expect_keyword_is(Keyword::CHAIN)?;
14991 Ok(chain)
14992 } else {
14993 Ok(false)
14994 }
14995 }
14996
14997 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
14998 if self.parse_keyword(Keyword::TO) {
14999 let _ = self.parse_keyword(Keyword::SAVEPOINT);
15000 let savepoint = self.parse_identifier()?;
15001
15002 Ok(Some(savepoint))
15003 } else {
15004 Ok(None)
15005 }
15006 }
15007
    /// Parses the tail of RAISERROR:
    /// `(message, severity, state [, arg ...]) [WITH option, ...]`.
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional substitution arguments after the three mandatory ones.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        // Optional `WITH LOG | NOWAIT | SETERROR` option list.
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
15035
15036 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
15037 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
15038 Keyword::LOG => Ok(RaisErrorOption::Log),
15039 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
15040 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
15041 _ => self.expected(
15042 "LOG, NOWAIT OR SETERROR raiserror option",
15043 self.peek_token(),
15044 ),
15045 }
15046 }
15047
15048 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
15049 let prepare = self.parse_keyword(Keyword::PREPARE);
15050 let name = self.parse_identifier()?;
15051 Ok(Statement::Deallocate { name, prepare })
15052 }
15053
    /// Parses the tail of EXECUTE:
    /// `[IMMEDIATE] <name> [(<params>)] [INTO idents] [USING exprs]`.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        // EXECUTE IMMEDIATE (where the dialect supports it) has no name.
        let name = if self.dialect.supports_execute_immediate()
            && self.parse_keyword(Keyword::IMMEDIATE)
        {
            None
        } else {
            let name = self.parse_object_name(false)?;
            Some(name)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Pick the token that terminates the (possibly empty) parameter list:
        // `)` when parenthesized; otherwise EOF, a USING keyword, or `;`.
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if w.keyword == Keyword::USING => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        Ok(Statement::Execute {
            // `immediate` is derived from the absence of a statement name.
            immediate: name.is_none(),
            name,
            parameters,
            has_parentheses,
            into,
            using,
        })
    }
15100
15101 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
15102 let name = self.parse_identifier()?;
15103
15104 let mut data_types = vec![];
15105 if self.consume_token(&Token::LParen) {
15106 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
15107 self.expect_token(&Token::RParen)?;
15108 }
15109
15110 self.expect_keyword_is(Keyword::AS)?;
15111 let statement = Box::new(self.parse_statement()?);
15112 Ok(Statement::Prepare {
15113 name,
15114 data_types,
15115 statement,
15116 })
15117 }
15118
15119 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
15120 self.expect_token(&Token::LParen)?;
15121 let query = self.parse_query()?;
15122 self.expect_token(&Token::RParen)?;
15123
15124 self.expect_keyword_is(Keyword::TO)?;
15125 let to = self.parse_identifier()?;
15126
15127 let with_options = self.parse_options(Keyword::WITH)?;
15128
15129 Ok(Statement::Unload {
15130 query,
15131 to,
15132 with: with_options,
15133 })
15134 }
15135
    /// Parses the sequence of `WHEN [NOT] MATCHED [BY SOURCE|TARGET]
    /// [AND <predicate>] THEN <action>` clauses of a MERGE statement.
    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
        let mut clauses = vec![];
        loop {
            // Clauses continue only as long as another WHEN appears.
            if !(self.parse_keyword(Keyword::WHEN)) {
                break;
            }

            let mut clause_kind = MergeClauseKind::Matched;
            if self.parse_keyword(Keyword::NOT) {
                clause_kind = MergeClauseKind::NotMatched;
            }
            self.expect_keyword_is(Keyword::MATCHED)?;

            // NOT MATCHED can be refined with BY SOURCE / BY TARGET.
            if matches!(clause_kind, MergeClauseKind::NotMatched)
                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
            {
                clause_kind = MergeClauseKind::NotMatchedBySource;
            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
            {
                clause_kind = MergeClauseKind::NotMatchedByTarget;
            }

            // Optional `AND <predicate>` narrowing this clause.
            let predicate = if self.parse_keyword(Keyword::AND) {
                Some(self.parse_expr()?)
            } else {
                None
            };

            self.expect_keyword_is(Keyword::THEN)?;

            let merge_clause = match self.parse_one_of_keywords(&[
                Keyword::UPDATE,
                Keyword::INSERT,
                Keyword::DELETE,
            ]) {
                Some(Keyword::UPDATE) => {
                    // UPDATE is rejected for not-matched(-by-target) clauses.
                    if matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "UPDATE is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    self.expect_keyword_is(Keyword::SET)?;
                    MergeAction::Update {
                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
                    }
                }
                Some(Keyword::DELETE) => {
                    // DELETE is rejected for the same clause kinds as UPDATE.
                    if matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "DELETE is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    MergeAction::Delete
                }
                Some(Keyword::INSERT) => {
                    // INSERT is only valid for not-matched(-by-target) clauses.
                    if !matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "INSERT is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    let is_mysql = dialect_of!(self is MySqlDialect);

                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
                    // `INSERT ROW` (BigQuery/generic) vs. `INSERT ... VALUES`.
                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.parse_keyword(Keyword::ROW)
                    {
                        MergeInsertKind::Row
                    } else {
                        self.expect_keyword_is(Keyword::VALUES)?;
                        let values = self.parse_values(is_mysql)?;
                        MergeInsertKind::Values(values)
                    };
                    MergeAction::Insert(MergeInsertExpr { columns, kind })
                }
                _ => {
                    return Err(ParserError::ParserError(
                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
                    ));
                }
            };
            clauses.push(MergeClause {
                clause_kind,
                predicate,
                action: merge_clause,
            });
        }
        Ok(clauses)
    }
15234
15235 fn parse_output(&mut self) -> Result<OutputClause, ParserError> {
15236 self.expect_keyword_is(Keyword::OUTPUT)?;
15237 let select_items = self.parse_projection()?;
15238 self.expect_keyword_is(Keyword::INTO)?;
15239 let into_table = self.parse_select_into()?;
15240
15241 Ok(OutputClause {
15242 select_items,
15243 into_table,
15244 })
15245 }
15246
15247 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
15248 let temporary = self
15249 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
15250 .is_some();
15251 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
15252 let table = self.parse_keyword(Keyword::TABLE);
15253 let name = self.parse_object_name(false)?;
15254
15255 Ok(SelectInto {
15256 temporary,
15257 unlogged,
15258 table,
15259 name,
15260 })
15261 }
15262
15263 pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
15264 let into = self.parse_keyword(Keyword::INTO);
15265
15266 let table = self.parse_table_factor()?;
15267
15268 self.expect_keyword_is(Keyword::USING)?;
15269 let source = self.parse_table_factor()?;
15270 self.expect_keyword_is(Keyword::ON)?;
15271 let on = self.parse_expr()?;
15272 let clauses = self.parse_merge_clauses()?;
15273 let output = if self.peek_keyword(Keyword::OUTPUT) {
15274 Some(self.parse_output()?)
15275 } else {
15276 None
15277 };
15278
15279 Ok(Statement::Merge {
15280 into,
15281 table,
15282 source,
15283 on: Box::new(on),
15284 clauses,
15285 output,
15286 })
15287 }
15288
    /// Parses a PRAGMA value, restricted to a string, a number, or a `?`
    /// placeholder; any other value kind is rejected.
    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
        match self.parse_value()?.value {
            v @ Value::SingleQuotedString(_) => Ok(v),
            v @ Value::DoubleQuotedString(_) => Ok(v),
            v @ Value::Number(_, _) => Ok(v),
            v @ Value::Placeholder(_) => Ok(v),
            _ => {
                // Rewind so the error points at the offending token.
                self.prev_token();
                self.expected("number or string or ? placeholder", self.peek_token())
            }
        }
    }
15301
15302 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
15304 let name = self.parse_object_name(false)?;
15305 if self.consume_token(&Token::LParen) {
15306 let value = self.parse_pragma_value()?;
15307 self.expect_token(&Token::RParen)?;
15308 Ok(Statement::Pragma {
15309 name,
15310 value: Some(value),
15311 is_eq: false,
15312 })
15313 } else if self.consume_token(&Token::Eq) {
15314 Ok(Statement::Pragma {
15315 name,
15316 value: Some(self.parse_pragma_value()?),
15317 is_eq: true,
15318 })
15319 } else {
15320 Ok(Statement::Pragma {
15321 name,
15322 value: None,
15323 is_eq: false,
15324 })
15325 }
15326 }
15327
15328 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
15330 let extension_name = self.parse_identifier()?;
15331
15332 Ok(Statement::Install { extension_name })
15333 }
15334
    /// Parses the tail of LOAD: either `LOAD <extension>` (for dialects
    /// with extension support) or `LOAD DATA [LOCAL] INPATH ... INTO TABLE ...`.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        // NOTE(review): `parse_keyword` runs before the dialect check, so DATA
        // is consumed even when the dialect rejects LOAD DATA, and the error
        // below then points past it — confirm this ordering is intended.
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token(),
            )
        }
    }
15365
    /// Parses the tail of `OPTIMIZE TABLE <name> [ON CLUSTER ...]
    /// [PARTITION ...] [FINAL] [DEDUPLICATE [BY <expr>]]`.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let name = self.parse_object_name(false)?;
        let on_cluster = self.parse_optional_on_cluster()?;

        // `PARTITION ID <ident>` targets a partition by identifier;
        // otherwise any partition expression is accepted.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);
        // DEDUPLICATE alone means "all columns"; BY narrows to an expression.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            on_cluster,
            partition,
            include_final,
            deduplicate,
        })
    }
15404
    /// Parses the remainder of a `CREATE [TEMPORARY] SEQUENCE` statement
    /// (everything after the SEQUENCE keyword).
    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // Optional `AS <data_type>`.
        let mut data_type: Option<DataType> = None;
        if self.parse_keywords(&[Keyword::AS]) {
            data_type = Some(self.parse_data_type()?)
        }
        let sequence_options = self.parse_create_sequence_options()?;
        // Optional `OWNED BY { NONE | <column> }`; NONE is kept as a literal
        // object name rather than a dedicated variant.
        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
            if self.parse_keywords(&[Keyword::NONE]) {
                Some(ObjectName::from(vec![Ident::new("NONE")]))
            } else {
                Some(self.parse_object_name(false)?)
            }
        } else {
            None
        };
        Ok(Statement::CreateSequence {
            temporary,
            if_not_exists,
            name,
            data_type,
            sequence_options,
            owned_by,
        })
    }
15440
    /// Parses the sequence options (INCREMENT, MINVALUE, MAXVALUE, START,
    /// CACHE, CYCLE), each optional, accepted in this fixed order.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // INCREMENT [BY] <n> — the bool records whether BY was written.
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // MINVALUE <n> | NO MINVALUE
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        // MAXVALUE <n> | NO MAXVALUE
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // START [WITH] <n> — the bool records whether WITH was written.
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // NO CYCLE maps to Cycle(true) and CYCLE to Cycle(false); the bool
        // appears to mean "NO was present" — TODO confirm against the
        // SequenceOptions AST definition.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
15485
    /// Returns the parser's current position within its token stream.
    pub fn index(&self) -> usize {
        self.index
    }
15490
15491 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
15492 let ident = self.parse_identifier()?;
15493 self.expect_keyword_is(Keyword::AS)?;
15494
15495 let window_expr = if self.consume_token(&Token::LParen) {
15496 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
15497 } else if self.dialect.supports_window_clause_named_window_reference() {
15498 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
15499 } else {
15500 return self.expected("(", self.peek_token());
15501 };
15502
15503 Ok(NamedWindowDefinition(ident, window_expr))
15504 }
15505
15506 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
15507 let name = self.parse_object_name(false)?;
15508 let params = self.parse_optional_procedure_parameters()?;
15509 self.expect_keyword_is(Keyword::AS)?;
15510 self.expect_keyword_is(Keyword::BEGIN)?;
15511 let statements = self.parse_statements()?;
15512 self.expect_keyword_is(Keyword::END)?;
15513 Ok(Statement::CreateProcedure {
15514 name,
15515 or_alter,
15516 params,
15517 body: statements,
15518 })
15519 }
15520
    /// Parses a window specification body, up to and including the
    /// closing `)` (the opening `(` was consumed by the caller).
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // An optional base window name may precede the clauses; only a
        // non-keyword word is treated as such a name.
        let window_name = match self.peek_token().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_indent()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // If the next token is not `)`, a frame clause must follow before it.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
15554
    /// Parses the remainder of `CREATE TYPE <name> AS ...`: either an ENUM
    /// or a composite type with a (possibly empty) attribute list.
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_keyword_is(Keyword::AS)?;

        if self.parse_keyword(Keyword::ENUM) {
            return self.parse_create_type_enum(name);
        }

        let mut attributes = vec![];
        // No parentheses at all, or an immediate `()`, both produce an empty
        // composite type.
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Statement::CreateType {
                name,
                representation: UserDefinedTypeRepresentation::Composite { attributes },
            });
        }

        loop {
            let attr_name = self.parse_identifier()?;
            let attr_data_type = self.parse_data_type()?;
            // Each attribute may carry its own COLLATE clause.
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // Allow a trailing comma before the closing `)`.
                break;
            } else if !comma {
                return self.expected("',' or ')' after attribute definition", self.peek_token());
            }
        }

        Ok(Statement::CreateType {
            name,
            representation: UserDefinedTypeRepresentation::Composite { attributes },
        })
    }
15598
15599 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
15603 self.expect_token(&Token::LParen)?;
15604 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
15605 self.expect_token(&Token::RParen)?;
15606
15607 Ok(Statement::CreateType {
15608 name,
15609 representation: UserDefinedTypeRepresentation::Enum { labels },
15610 })
15611 }
15612
15613 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
15614 self.expect_token(&Token::LParen)?;
15615 let partitions = self.parse_comma_separated(|p| p.parse_identifier())?;
15616 self.expect_token(&Token::RParen)?;
15617 Ok(partitions)
15618 }
15619
15620 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
15621 if dialect_of!(self is MySqlDialect | GenericDialect) {
15622 if self.parse_keyword(Keyword::FIRST) {
15623 Ok(Some(MySQLColumnPosition::First))
15624 } else if self.parse_keyword(Keyword::AFTER) {
15625 let ident = self.parse_identifier()?;
15626 Ok(Some(MySQLColumnPosition::After(ident)))
15627 } else {
15628 Ok(None)
15629 }
15630 } else {
15631 Ok(None)
15632 }
15633 }
15634
15635 fn parse_print(&mut self) -> Result<Statement, ParserError> {
15637 Ok(Statement::Print(PrintStatement {
15638 message: Box::new(self.parse_expr()?),
15639 }))
15640 }
15641
15642 fn parse_return(&mut self) -> Result<Statement, ParserError> {
15644 match self.maybe_parse(|p| p.parse_expr())? {
15645 Some(expr) => Ok(Statement::Return(ReturnStatement {
15646 value: Some(ReturnStatementValue::Expr(expr)),
15647 })),
15648 None => Ok(Statement::Return(ReturnStatement { value: None })),
15649 }
15650 }
15651
    /// Consumes the parser and returns its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
15656
15657 fn peek_sub_query(&mut self) -> bool {
15659 if self
15660 .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
15661 .is_some()
15662 {
15663 self.prev_token();
15664 return true;
15665 }
15666 false
15667 }
15668
15669 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
15670 let show_in;
15671 let mut filter_position = None;
15672 if self.dialect.supports_show_like_before_in() {
15673 if let Some(filter) = self.parse_show_statement_filter()? {
15674 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
15675 }
15676 show_in = self.maybe_parse_show_stmt_in()?;
15677 } else {
15678 show_in = self.maybe_parse_show_stmt_in()?;
15679 if let Some(filter) = self.parse_show_statement_filter()? {
15680 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
15681 }
15682 }
15683 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
15684 let limit = self.maybe_parse_show_stmt_limit()?;
15685 let from = self.maybe_parse_show_stmt_from()?;
15686 Ok(ShowStatementOptions {
15687 filter_position,
15688 show_in,
15689 starts_with,
15690 limit,
15691 limit_from: from,
15692 })
15693 }
15694
15695 fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
15696 let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
15697 Some(Keyword::FROM) => ShowStatementInClause::FROM,
15698 Some(Keyword::IN) => ShowStatementInClause::IN,
15699 None => return Ok(None),
15700 _ => return self.expected("FROM or IN", self.peek_token()),
15701 };
15702
15703 let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
15704 Keyword::ACCOUNT,
15705 Keyword::DATABASE,
15706 Keyword::SCHEMA,
15707 Keyword::TABLE,
15708 Keyword::VIEW,
15709 ]) {
15710 Some(Keyword::DATABASE)
15712 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
15713 | self.peek_keyword(Keyword::LIMIT) =>
15714 {
15715 (Some(ShowStatementInParentType::Database), None)
15716 }
15717 Some(Keyword::SCHEMA)
15718 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
15719 | self.peek_keyword(Keyword::LIMIT) =>
15720 {
15721 (Some(ShowStatementInParentType::Schema), None)
15722 }
15723 Some(parent_kw) => {
15724 let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
15728 match parent_kw {
15729 Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
15730 Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
15731 Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
15732 Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
15733 Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
15734 _ => {
15735 return self.expected(
15736 "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
15737 self.peek_token(),
15738 )
15739 }
15740 }
15741 }
15742 None => {
15743 let mut parent_name = self.parse_object_name(false)?;
15746 if self
15747 .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
15748 .is_some()
15749 {
15750 parent_name
15751 .0
15752 .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
15753 }
15754 (None, Some(parent_name))
15755 }
15756 };
15757
15758 Ok(Some(ShowStatementIn {
15759 clause,
15760 parent_type,
15761 parent_name,
15762 }))
15763 }
15764
15765 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
15766 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
15767 Ok(Some(self.parse_value()?.value))
15768 } else {
15769 Ok(None)
15770 }
15771 }
15772
15773 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
15774 if self.parse_keyword(Keyword::LIMIT) {
15775 Ok(self.parse_limit()?)
15776 } else {
15777 Ok(None)
15778 }
15779 }
15780
15781 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
15782 if self.parse_keyword(Keyword::FROM) {
15783 Ok(Some(self.parse_value()?.value))
15784 } else {
15785 Ok(None)
15786 }
15787 }
15788}
15789
15790fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
15791 if let Some(prefix) = prefix {
15792 Expr::Prefixed {
15793 prefix,
15794 value: Box::new(expr),
15795 }
15796 } else {
15797 expr
15798 }
15799}
15800
15801impl Word {
15802 #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
15803 pub fn to_ident(&self, span: Span) -> Ident {
15804 Ident {
15805 value: self.value.clone(),
15806 quote_style: self.quote_style,
15807 span,
15808 }
15809 }
15810
15811 pub fn into_ident(self, span: Span) -> Ident {
15813 Ident {
15814 value: self.value,
15815 quote_style: self.quote_style,
15816 span,
15817 }
15818 }
15819}
15820
#[cfg(test)]
mod tests {
    use crate::test_utils::{all_dialects, TestedDialects};

    use super::*;

    // Exercises token navigation: peek/next/prev must stay consistent,
    // including stepping back from (repeated) EOF.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }

    // Multi-token lookahead: peek_tokens must not consume, and must pad
    // with EOF past the end of input.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }

    // Round-trip tests for parse_data_type: each input must parse to the
    // expected DataType AND display back to the exact input string.
    #[cfg(test)]
    mod test_parse_data_type {
        use crate::ast::{
            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
        };
        use crate::dialect::{AnsiDialect, GenericDialect};
        use crate::test_utils::TestedDialects;

        // Parses $input, asserts the AST equals $expected_type, and asserts
        // the AST displays back to $input (round-trip).
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }

        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }

        #[test]
        fn test_ansii_character_large_object_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }

        // Unknown type names fall back to DataType::Custom with raw
        // string arguments.
        #[test]
        fn test_parse_custom_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }

        #[test]
        fn test_ansii_exact_numeric_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );
        }

        #[test]
        fn test_ansii_date_type() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
    }

    // Round-trip tests for the three CREATE SCHEMA name forms.
    #[test]
    fn test_parse_schema_name() {
        // Parses $input as a schema name, asserts equality with
        // $expected_name, and asserts it displays back to $input.
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }

    // MySQL INDEX/KEY table constraints: name, USING BTREE/HASH, and
    // column lists, all round-tripped through Display.
    #[test]
    fn mysql_parse_index_table_constraint() {
        macro_rules! test_parse_table_constraint {
            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                    assert_eq!(constraint, $expected);
                    assert_eq!(constraint.to_string(), $input.to_string());
                });
            }};
        }

        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

        test_parse_table_constraint!(
            dialect,
            "INDEX (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: None,
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "KEY (c1)",
            TableConstraint::Index {
                display_as_key: true,
                name: None,
                index_type: None,
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX 'index' (c1, c2)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::with_quote('\'', "index")),
                index_type: None,
                columns: vec![Ident::new("c1"), Ident::new("c2")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING BTREE (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::BTree),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING HASH (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::Hash),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING BTREE (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::BTree),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING HASH (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::Hash),
                columns: vec![Ident::new("c1")],
            }
        );
    }

    // Tokenizer errors must surface line/column information.
    #[test]
    fn test_tokenizer_error_loc() {
        let sql = "foo '";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::TokenizerError(
                "Unterminated string literal at Line: 1, Column: 5".to_string()
            ))
        );
    }

    // Parser errors must surface line/column information.
    #[test]
    fn test_parser_error_loc() {
        let sql = "SELECT this is a syntax error";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
                    .to_string()
            ))
        );
    }

    // EXPLAIN may only appear at the root of a statement.
    #[test]
    fn test_nested_explain_error() {
        let sql = "EXPLAIN EXPLAIN SELECT 1";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Explain must be root of the plan".to_string()
            ))
        );
    }

    // Multipart identifiers: quoted parts (with escaped quotes) and
    // whitespace around the separating periods are both accepted.
    #[test]
    fn test_parse_multipart_identifier_positive() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "F(o)o. \"bar".to_string(),
                quote_style: Some('"'),
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });

        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method("CATALOG . table", |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });
    }

    // Malformed multipart identifiers must produce the exact error text.
    #[test]
    fn test_parse_multipart_identifier_negative() {
        macro_rules! test_parse_multipart_identifier_error {
            ($input:expr, $expected_err:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
                    assert_eq!(actual_err.to_string(), $expected_err);
                });
            }};
        }

        test_parse_multipart_identifier_error!(
            "",
            "sql parser error: Empty input when parsing identifier",
        );

        test_parse_multipart_identifier_error!(
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        );

        test_parse_multipart_identifier_error!(
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        );
    }

    // MySQL `PARTITION (...)` selection on a table factor.
    #[test]
    fn test_mysql_partition_selection() {
        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
        let expected = vec!["p0", "p2"];

        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
        assert_eq!(ast.len(), 1);
        if let Statement::Query(v) = &ast[0] {
            if let SetExpr::Select(select) = &*v.body {
                assert_eq!(select.from.len(), 1);
                let from: &TableWithJoins = &select.from[0];
                let table_factor = &from.relation;
                if let TableFactor::Table { partitions, .. } = table_factor {
                    let actual: Vec<&str> = partitions
                        .iter()
                        .map(|ident| ident.value.as_str())
                        .collect();
                    assert_eq!(expected, actual);
                }
            }
        } else {
            panic!("fail to parse mysql partition selection");
        }
    }

    // REPLACE INTO with an invalid placeholder must be rejected.
    #[test]
    fn test_replace_into_placeholders() {
        let sql = "REPLACE INTO t (a) VALUES (&a)";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }

    #[test]
    fn test_replace_into_set_placeholder() {
        let sql = "REPLACE INTO t SET ?";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }

    // A bare REPLACE with nothing following is a parse error.
    #[test]
    fn test_replace_incomplete() {
        let sql = r#"REPLACE"#;

        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
    }
}