1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration};
36use crate::ast::Statement::CreatePolicy;
37use crate::ast::*;
38use crate::dialect::*;
39use crate::keywords::{Keyword, ALL_KEYWORDS};
40use crate::tokenizer::*;
41
42mod alter;
43
/// Errors produced while turning a SQL string into an AST.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could begin.
    TokenizerError(String),
    /// The token stream did not match the expected grammar.
    ParserError(String),
    /// Nesting exceeded the configured recursion limit.
    RecursionLimitExceeded,
}

// Builds an `Err(ParserError::ParserError(..))` whose message is the given
// text followed by the source location, so errors report where in the SQL
// input they occurred.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
57
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks how much nesting depth is still available while parsing.
    ///
    /// The counter is shared (via `Rc`) with every outstanding
    /// [`DepthGuard`]; each guard restores one unit of depth when dropped.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consumes one unit of depth and returns a guard that gives it
        /// back on drop.
        ///
        /// # Errors
        /// Returns `ParserError::RecursionLimitExceeded` when the budget
        /// is exhausted.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(decremented) => {
                    self.remaining_depth.set(decremented);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// Drop-guard handed out by [`RecursionCounter::try_decrease`];
    /// dropping it returns the borrowed unit of depth to the shared
    /// counter.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            self.remaining_depth.set(self.remaining_depth.get() + 1);
        }
    }
}
124
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op stand-in used when `std` is unavailable: recursion depth is
    /// not tracked in `no_std` builds, so this type holds no state.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The requested depth is ignored; nothing is tracked.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds, handing back a guard that does nothing on drop.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard mirroring the `std` version's API.
    pub struct DepthGuard {}
}
145
/// Whether a grammar element may be omitted (`Optional`) or must be
/// present (`Mandatory`).
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}

/// Marks whether a derived table was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}

/// A projection item: an ordinary expression, a qualified wildcard
/// (`alias.*`), or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
162
impl From<TokenizerError> for ParserError {
    /// Wraps a tokenizer failure so it can flow through parser `Result`s
    /// (this is what lets `?` be used on tokenizer calls).
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
168
169impl fmt::Display for ParserError {
170 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
171 write!(
172 f,
173 "sql parser error: {}",
174 match self {
175 ParserError::TokenizerError(s) => s,
176 ParserError::ParserError(s) => s,
177 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
178 }
179 )
180 }
181}
182
// `std::error::Error` only exists with the standard library.
#[cfg(feature = "std")]
impl std::error::Error for ParserError {}

/// Default nesting budget used by [`Parser::new`]; override it with
/// [`Parser::with_recursion_limit`].
const DEFAULT_REMAINING_DEPTH: usize = 50;

/// Sentinel end-of-file token carrying a dummy (line 0, column 0) location.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
197
// Newtype over `bool`. NOTE(review): judging by the name it records whether
// a trailing `>` bracket was already consumed while parsing a nested type —
// confirm at the use sites (outside this chunk).
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
217
/// Knobs that alter parsing behavior independently of the SQL dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Tolerate a trailing comma in comma-separated lists.
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization (passed through to the
    /// tokenizer by `try_with_sql`).
    pub unescape: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
        }
    }
}

impl ParserOptions {
    /// Creates options with the default settings.
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns these options with `trailing_commas` replaced.
    pub fn with_trailing_commas(self, trailing_commas: bool) -> Self {
        Self {
            trailing_commas,
            ..self
        }
    }

    /// Returns these options with `unescape` replaced.
    pub fn with_unescape(self, unescape: bool) -> Self {
        Self { unescape, ..self }
    }
}
265
/// Parser modes that change how certain constructs are interpreted.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Parsing inside a `CONNECT BY` clause. NOTE(review): presumably
    /// alters handling of hierarchical-query syntax — confirm at use sites
    /// (outside this chunk).
    ConnectBy,
}
275
/// SQL parser: consumes a token stream produced by the tokenizer and
/// builds AST [`Statement`]s according to the configured [`Dialect`].
pub struct Parser<'a> {
    /// The tokens being parsed.
    tokens: Vec<TokenWithSpan>,
    /// Cursor into `tokens` (reset to 0 whenever tokens are loaded).
    index: usize,
    /// Current parsing mode, e.g. inside `CONNECT BY`.
    state: ParserState,
    /// Dialect that customizes keyword and expression handling.
    dialect: &'a dyn Dialect,
    /// Behavior knobs (trailing commas, literal unescaping).
    options: ParserOptions,
    /// Guards against stack overflow on deeply nested input.
    recursion_counter: RecursionCounter,
}
330
331impl<'a> Parser<'a> {
    /// Creates a parser for the given dialect with no tokens loaded.
    ///
    /// The recursion limit starts at `DEFAULT_REMAINING_DEPTH`, and
    /// trailing-comma tolerance is taken from the dialect.
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }
357
    /// Overrides the maximum statement/expression nesting depth.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }

    /// Replaces the parser options wholesale (including any dialect-derived
    /// trailing-comma setting applied by [`Parser::new`]).
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }

    /// Loads location-annotated tokens and rewinds the cursor to the start.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
418
419 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
421 let tokens_with_locations: Vec<TokenWithSpan> = tokens
423 .into_iter()
424 .map(|token| TokenWithSpan {
425 token,
426 span: Span::empty(),
427 })
428 .collect();
429 self.with_tokens_with_locations(tokens_with_locations)
430 }
431
    /// Tokenizes `sql` using this parser's dialect and unescape setting,
    /// then loads the resulting tokens.
    ///
    /// # Errors
    /// Returns [`ParserError::TokenizerError`] if tokenization fails.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{}'...", sql);
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
445
    /// Parses zero or more semicolon-separated statements until EOF.
    ///
    /// Runs of extra semicolons are skipped. An `END` keyword appearing
    /// where a delimiter was expected also terminates parsing (presumably
    /// it closes an enclosing block rather than starting a statement).
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any run of semicolons between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // `END` in delimiter position ends this statement list.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            // Two statements without a separating semicolon is an error.
            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
492
    /// Convenience entry point: tokenizes and parses `sql` in one call.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
511
    /// Parses a single top-level statement (SELECT, INSERT, CREATE, ...),
    /// stopping before the statement separator, if any.
    ///
    /// The active dialect gets the first chance to parse the statement; the
    /// keyword dispatch below runs only if it declines.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Each statement consumes one unit of recursion depth, returned
        // when the guard drops at the end of this call.
        let _guard = self.recursion_counter.try_decrease()?;

        // Allow the dialect to completely override statement parsing.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                // The following sub-parsers expect to see their leading
                // keyword, so rewind before delegating.
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt()
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt()
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while()
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt()
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(),
                Keyword::INSERT => self.parse_insert(),
                Keyword::REPLACE => self.parse_replace(),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(),
                // Dialect-gated statements below.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => self.parse_unload(),
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                _ => self.expected("an SQL statement", next_token),
            },
            // A statement may also start with `(`, e.g. a parenthesized
            // query; rewind so the query parser sees the paren.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
633
    /// Parses a procedural `CASE ... WHEN ... [ELSE ...] END [CASE]`
    /// statement (not the CASE *expression*).
    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // `CASE WHEN ...` is the searched form with no operand expression.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; attach the last keyword seen.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(Statement::Case(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        }))
    }
670
    /// Parses a procedural `IF ... [ELSEIF ...]* [ELSE ...] END IF`
    /// statement.
    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF branches.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The statement must close with `END IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(Statement::If(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        }))
    }
710
    /// Parses a procedural `WHILE <condition> ... END` statement.
    fn parse_while(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(Statement::While(WhileStatement { while_block }))
    }
720
721 fn parse_conditional_statement_block(
729 &mut self,
730 terminal_keywords: &[Keyword],
731 ) -> Result<ConditionalStatementBlock, ParserError> {
732 let start_token = self.get_current_token().clone(); let mut then_token = None;
734
735 let condition = match &start_token.token {
736 Token::Word(w) if w.keyword == Keyword::ELSE => None,
737 Token::Word(w) if w.keyword == Keyword::WHILE => {
738 let expr = self.parse_expr()?;
739 Some(expr)
740 }
741 _ => {
742 let expr = self.parse_expr()?;
743 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
744 Some(expr)
745 }
746 };
747
748 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
749
750 Ok(ConditionalStatementBlock {
751 start_token: AttachedToken(start_token),
752 condition,
753 then_token,
754 conditional_statements,
755 })
756 }
757
758 pub(crate) fn parse_conditional_statements(
761 &mut self,
762 terminal_keywords: &[Keyword],
763 ) -> Result<ConditionalStatements, ParserError> {
764 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
765 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
766 let statements = self.parse_statement_list(terminal_keywords)?;
767 let end_token = self.expect_keyword(Keyword::END)?;
768
769 ConditionalStatements::BeginEnd(BeginEndStatements {
770 begin_token: AttachedToken(begin_token),
771 statements,
772 end_token: AttachedToken(end_token),
773 })
774 } else {
775 ConditionalStatements::Sequence {
776 statements: self.parse_statement_list(terminal_keywords)?,
777 }
778 };
779 Ok(conditional_statements)
780 }
781
    /// Parses `RAISE [USING MESSAGE = <expr> | <expr>]`.
    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // A bare RAISE with no expression is allowed: maybe_parse
            // backtracks and yields None if no expression follows.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(Statement::Raise(RaiseStatement { value }))
    }
797
    /// Parses the body of `COMMENT [IF EXISTS] ON <object type> <name> IS
    /// {'text' | NULL}` (the `COMMENT` keyword is already consumed).
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        // Map the object-type keyword to its CommentObject variant, then
        // read the object's (possibly qualified) name.
        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` removes the comment; a string literal sets it.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }
842
    /// Parses MySQL's `FLUSH ...` statement (the `FLUSH` keyword is already
    /// consumed).
    ///
    /// # Errors
    /// Rejects dialects other than MySQL/generic, and unknown flush targets.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
        }

        // Optional binlog-propagation modifier.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // Dispatch on the flush target; multi-word targets are tried
        // before their single-word prefixes (e.g. `RELAY LOGS` vs `LOGS`).
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // `RELAY LOGS [FOR CHANNEL <channel>]`
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // `TABLES [t1, t2, ...] [WITH READ LOCK] [FOR EXPORT]`
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A plain identifier starts the table list;
                            // rewind so the list parser sees it.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    // Any non-word token (e.g. EOF, `;`) ends the clause.
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
929
    /// Parses `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`
    /// (the `MSCK` keyword is already consumed).
    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition action is optional; maybe_parse backtracks when the
        // trailing PARTITIONS keyword is absent.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Statement::Msck {
            repair,
            table_name,
            partition_action,
        })
    }
956
    /// Parses the body of a `TRUNCATE [TABLE] [ONLY] <names> ...` statement
    /// (the `TRUNCATE` keyword is already consumed).
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let only = self.parse_keyword(Keyword::ONLY);

        let table_names = self
            .parse_comma_separated(|p| p.parse_object_name(false))?
            .into_iter()
            .map(|n| TruncateTableTarget { name: n })
            .collect();

        // Optional `PARTITION (<exprs>)` clause.
        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // RESTART/CONTINUE IDENTITY and CASCADE/RESTRICT are only parsed
        // for PostgreSQL-compatible dialects.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Statement::Truncate {
            table_names,
            partitions,
            table,
            only,
            identity,
            cascade,
            on_cluster,
        })
    }
1001
1002 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1003 if self.parse_keyword(Keyword::CASCADE) {
1004 Some(CascadeOption::Cascade)
1005 } else if self.parse_keyword(Keyword::RESTRICT) {
1006 Some(CascadeOption::Restrict)
1007 } else {
1008 None
1009 }
1010 }
1011
    /// Parses the optional parenthesized option list of DuckDB's `ATTACH`,
    /// e.g. `(READ_ONLY, TYPE SQLITE)`.
    ///
    /// Returns an empty list when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // READ_ONLY may carry an explicit TRUE/FALSE or stand alone.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            // After each option: `)` finishes the list, `,` continues it.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }
1046
    /// Parses DuckDB's `ATTACH [DATABASE] [IF NOT EXISTS] <path> [AS alias]
    /// [(<options>)]` (the `ATTACH` keyword is already consumed).
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1066
    /// Parses DuckDB's `DETACH [DATABASE] [IF EXISTS] <alias>` (the
    /// `DETACH` keyword is already consumed).
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1077
    /// Parses `ATTACH [DATABASE] <file expr> AS <schema>` (the `ATTACH`
    /// keyword is already consumed; used for non-DuckDB dialects).
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1089
    /// Parses `ANALYZE [TABLE] <name>` followed by any mix of optional
    /// clauses (PARTITION, FOR COLUMNS, CACHE METADATA, NOSCAN, COMPUTE
    /// STATISTICS), in any order, until none match.
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list itself is optional; maybe_parse
                    // backtracks to an empty list if none follows.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Statement::Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1146
    /// Parses a wildcard (`*` or qualified `a.b.*`) if present; otherwise
    /// rewinds the token cursor and parses a regular expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember where we started so we can rewind on fallback.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // `t` was matched as one of the two variants above.
                        _ => unreachable!(),
                    }];

                    // Accumulate `.part` segments until `*` or failure.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                // e.g. `a.'b'.c`: quoted path segment
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                // Terminal `*`: qualified wildcard `a.b.*`.
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse as an ordinary expression.
        self.index = index;
        self.parse_expr()
    }
1192
    /// Parses a new expression, starting at the dialect's lowest
    /// ("unknown") precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1197
    /// Parses an expression followed by an optional alias and an optional
    /// `ASC`/`DESC` ordering.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject implicit aliases that collide with what may follow here
        // (ASC/DESC/GROUP); an explicit `AS` still allows them.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1216
    /// Parses a sub-expression, consuming infix operators for as long as
    /// their precedence exceeds `precedence` (precedence climbing).
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Each nesting level consumes one unit of recursion depth.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {:?}", expr);
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {:?}", next_precedence);

            if precedence >= next_precedence {
                break;
            }

            // `.` access is handled by parse_compound_expr above, not as
            // an infix operator.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1244
1245 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1246 let condition = self.parse_expr()?;
1247 let message = if self.parse_keyword(Keyword::AS) {
1248 Some(self.parse_expr()?)
1249 } else {
1250 None
1251 };
1252
1253 Ok(Statement::Assert { condition, message })
1254 }
1255
    /// Parses `SAVEPOINT <name>`.
    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }

    /// Parses `RELEASE [SAVEPOINT] <name>`.
    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        // The SAVEPOINT keyword is optional here.
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier()?;

        Ok(Statement::ReleaseSavepoint { name })
    }

    /// Parses `LISTEN <channel>`.
    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        Ok(Statement::LISTEN { channel })
    }
1272
    /// Parses `UNLISTEN { <channel> | * }`.
    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            // `UNLISTEN *`: the wildcard is stored as an identifier.
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    // Rewind so the error message points at the bad token.
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }
1287
1288 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1289 let channel = self.parse_identifier()?;
1290 let payload = if self.consume_token(&Token::Comma) {
1291 Some(self.parse_literal_string()?)
1292 } else {
1293 None
1294 };
1295 Ok(Statement::NOTIFY { channel, payload })
1296 }
1297
1298 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1300 if self.peek_keyword(Keyword::TABLE) {
1301 self.expect_keyword(Keyword::TABLE)?;
1302 let rename_tables = self.parse_comma_separated(|parser| {
1303 let old_name = parser.parse_object_name(false)?;
1304 parser.expect_keyword(Keyword::TO)?;
1305 let new_name = parser.parse_object_name(false)?;
1306
1307 Ok(RenameTable { old_name, new_name })
1308 })?;
1309 Ok(Statement::RenameTable(rename_tables))
1310 } else {
1311 self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1312 }
1313 }
1314
    /// Try to parse an expression prefix introduced by a *reserved* keyword
    /// (`CASE`, `CAST`, `EXISTS`, ...). The keyword token has already been
    /// consumed; `w` is its word and `w_span` its source span.
    ///
    /// Returns `Ok(Some(expr))` when the keyword introduced an expression,
    /// `Ok(None)` when it is not a recognized prefix (the caller then treats
    /// the word as an ordinary identifier), and `Err` on a parse error.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean and NULL literals: rewind so parse_value sees the token.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Postgres-style niladic "functions" callable without parentheses.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Datetime value functions whose argument list is optional.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS(<subquery>) — except in Databricks, where EXISTS is only
            // a subquery test when followed by SELECT/WITH.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            // POSITION is only special when a parenthesized body follows.
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
            }
            // SUBSTR/SUBSTRING: rewind so parse_substring can see which
            // keyword was used (it records the shorthand form).
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // ARRAY[1, 2, 3] — bracketed array literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // ARRAY(<subquery>) — but not in dialects where ARRAY(...) is an
            // ordinary function call over an expression list.
            Keyword::ARRAY
                if self.peek_token() == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // PRIOR is only a prefix operator inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // MAP {'k': 'v'} — DuckDB-style map literal.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            // Geometric typed-string literals (POINT '...', LINE '...', ...)
            // in dialects that support them.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            // Not an expression prefix; caller falls back to identifier parsing.
            _ => Ok(None),
        }
    }
1438
1439 fn parse_expr_prefix_by_unreserved_word(
1441 &mut self,
1442 w: &Word,
1443 w_span: Span,
1444 ) -> Result<Expr, ParserError> {
1445 match self.peek_token().token {
1446 Token::LParen if !self.peek_outer_join_operator() => {
1447 let id_parts = vec![w.clone().into_ident(w_span)];
1448 self.parse_function(ObjectName::from(id_parts))
1449 }
1450 Token::SingleQuotedString(_)
1452 | Token::DoubleQuotedString(_)
1453 | Token::HexStringLiteral(_)
1454 if w.value.starts_with('_') =>
1455 {
1456 Ok(Expr::Prefixed {
1457 prefix: w.clone().into_ident(w_span),
1458 value: self.parse_introduced_string_expr()?.into(),
1459 })
1460 }
1461 Token::SingleQuotedString(_)
1463 | Token::DoubleQuotedString(_)
1464 | Token::HexStringLiteral(_)
1465 if w.value.starts_with('_') =>
1466 {
1467 Ok(Expr::Prefixed {
1468 prefix: w.clone().into_ident(w_span),
1469 value: self.parse_introduced_string_expr()?.into(),
1470 })
1471 }
1472 Token::Arrow if self.dialect.supports_lambda_functions() => {
1473 self.expect_token(&Token::Arrow)?;
1474 Ok(Expr::Lambda(LambdaFunction {
1475 params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1476 body: Box::new(self.parse_expr()?),
1477 }))
1478 }
1479 _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1480 }
1481 }
1482
    /// Parse an expression prefix — the leading operand before any infix
    /// operators: literals, unary operators, parenthesized expressions,
    /// subqueries, keyword-introduced expressions, placeholders, etc.
    /// A trailing `COLLATE <name>` clause is folded in at the end.
    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
        // Dialect-specific prefix hooks take priority over everything below.
        if let Some(prefix) = self.dialect.parse_prefix(self) {
            return prefix;
        }

        let loc = self.peek_token_ref().span.start;
        // First try `TYPE 'string'` typed-string literals (e.g. DATE '2020-01-01').
        // Custom types are rejected here (via a throwaway error) so arbitrary
        // identifiers followed by strings are not misparsed; INTERVAL is
        // routed to its dedicated parser.
        let opt_expr = self.maybe_parse(|parser| {
            match parser.parse_data_type()? {
                DataType::Interval => parser.parse_interval(),
                DataType::Custom(..) => parser_err!("dummy", loc),
                data_type => Ok(Expr::TypedString {
                    data_type,
                    value: parser.parse_value()?.value,
                }),
            }
        })?;

        if let Some(expr) = opt_expr {
            return Ok(expr);
        }

        let dialect = self.dialect;

        self.advance_token();
        let next_token_index = self.get_current_index();
        let next_token = self.get_current_token();
        let span = next_token.span;
        let expr = match &next_token.token {
            Token::Word(w) => {
                // Clone so the borrow of the token stream can end.
                let w = w.clone();
                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
                    // Recognized reserved-word expression.
                    Ok(Some(expr)) => Ok(expr),

                    // Not a reserved-word prefix: identifier/function/introducer.
                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),

                    Err(e) => {
                        // The reserved-word parse failed. If the dialect does
                        // not reserve this keyword for identifiers, retry it
                        // as an unreserved word before surfacing the error.
                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
                            }) {
                                return Ok(expr);
                            }
                        }
                        return Err(e);
                    }
                }
            }
            Token::LBracket => self.parse_array_expr(false),
            // Unary plus/minus bind at multiplicative precedence.
            tok @ Token::Minus | tok @ Token::Plus => {
                let op = if *tok == Token::Plus {
                    UnaryOperator::Plus
                } else {
                    UnaryOperator::Minus
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
                    ),
                })
            }
            // `!expr` logical negation where the dialect supports it.
            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
                op: UnaryOperator::BangNot,
                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
            }),
            // Postgres-only prefix operators (!!, |/, ||/, @, ~).
            tok @ Token::DoubleExclamationMark
            | tok @ Token::PGSquareRoot
            | tok @ Token::PGCubeRoot
            | tok @ Token::AtSign
            | tok @ Token::Tilde
                if dialect_is!(dialect is PostgreSqlDialect) =>
            {
                let op = match tok {
                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
                    Token::AtSign => UnaryOperator::PGAbs,
                    Token::Tilde => UnaryOperator::PGBitwiseNot,
                    _ => unreachable!(),
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            // Geometric prefix operators where the dialect supports them.
            tok @ Token::Sharp
            | tok @ Token::AtDashAt
            | tok @ Token::AtAt
            | tok @ Token::QuestionMarkDash
            | tok @ Token::QuestionPipe
                if self.dialect.supports_geometric_types() =>
            {
                let op = match tok {
                    Token::Sharp => UnaryOperator::Hash,
                    Token::AtDashAt => UnaryOperator::AtDashAt,
                    Token::AtAt => UnaryOperator::DoubleAt,
                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
                    _ => {
                        return Err(ParserError::ParserError(format!(
                            "Unexpected token in unary operator parsing: {:?}",
                            tok
                        )))
                    }
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            // Literal tokens: rewind and let parse_value handle them.
            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
            {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::UnicodeStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::Number(_, _)
            | Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::TripleSingleQuotedString(_)
            | Token::TripleDoubleQuotedString(_)
            | Token::DollarQuotedString(_)
            | Token::SingleQuotedByteStringLiteral(_)
            | Token::DoubleQuotedByteStringLiteral(_)
            | Token::TripleSingleQuotedByteStringLiteral(_)
            | Token::TripleDoubleQuotedByteStringLiteral(_)
            | Token::SingleQuotedRawStringLiteral(_)
            | Token::DoubleQuotedRawStringLiteral(_)
            | Token::TripleSingleQuotedRawStringLiteral(_)
            | Token::TripleDoubleQuotedRawStringLiteral(_)
            | Token::NationalStringLiteral(_)
            | Token::HexStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // Parenthesized: subquery, multi-param lambda, nested expr or tuple.
            Token::LParen => {
                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
                    expr
                } else if let Some(lambda) = self.try_parse_lambda()? {
                    return Ok(lambda);
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
                    match exprs.len() {
                        // parse_comma_separated yields at least one element.
                        0 => unreachable!(),
                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
                        _ => Expr::Tuple(exprs),
                    }
                };
                self.expect_token(&Token::RParen)?;
                Ok(expr)
            }
            // Bind parameters / placeholders.
            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LBrace => {
                self.prev_token();
                self.parse_lbrace_expr()
            }
            _ => self.expected_at("an expression", next_token_index),
        }?;

        // Any prefix expression may carry a trailing COLLATE clause.
        if self.parse_keyword(Keyword::COLLATE) {
            Ok(Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            })
        } else {
            Ok(expr)
        }
    }
1706
1707 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1708 let value: Value = self.parse_value()?.value;
1709 Ok(Expr::TypedString {
1710 data_type: DataType::GeometricType(kind),
1711 value,
1712 })
1713 }
1714
    /// Continue parsing a compound expression (member-access / subscript
    /// chain) rooted at `root`, extending an already-collected `chain`.
    ///
    /// Handles `.` member access (including Postgres `.*` wildcards and
    /// quoted-string members), `[...]` subscripts (when the dialect is not
    /// PartiQL), and a trailing Oracle outer-join marker `(+)`.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // Postgres allows `a.b.*`: take the `*` token now and
                        // finish the chain as a qualified wildcard below.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            // Other dialects: rewind the consumed `.` so the
                            // caller can handle the wildcard.
                            self.prev_token();
                        }

                        break;
                    }
                    Token::SingleQuotedString(s) => {
                        // Quoted member name, e.g. `a.'b'`.
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    // Any other token: parse the member at Period precedence
                    // and flatten nested compound results into this chain.
                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
                        Expr::CompoundFieldAccess { root, access_chain } => {
                            chain.push(AccessExpr::Dot(*root));
                            chain.extend(access_chain);
                        }
                        Expr::CompoundIdentifier(parts) => chain
                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
                        expr => {
                            chain.push(AccessExpr::Dot(expr));
                        }
                    },
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `[...]` subscripts, possibly multi-dimensional.
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` requires every chain element to be a plain identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected("an identifier or a '*' after '.'", self.peek_token());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `a.b(+)` — Oracle outer-join marker after a column reference.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
1806
    /// Fold a parsed access chain into a single expression, recognizing the
    /// shapes that have dedicated AST forms:
    ///
    /// - empty chain → `root` unchanged
    /// - all identifiers → `Expr::CompoundIdentifier`
    /// - identifiers ending in a function call → one `Expr::Function` with a
    ///   compound name (e.g. `db.schema.func(x)`)
    /// - a single trailing `(+)` → `Expr::OuterJoin` over the full identifier
    /// - anything else → `Expr::CompoundFieldAccess`
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `ident.ident.…​.func(...)`: pop the function and fold the leading
        // identifiers into its object name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1)
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // root + remaining dotted identifiers, prepended to the
            // function's own (possibly already compound) name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // A single trailing `(+)` marker over a compound column reference.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // General case: keep the chain as a field-access expression.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
1899
1900 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1901 match k {
1902 Keyword::LOCAL => Some(ContextModifier::Local),
1903 Keyword::GLOBAL => Some(ContextModifier::Global),
1904 Keyword::SESSION => Some(ContextModifier::Session),
1905 _ => None,
1906 }
1907 }
1908
1909 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1911 if !matches!(root, Expr::Identifier(_)) {
1912 return false;
1913 }
1914 fields
1915 .iter()
1916 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1917 }
1918
1919 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1921 let mut idents = vec![];
1922 if let Expr::Identifier(root) = root {
1923 idents.push(root);
1924 for x in fields {
1925 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1926 idents.push(ident);
1927 } else {
1928 return parser_err!(
1929 format!("Expected identifier, found: {}", x),
1930 x.span().start
1931 );
1932 }
1933 }
1934 Ok(idents)
1935 } else {
1936 parser_err!(
1937 format!("Expected identifier, found: {}", root),
1938 root.span().start
1939 )
1940 }
1941 }
1942
1943 fn peek_outer_join_operator(&mut self) -> bool {
1945 if !self.dialect.supports_outer_join_operator() {
1946 return false;
1947 }
1948
1949 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
1950 Token::LParen == maybe_lparen.token
1951 && Token::Plus == maybe_plus.token
1952 && Token::RParen == maybe_rparen.token
1953 }
1954
1955 fn maybe_parse_outer_join_operator(&mut self) -> bool {
1958 self.dialect.supports_outer_join_operator()
1959 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
1960 }
1961
1962 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
1963 self.expect_token(&Token::LParen)?;
1964 let options = self.parse_comma_separated(Self::parse_utility_option)?;
1965 self.expect_token(&Token::RParen)?;
1966
1967 Ok(options)
1968 }
1969
1970 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
1971 let name = self.parse_identifier()?;
1972
1973 let next_token = self.peek_token();
1974 if next_token == Token::Comma || next_token == Token::RParen {
1975 return Ok(UtilityOption { name, arg: None });
1976 }
1977 let arg = self.parse_expr()?;
1978
1979 Ok(UtilityOption {
1980 name,
1981 arg: Some(arg),
1982 })
1983 }
1984
1985 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
1986 if !self.peek_sub_query() {
1987 return Ok(None);
1988 }
1989
1990 Ok(Some(Expr::Subquery(self.parse_query()?)))
1991 }
1992
1993 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
1994 if !self.dialect.supports_lambda_functions() {
1995 return Ok(None);
1996 }
1997 self.maybe_parse(|p| {
1998 let params = p.parse_comma_separated(|p| p.parse_identifier())?;
1999 p.expect_token(&Token::RParen)?;
2000 p.expect_token(&Token::Arrow)?;
2001 let expr = p.parse_expr()?;
2002 Ok(Expr::Lambda(LambdaFunction {
2003 params: OneOrManyWithParens::Many(params),
2004 body: Box::new(expr),
2005 }))
2006 })
2007 }
2008
2009 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2018 self.maybe_parse(|p| {
2019 p.expect_keyword(Keyword::FN)?;
2020 let fn_name = p.parse_object_name(false)?;
2021 let mut fn_call = p.parse_function_call(fn_name)?;
2022 fn_call.uses_odbc_syntax = true;
2023 Ok(Expr::Function(fn_call))
2024 })
2025 }
2026
2027 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2028 self.parse_function_call(name).map(Expr::Function)
2029 }
2030
    /// Parse a function call body for `name`, starting at the opening `(`.
    ///
    /// Handles Snowflake `fn(<subquery>)`, ClickHouse parameterized calls
    /// `fn(params)(args)`, and the optional trailing clauses
    /// `WITHIN GROUP (ORDER BY ...)`, `FILTER (WHERE ...)`,
    /// `RESPECT|IGNORE NULLS` and `OVER ...` — in that order.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Snowflake permits a subquery directly as the argument list.
        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse parameterized function: the first list turns out to be
        // the parameters and a second parenthesized list holds the arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for RESPECT/IGNORE NULLS here when it was not already
        // given inside the argument list itself.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                // Inline window specification: OVER (PARTITION BY ... ).
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                // Reference to a named window: OVER w.
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2118
2119 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2121 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2122 Some(keyword) => {
2123 self.expect_keyword_is(Keyword::NULLS)?;
2124
2125 Ok(match keyword {
2126 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2127 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2128 _ => None,
2129 })
2130 }
2131 None => Ok(None),
2132 }
2133 }
2134
2135 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2136 let args = if self.consume_token(&Token::LParen) {
2137 FunctionArguments::List(self.parse_function_argument_list()?)
2138 } else {
2139 FunctionArguments::None
2140 };
2141 Ok(Expr::Function(Function {
2142 name,
2143 uses_odbc_syntax: false,
2144 parameters: FunctionArguments::None,
2145 args,
2146 filter: None,
2147 over: None,
2148 null_treatment: None,
2149 within_group: vec![],
2150 }))
2151 }
2152
2153 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2154 let next_token = self.next_token();
2155 match &next_token.token {
2156 Token::Word(w) => match w.keyword {
2157 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2158 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2159 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2160 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2161 },
2162 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2163 }
2164 }
2165
2166 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2167 let units = self.parse_window_frame_units()?;
2168 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2169 let start_bound = self.parse_window_frame_bound()?;
2170 self.expect_keyword_is(Keyword::AND)?;
2171 let end_bound = Some(self.parse_window_frame_bound()?);
2172 (start_bound, end_bound)
2173 } else {
2174 (self.parse_window_frame_bound()?, None)
2175 };
2176 Ok(WindowFrame {
2177 units,
2178 start_bound,
2179 end_bound,
2180 })
2181 }
2182
2183 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2185 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2186 Ok(WindowFrameBound::CurrentRow)
2187 } else {
2188 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2189 None
2190 } else {
2191 Some(Box::new(match self.peek_token().token {
2192 Token::SingleQuotedString(_) => self.parse_interval()?,
2193 _ => self.parse_expr()?,
2194 }))
2195 };
2196 if self.parse_keyword(Keyword::PRECEDING) {
2197 Ok(WindowFrameBound::Preceding(rows))
2198 } else if self.parse_keyword(Keyword::FOLLOWING) {
2199 Ok(WindowFrameBound::Following(rows))
2200 } else {
2201 self.expected("PRECEDING or FOLLOWING", self.peek_token())
2202 }
2203 }
2204 }
2205
2206 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2208 if self.dialect.supports_group_by_expr() {
2209 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2210 self.expect_token(&Token::LParen)?;
2211 let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2212 self.expect_token(&Token::RParen)?;
2213 Ok(Expr::GroupingSets(result))
2214 } else if self.parse_keyword(Keyword::CUBE) {
2215 self.expect_token(&Token::LParen)?;
2216 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2217 self.expect_token(&Token::RParen)?;
2218 Ok(Expr::Cube(result))
2219 } else if self.parse_keyword(Keyword::ROLLUP) {
2220 self.expect_token(&Token::LParen)?;
2221 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2222 self.expect_token(&Token::RParen)?;
2223 Ok(Expr::Rollup(result))
2224 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2225 Ok(Expr::Tuple(vec![]))
2229 } else {
2230 self.parse_expr()
2231 }
2232 } else {
2233 self.parse_expr()
2235 }
2236 }
2237
2238 fn parse_tuple(
2242 &mut self,
2243 lift_singleton: bool,
2244 allow_empty: bool,
2245 ) -> Result<Vec<Expr>, ParserError> {
2246 if lift_singleton {
2247 if self.consume_token(&Token::LParen) {
2248 let result = if allow_empty && self.consume_token(&Token::RParen) {
2249 vec![]
2250 } else {
2251 let result = self.parse_comma_separated(Parser::parse_expr)?;
2252 self.expect_token(&Token::RParen)?;
2253 result
2254 };
2255 Ok(result)
2256 } else {
2257 Ok(vec![self.parse_expr()?])
2258 }
2259 } else {
2260 self.expect_token(&Token::LParen)?;
2261 let result = if allow_empty && self.consume_token(&Token::RParen) {
2262 vec![]
2263 } else {
2264 let result = self.parse_comma_separated(Parser::parse_expr)?;
2265 self.expect_token(&Token::RParen)?;
2266 result
2267 };
2268 Ok(result)
2269 }
2270 }
2271
2272 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2273 let mut operand = None;
2274 if !self.parse_keyword(Keyword::WHEN) {
2275 operand = Some(Box::new(self.parse_expr()?));
2276 self.expect_keyword_is(Keyword::WHEN)?;
2277 }
2278 let mut conditions = vec![];
2279 loop {
2280 let condition = self.parse_expr()?;
2281 self.expect_keyword_is(Keyword::THEN)?;
2282 let result = self.parse_expr()?;
2283 conditions.push(CaseWhen { condition, result });
2284 if !self.parse_keyword(Keyword::WHEN) {
2285 break;
2286 }
2287 }
2288 let else_result = if self.parse_keyword(Keyword::ELSE) {
2289 Some(Box::new(self.parse_expr()?))
2290 } else {
2291 None
2292 };
2293 self.expect_keyword_is(Keyword::END)?;
2294 Ok(Expr::Case {
2295 operand,
2296 conditions,
2297 else_result,
2298 })
2299 }
2300
2301 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2302 if self.parse_keyword(Keyword::FORMAT) {
2303 let value = self.parse_value()?.value;
2304 match self.parse_optional_time_zone()? {
2305 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2306 None => Ok(Some(CastFormat::Value(value))),
2307 }
2308 } else {
2309 Ok(None)
2310 }
2311 }
2312
2313 pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2314 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2315 self.parse_value().map(|v| Some(v.value))
2316 } else {
2317 Ok(None)
2318 }
2319 }
2320
2321 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2323 self.expect_token(&Token::LParen)?;
2324 let data_type = self.parse_data_type()?;
2325 self.expect_token(&Token::Comma)?;
2326 let expr = self.parse_expr()?;
2327 let styles = if self.consume_token(&Token::Comma) {
2328 self.parse_comma_separated(Parser::parse_expr)?
2329 } else {
2330 Default::default()
2331 };
2332 self.expect_token(&Token::RParen)?;
2333 Ok(Expr::Convert {
2334 is_try,
2335 expr: Box::new(expr),
2336 data_type: Some(data_type),
2337 charset: None,
2338 target_before_value: true,
2339 styles,
2340 })
2341 }
2342
    /// Parse the body of `CONVERT`/`TRY_CONVERT`. Dispatches to the MSSQL
    /// form (type first) when the dialect requires it; otherwise handles the
    /// standard forms `CONVERT(expr USING charset)` and
    /// `CONVERT(expr, type [CHARACTER SET charset])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // MySQL charset conversion: CONVERT(expr USING charset).
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        // Optional `CHARACTER SET charset` suffix on the target type.
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2382
2383 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2385 self.expect_token(&Token::LParen)?;
2386 let expr = self.parse_expr()?;
2387 self.expect_keyword_is(Keyword::AS)?;
2388 let data_type = self.parse_data_type()?;
2389 let format = self.parse_optional_cast_format()?;
2390 self.expect_token(&Token::RParen)?;
2391 Ok(Expr::Cast {
2392 kind,
2393 expr: Box::new(expr),
2394 data_type,
2395 format,
2396 })
2397 }
2398
2399 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2401 self.expect_token(&Token::LParen)?;
2402 let exists_node = Expr::Exists {
2403 negated,
2404 subquery: self.parse_query()?,
2405 };
2406 self.expect_token(&Token::RParen)?;
2407 Ok(exists_node)
2408 }
2409
    /// Parse the body of `EXTRACT(<field> FROM <expr>)`, also accepting the
    /// Snowflake/generic comma form `EXTRACT(<field>, <expr>)`.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | GenericDialect)
        {
            // NOTE: the comma is consumed before the dialect check; in other
            // dialects a comma here falls through to the error below.
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2434
2435 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2436 self.expect_token(&Token::LParen)?;
2437 let expr = self.parse_expr()?;
2438 let field = if self.parse_keyword(Keyword::TO) {
2440 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2442 } else if self.consume_token(&Token::Comma) {
2443 match self.parse_value()?.value {
2445 Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2446 _ => {
2447 return Err(ParserError::ParserError(
2448 "Scale field can only be of number type".to_string(),
2449 ))
2450 }
2451 }
2452 } else {
2453 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2454 };
2455 self.expect_token(&Token::RParen)?;
2456 if is_ceil {
2457 Ok(Expr::Ceil {
2458 expr: Box::new(expr),
2459 field,
2460 })
2461 } else {
2462 Ok(Expr::Floor {
2463 expr: Box::new(expr),
2464 field,
2465 })
2466 }
2467 }
2468
2469 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2470 let between_prec = self.dialect.prec_value(Precedence::Between);
2471 let position_expr = self.maybe_parse(|p| {
2472 p.expect_token(&Token::LParen)?;
2474
2475 let expr = p.parse_subexpr(between_prec)?;
2477 p.expect_keyword_is(Keyword::IN)?;
2478 let from = p.parse_expr()?;
2479 p.expect_token(&Token::RParen)?;
2480 Ok(Expr::Position {
2481 expr: Box::new(expr),
2482 r#in: Box::new(from),
2483 })
2484 })?;
2485 match position_expr {
2486 Some(expr) => Ok(expr),
2487 None => self.parse_function(ObjectName::from(vec![ident])),
2490 }
2491 }
2492
    /// Parse `SUBSTRING(expr [FROM start] [FOR len])` and the comma form
    /// `SUBSTR(expr, start, len)`. Which entry keyword was used is recorded
    /// in `shorthand`; `special` records whether the comma syntax separated
    /// the start position.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            _ => {
                self.prev_token();
                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // A comma after the first argument selects the comma syntax.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        // The length may follow either FOR or another comma.
        let mut to_expr = None;
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2525
2526 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2527 self.expect_token(&Token::LParen)?;
2529 let expr = self.parse_expr()?;
2530 self.expect_keyword_is(Keyword::PLACING)?;
2531 let what_expr = self.parse_expr()?;
2532 self.expect_keyword_is(Keyword::FROM)?;
2533 let from_expr = self.parse_expr()?;
2534 let mut for_expr = None;
2535 if self.parse_keyword(Keyword::FOR) {
2536 for_expr = Some(self.parse_expr()?);
2537 }
2538 self.expect_token(&Token::RParen)?;
2539
2540 Ok(Expr::Overlay {
2541 expr: Box::new(expr),
2542 overlay_what: Box::new(what_expr),
2543 overlay_from: Box::new(from_expr),
2544 overlay_for: for_expr.map(Box::new),
2545 })
2546 }
2547
    /// Parses the parenthesized body of a TRIM expression, e.g.
    /// `TRIM([BOTH|LEADING|TRAILING] [<chars> FROM] <expr>)` or the comma
    /// form `TRIM(<expr>, <chars>...)` on supporting dialects.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        // Optional BOTH/LEADING/TRAILING specifier before the expression.
        if let Token::Word(word) = self.peek_token().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING]
                .iter()
                .any(|d| word.keyword == *d)
            {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // `TRIM(<chars> FROM <expr>)`: what we parsed first is the set of
            // characters to trim, not the target string.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | BigQueryDialect | GenericDialect)
        {
            // NOTE(review): the comma is consumed before the dialect check,
            // so on other dialects `TRIM(x, y)` errors later at the `)`
            // expectation rather than at the comma — confirm this is intended.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            // Plain `TRIM(<expr>)`, possibly with a leading specifier.
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
2596
2597 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2598 let next_token = self.next_token();
2599 match &next_token.token {
2600 Token::Word(w) => match w.keyword {
2601 Keyword::BOTH => Ok(TrimWhereField::Both),
2602 Keyword::LEADING => Ok(TrimWhereField::Leading),
2603 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2604 _ => self.expected("trim_where field", next_token)?,
2605 },
2606 _ => self.expected("trim_where field", next_token),
2607 }
2608 }
2609
2610 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2613 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2614 self.expect_token(&Token::RBracket)?;
2615 Ok(Expr::Array(Array { elem: exprs, named }))
2616 }
2617
    /// Parses the optional `ON OVERFLOW ...` clause of LISTAGG:
    /// `ON OVERFLOW ERROR` or `ON OVERFLOW TRUNCATE ['filler'] WITH|WITHOUT COUNT`.
    ///
    /// Returns `Ok(None)` when no `ON OVERFLOW` keywords are present.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler literal is optional; WITH/WITHOUT directly after
                // TRUNCATE means it was omitted.
                let filler = match self.peek_token().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token(),
                    )?,
                };
                // Exactly one of WITH COUNT / WITHOUT COUNT must follow.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
2651
    /// Parses a date/time unit keyword (YEAR, MONTH, ..., TIMEZONE_REGION) as
    /// used by EXTRACT, CEIL/FLOOR and INTERVAL qualifiers.
    ///
    /// Dialect hooks additionally allow custom identifiers
    /// (`allow_extract_custom`) and single-quoted units
    /// (`allow_extract_single_quotes`).
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery allows a parenthesized weekday: WEEK(MONDAY).
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the common misspelling and the correct spelling are
                // distinct keywords/variants.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                _ if self.dialect.allow_extract_custom() => {
                    // Dialect permits arbitrary units: re-read the word as an
                    // identifier.
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                // e.g. EXTRACT('year' FROM ...) on supporting dialects.
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
2728
2729 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2730 match self.peek_token().token {
2731 Token::Word(w) => match w.keyword {
2732 Keyword::EXISTS => {
2733 let negated = true;
2734 let _ = self.parse_keyword(Keyword::EXISTS);
2735 self.parse_exists_expr(negated)
2736 }
2737 _ => Ok(Expr::UnaryOp {
2738 op: UnaryOperator::Not,
2739 expr: Box::new(
2740 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2741 ),
2742 }),
2743 },
2744 _ => Ok(Expr::UnaryOp {
2745 op: UnaryOperator::Not,
2746 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2747 }),
2748 }
2749 }
2750
2751 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2761 let token = self.expect_token(&Token::LBrace)?;
2762
2763 if let Some(fn_expr) = self.maybe_parse_odbc_fn_body()? {
2764 self.expect_token(&Token::RBrace)?;
2765 return Ok(fn_expr);
2766 }
2767
2768 if self.dialect.supports_dictionary_syntax() {
2769 self.prev_token(); return self.parse_duckdb_struct_literal();
2771 }
2772
2773 self.expected("an expression", token)
2774 }
2775
2776 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2782 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2783
2784 self.expect_keyword_is(Keyword::AGAINST)?;
2785
2786 self.expect_token(&Token::LParen)?;
2787
2788 let match_value = self.parse_value()?.value;
2790
2791 let in_natural_language_mode_keywords = &[
2792 Keyword::IN,
2793 Keyword::NATURAL,
2794 Keyword::LANGUAGE,
2795 Keyword::MODE,
2796 ];
2797
2798 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2799
2800 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2801
2802 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2803 if self.parse_keywords(with_query_expansion_keywords) {
2804 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2805 } else {
2806 Some(SearchModifier::InNaturalLanguageMode)
2807 }
2808 } else if self.parse_keywords(in_boolean_mode_keywords) {
2809 Some(SearchModifier::InBooleanMode)
2810 } else if self.parse_keywords(with_query_expansion_keywords) {
2811 Some(SearchModifier::WithQueryExpansion)
2812 } else {
2813 None
2814 };
2815
2816 self.expect_token(&Token::RParen)?;
2817
2818 Ok(Expr::MatchAgainst {
2819 columns,
2820 match_value,
2821 opt_search_modifier,
2822 })
2823 }
2824
    /// Parses the body of an INTERVAL expression (the keyword is already
    /// consumed): a value, an optional leading unit with optional precision,
    /// and an optional `TO <unit>` range.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a unit (e.g. `INTERVAL 1 DAY`) may parse a
        // full expression for the value; otherwise only a prefix expression
        // is taken so a following unit keyword is not swallowed.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Optional leading unit, e.g. the DAY in `INTERVAL '1' DAY`.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token().span.start
            );
        } else {
            None
        };

        // SECOND may carry `(leading precision, fractional seconds
        // precision)`; any other unit takes a single optional precision plus
        // an optional `TO <unit>` with its own fractional precision when the
        // trailing unit is SECOND.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
2907
2908 pub fn next_token_is_temporal_unit(&mut self) -> bool {
2911 if let Token::Word(word) = self.peek_token().token {
2912 matches!(
2913 word.keyword,
2914 Keyword::YEAR
2915 | Keyword::YEARS
2916 | Keyword::MONTH
2917 | Keyword::MONTHS
2918 | Keyword::WEEK
2919 | Keyword::WEEKS
2920 | Keyword::DAY
2921 | Keyword::DAYS
2922 | Keyword::HOUR
2923 | Keyword::HOURS
2924 | Keyword::MINUTE
2925 | Keyword::MINUTES
2926 | Keyword::SECOND
2927 | Keyword::SECONDS
2928 | Keyword::CENTURY
2929 | Keyword::DECADE
2930 | Keyword::DOW
2931 | Keyword::DOY
2932 | Keyword::EPOCH
2933 | Keyword::ISODOW
2934 | Keyword::ISOYEAR
2935 | Keyword::JULIAN
2936 | Keyword::MICROSECOND
2937 | Keyword::MICROSECONDS
2938 | Keyword::MILLENIUM
2939 | Keyword::MILLENNIUM
2940 | Keyword::MILLISECOND
2941 | Keyword::MILLISECONDS
2942 | Keyword::NANOSECOND
2943 | Keyword::NANOSECONDS
2944 | Keyword::QUARTER
2945 | Keyword::TIMEZONE
2946 | Keyword::TIMEZONE_HOUR
2947 | Keyword::TIMEZONE_MINUTE
2948 )
2949 } else {
2950 false
2951 }
2952 }
2953
2954 fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
2962 self.prev_token();
2964 let (fields, trailing_bracket) =
2965 self.parse_struct_type_def(Self::parse_struct_field_def)?;
2966 if trailing_bracket.0 {
2967 return parser_err!(
2968 "unmatched > in STRUCT literal",
2969 self.peek_token().span.start
2970 );
2971 }
2972
2973 self.expect_token(&Token::LParen)?;
2975 let values = self
2976 .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
2977 self.expect_token(&Token::RParen)?;
2978
2979 Ok(Expr::Struct { values, fields })
2980 }
2981
2982 fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
2996 let expr = self.parse_expr()?;
2997 if self.parse_keyword(Keyword::AS) {
2998 if typed_syntax {
2999 return parser_err!("Typed syntax does not allow AS", {
3000 self.prev_token();
3001 self.peek_token().span.start
3002 });
3003 }
3004 let field_name = self.parse_identifier()?;
3005 Ok(Expr::Named {
3006 expr: expr.into(),
3007 name: field_name,
3008 })
3009 } else {
3010 Ok(expr)
3011 }
3012 }
3013
    /// Parses `STRUCT<field, ...>` type information, using `elem_parser` for
    /// each field. The angle-bracketed field list is optional.
    ///
    /// Returns the fields plus a flag saying whether the closing `>` was
    /// taken from a `>>` token (the extra `>` then belongs to an enclosing
    /// struct definition).
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        let start_token = self.peek_token();
        self.expect_keyword_is(Keyword::STRUCT)?;

        // Bare `STRUCT` with no `<...>` type list is allowed.
        if Token::Lt != self.peek_token() {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            if !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }

            // A `>>` consumed inside a field, followed by a comma, means the
            // angle brackets were unbalanced.
            if trailing_bracket.0 {
                return parser_err!("unmatched > in STRUCT definition", start_token.span.start);
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3063
3064 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3066 self.expect_keyword_is(Keyword::STRUCT)?;
3067 self.expect_token(&Token::LParen)?;
3068 let struct_body = self.parse_comma_separated(|parser| {
3069 let field_name = parser.parse_identifier()?;
3070 let field_type = parser.parse_data_type()?;
3071
3072 Ok(StructField {
3073 field_name: Some(field_name),
3074 field_type,
3075 })
3076 });
3077 self.expect_token(&Token::RParen)?;
3078 struct_body
3079 }
3080
3081 fn parse_struct_field_def(
3091 &mut self,
3092 ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3093 let is_anonymous_field = !matches!(
3096 (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3097 (Token::Word(_), Token::Word(_))
3098 );
3099
3100 let field_name = if is_anonymous_field {
3101 None
3102 } else {
3103 Some(self.parse_identifier()?)
3104 };
3105
3106 let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3107
3108 Ok((
3109 StructField {
3110 field_name,
3111 field_type,
3112 },
3113 trailing_bracket,
3114 ))
3115 }
3116
3117 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3127 self.expect_keyword_is(Keyword::UNION)?;
3128
3129 self.expect_token(&Token::LParen)?;
3130
3131 let fields = self.parse_comma_separated(|p| {
3132 Ok(UnionField {
3133 field_name: p.parse_identifier()?,
3134 field_type: p.parse_data_type()?,
3135 })
3136 })?;
3137
3138 self.expect_token(&Token::RParen)?;
3139
3140 Ok(fields)
3141 }
3142
3143 fn parse_duckdb_struct_literal(&mut self) -> Result<Expr, ParserError> {
3153 self.expect_token(&Token::LBrace)?;
3154
3155 let fields =
3156 self.parse_comma_separated0(Self::parse_duckdb_dictionary_field, Token::RBrace)?;
3157
3158 self.expect_token(&Token::RBrace)?;
3159
3160 Ok(Expr::Dictionary(fields))
3161 }
3162
3163 fn parse_duckdb_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3173 let key = self.parse_identifier()?;
3174
3175 self.expect_token(&Token::Colon)?;
3176
3177 let expr = self.parse_expr()?;
3178
3179 Ok(DictionaryField {
3180 key,
3181 value: Box::new(expr),
3182 })
3183 }
3184
3185 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3195 self.expect_token(&Token::LBrace)?;
3196 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3197 self.expect_token(&Token::RBrace)?;
3198 Ok(Expr::Map(Map { entries: fields }))
3199 }
3200
3201 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3211 let key = self.parse_expr()?;
3212
3213 self.expect_token(&Token::Colon)?;
3214
3215 let value = self.parse_expr()?;
3216
3217 Ok(MapEntry {
3218 key: Box::new(key),
3219 value: Box::new(value),
3220 })
3221 }
3222
3223 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3233 self.expect_keyword_is(Keyword::MAP)?;
3234 self.expect_token(&Token::LParen)?;
3235 let key_data_type = self.parse_data_type()?;
3236 self.expect_token(&Token::Comma)?;
3237 let value_data_type = self.parse_data_type()?;
3238 self.expect_token(&Token::RParen)?;
3239
3240 Ok((key_data_type, value_data_type))
3241 }
3242
3243 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3253 self.expect_keyword_is(Keyword::TUPLE)?;
3254 self.expect_token(&Token::LParen)?;
3255 let mut field_defs = vec![];
3256 loop {
3257 let (def, _) = self.parse_struct_field_def()?;
3258 field_defs.push(def);
3259 if !self.consume_token(&Token::Comma) {
3260 break;
3261 }
3262 }
3263 self.expect_token(&Token::RParen)?;
3264
3265 Ok(field_defs)
3266 }
3267
3268 fn expect_closing_angle_bracket(
3273 &mut self,
3274 trailing_bracket: MatchedTrailingBracket,
3275 ) -> Result<MatchedTrailingBracket, ParserError> {
3276 let trailing_bracket = if !trailing_bracket.0 {
3277 match self.peek_token().token {
3278 Token::Gt => {
3279 self.next_token();
3280 false.into()
3281 }
3282 Token::ShiftRight => {
3283 self.next_token();
3284 true.into()
3285 }
3286 _ => return self.expected(">", self.peek_token()),
3287 }
3288 } else {
3289 false.into()
3290 };
3291
3292 Ok(trailing_bracket)
3293 }
3294
    /// Parses an infix (or postfix) continuation of `expr` at the given
    /// precedence: binary operators, `IS ...`, `[NOT] IN/BETWEEN/LIKE/...`,
    /// `::` casts, PostgreSQL `!` factorial, and JSON access.
    ///
    /// The dialect is consulted first and may take over parsing entirely.
    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
        // Dialects get first right of refusal on infix parsing.
        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
            return infix;
        }

        let dialect = self.dialect;

        self.advance_token();
        let tok = self.get_current_token();
        let tok_index = self.get_current_index();
        let span = tok.span;
        // Map the current token to a plain binary operator where possible;
        // dialect-guarded arms fall through to None on other dialects.
        let regular_binary_operator = match &tok.token {
            Token::Spaceship => Some(BinaryOperator::Spaceship),
            Token::DoubleEq => Some(BinaryOperator::Eq),
            Token::Assignment => Some(BinaryOperator::Assignment),
            Token::Eq => Some(BinaryOperator::Eq),
            Token::Neq => Some(BinaryOperator::NotEq),
            Token::Gt => Some(BinaryOperator::Gt),
            Token::GtEq => Some(BinaryOperator::GtEq),
            Token::Lt => Some(BinaryOperator::Lt),
            Token::LtEq => Some(BinaryOperator::LtEq),
            Token::Plus => Some(BinaryOperator::Plus),
            Token::Minus => Some(BinaryOperator::Minus),
            Token::Mul => Some(BinaryOperator::Multiply),
            Token::Mod => Some(BinaryOperator::Modulo),
            Token::StringConcat => Some(BinaryOperator::StringConcat),
            Token::Pipe => Some(BinaryOperator::BitwiseOr),
            Token::Caret => {
                // `^` is exponentiation in PostgreSQL, XOR elsewhere.
                if dialect_is!(dialect is PostgreSqlDialect) {
                    Some(BinaryOperator::PGExp)
                } else {
                    Some(BinaryOperator::BitwiseXor)
                }
            }
            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
            Token::Div => Some(BinaryOperator::Divide),
            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                Some(BinaryOperator::DuckIntegerDivide)
            }
            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
                Some(BinaryOperator::PGBitwiseShiftLeft)
            }
            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
                Some(BinaryOperator::PGBitwiseShiftRight)
            }
            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
                Some(BinaryOperator::PGBitwiseXor)
            }
            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
                Some(BinaryOperator::PGOverlap)
            }
            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGOverlap)
            }
            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                Some(BinaryOperator::PGStartsWith)
            }
            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
            Token::Arrow => Some(BinaryOperator::Arrow),
            Token::LongArrow => Some(BinaryOperator::LongArrow),
            Token::HashArrow => Some(BinaryOperator::HashArrow),
            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
            Token::AtArrow => Some(BinaryOperator::AtArrow),
            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
            Token::HashMinus => Some(BinaryOperator::HashMinus),
            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
            Token::AtAt => Some(BinaryOperator::AtAt),
            Token::Question => Some(BinaryOperator::Question),
            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::DoubleHash)
            }

            // Geometric operators, gated behind the dialect capability.
            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::AndLt)
            }
            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::AndGt)
            }
            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::QuestionDash)
            }
            Token::AmpersandLeftAngleBracketVerticalBar
                if self.dialect.supports_geometric_types() =>
            {
                Some(BinaryOperator::AndLtPipe)
            }
            Token::VerticalBarAmpersandRightAngleBracket
                if self.dialect.supports_geometric_types() =>
            {
                Some(BinaryOperator::PipeAndGt)
            }
            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::LtDashGt)
            }
            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::LtCaret)
            }
            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::GtCaret)
            }
            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::QuestionHash)
            }
            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::QuestionDoublePipe)
            }
            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::QuestionDashPipe)
            }
            Token::TildeEqual if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::TildeEq)
            }
            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::LtLtPipe)
            }
            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
                Some(BinaryOperator::PipeGtGt)
            }
            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),

            Token::Word(w) => match w.keyword {
                Keyword::AND => Some(BinaryOperator::And),
                Keyword::OR => Some(BinaryOperator::Or),
                Keyword::XOR => Some(BinaryOperator::Xor),
                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    // PostgreSQL `OPERATOR(schema.op)` custom operator call;
                    // tokens between the parens are collected verbatim.
                    self.expect_token(&Token::LParen)?;
                    let mut idents = vec![];
                    loop {
                        self.advance_token();
                        idents.push(self.get_current_token().to_string());
                        if !self.consume_token(&Token::Period) {
                            break;
                        }
                    }
                    self.expect_token(&Token::RParen)?;
                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
                }
                _ => None,
            },
            _ => None,
        };

        // Re-fetch the operator token by index: the OPERATOR(...) arm above
        // may have advanced the token stream.
        let tok = self.token_at(tok_index);
        if let Some(op) = regular_binary_operator {
            // `<op> ANY/ALL/SOME (<subquery or expr>)` quantified comparison.
            if let Some(keyword) =
                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
            {
                self.expect_token(&Token::LParen)?;
                let right = if self.peek_sub_query() {
                    // Subquery: rewind to the LParen so the subexpression
                    // parser consumes the parentheses (and the RParen) itself.
                    self.prev_token(); self.parse_subexpr(precedence)?
                } else {
                    let right = self.parse_subexpr(precedence)?;
                    self.expect_token(&Token::RParen)?;
                    right
                };

                // Outside PostgreSQL only comparison operators may be
                // quantified with ANY/ALL/SOME.
                if !dialect_of!(self is PostgreSqlDialect) && !matches!(
                    op,
                    BinaryOperator::Gt
                        | BinaryOperator::Lt
                        | BinaryOperator::GtEq
                        | BinaryOperator::LtEq
                        | BinaryOperator::Eq
                        | BinaryOperator::NotEq
                ) {
                    return parser_err!(
                        format!(
                            "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}"
                        ),
                        span.start
                    );
                };

                Ok(match keyword {
                    Keyword::ALL => Expr::AllOp {
                        left: Box::new(expr),
                        compare_op: op,
                        right: Box::new(right),
                    },
                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
                        left: Box::new(expr),
                        compare_op: op,
                        right: Box::new(right),
                        is_some: keyword == Keyword::SOME,
                    },
                    _ => unreachable!(),
                })
            } else {
                // Plain binary operation.
                Ok(Expr::BinaryOp {
                    left: Box::new(expr),
                    op,
                    right: Box::new(self.parse_subexpr(precedence)?),
                })
            }
        } else if let Token::Word(w) = &tok.token {
            match w.keyword {
                // `IS [NOT] NULL/TRUE/FALSE/UNKNOWN/DISTINCT FROM/NORMALIZED`.
                Keyword::IS => {
                    if self.parse_keyword(Keyword::NULL) {
                        Ok(Expr::IsNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
                        Ok(Expr::IsNotNull(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::TRUE]) {
                        Ok(Expr::IsTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
                        Ok(Expr::IsNotTrue(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::FALSE]) {
                        Ok(Expr::IsFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
                        Ok(Expr::IsNotFalse(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
                        Ok(Expr::IsUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
                        Ok(Expr::IsNotUnknown(Box::new(expr)))
                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
                    {
                        let expr2 = self.parse_expr()?;
                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
                        Ok(is_normalized)
                    } else {
                        self.expected(
                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
                            self.peek_token(),
                        )
                    }
                }
                Keyword::AT => {
                    // `<timestamp> AT TIME ZONE <tz expr>`.
                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                    Ok(Expr::AtTimeZone {
                        timestamp: Box::new(expr),
                        time_zone: Box::new(self.parse_subexpr(precedence)?),
                    })
                }
                // `[NOT] IN/BETWEEN/LIKE/ILIKE/SIMILAR TO/REGEXP/RLIKE`:
                // rewind so the optional NOT can be re-read uniformly.
                Keyword::NOT
                | Keyword::IN
                | Keyword::BETWEEN
                | Keyword::LIKE
                | Keyword::ILIKE
                | Keyword::SIMILAR
                | Keyword::REGEXP
                | Keyword::RLIKE => {
                    self.prev_token();
                    let negated = self.parse_keyword(Keyword::NOT);
                    let regexp = self.parse_keyword(Keyword::REGEXP);
                    let rlike = self.parse_keyword(Keyword::RLIKE);
                    if regexp || rlike {
                        Ok(Expr::RLike {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            regexp,
                        })
                    } else if self.parse_keyword(Keyword::IN) {
                        self.parse_in(expr, negated)
                    } else if self.parse_keyword(Keyword::BETWEEN) {
                        self.parse_between(expr, negated)
                    } else if self.parse_keyword(Keyword::LIKE) {
                        Ok(Expr::Like {
                            negated,
                            any: self.parse_keyword(Keyword::ANY),
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keyword(Keyword::ILIKE) {
                        Ok(Expr::ILike {
                            negated,
                            any: self.parse_keyword(Keyword::ANY),
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
                        Ok(Expr::SimilarTo {
                            negated,
                            expr: Box::new(expr),
                            pattern: Box::new(
                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
                            ),
                            escape_char: self.parse_escape_char()?,
                        })
                    } else {
                        self.expected("IN or BETWEEN after NOT", self.peek_token())
                    }
                }
                _ => parser_err!(
                    format!("No infix parser for token {:?}", tok.token),
                    tok.span.start
                ),
            }
        } else if Token::DoubleColon == *tok {
            // `expr::type` cast.
            Ok(Expr::Cast {
                kind: CastKind::DoubleColon,
                expr: Box::new(expr),
                data_type: self.parse_data_type()?,
                format: None,
            })
        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
            // Postfix `!` factorial.
            Ok(Expr::UnaryOp {
                op: UnaryOperator::PGPostfixFactorial,
                expr: Box::new(expr),
            })
        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
        {
            // JSON path access; rewind so the path parser sees the opener.
            self.prev_token();
            self.parse_json_access(expr)
        } else {
            parser_err!(
                format!("No infix parser for token {:?}", tok.token),
                tok.span.start
            )
        }
    }
3643
3644 pub fn parse_escape_char(&mut self) -> Result<Option<String>, ParserError> {
3646 if self.parse_keyword(Keyword::ESCAPE) {
3647 Ok(Some(self.parse_literal_string()?))
3648 } else {
3649 Ok(None)
3650 }
3651 }
3652
    /// Parses the interior of a subscript (after `[`): either a single index
    /// expression or a `[lower:upper:stride]` slice, any part of which may be
    /// omitted. Consumes the closing `]`.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // `[expr]` is a plain index; `[:]`-style with nothing else is a
        // fully open slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If we parsed a lower bound, the separating `:` has not been
        // consumed yet.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[lower:]` — no upper bound (early return).
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_expr()?)
        };

        // `[lower:upper]` — no stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // `[lower:upper:stride]`, where the stride may itself be omitted.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // Only an explicit stride leaves the `]` still unconsumed.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
3725
3726 pub fn parse_multi_dim_subscript(
3728 &mut self,
3729 chain: &mut Vec<AccessExpr>,
3730 ) -> Result<(), ParserError> {
3731 while self.consume_token(&Token::LBracket) {
3732 self.parse_subscript(chain)?;
3733 }
3734 Ok(())
3735 }
3736
3737 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3741 let subscript = self.parse_subscript_inner()?;
3742 chain.push(AccessExpr::Subscript(subscript));
3743 Ok(())
3744 }
3745
    /// Parses a single object key in a JSON path: either a word (unquoted or
    /// double-quoted) or a double-quoted string literal.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            // Only double quotes (or no quotes) are valid here; other quote
            // styles fall through to the error arm.
            Token::Word(Word {
                value,
                quote_style: quote_style @ (Some('"') | None),
                // The keyword classification is irrelevant for path keys.
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
3769
3770 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3771 let path = self.parse_json_path()?;
3772 Ok(Expr::JsonAccess {
3773 value: Box::new(expr),
3774 path,
3775 })
3776 }
3777
    /// Parses a JSON path such as `:data.value[1]`: `:` may only start a
    /// path, `.` may only continue one, and `[expr]` brackets may appear
    /// anywhere.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:` only opens a path (first element).
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.` only continues an already-started path.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Any other token ends the path; push it back for the caller.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers invoke this only after seeing a path-start token, so the
        // loop must have consumed at least one element.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
3804
    /// Parses the tail of `[NOT] IN ...`: `IN UNNEST(expr)`, `IN (subquery)`,
    /// or `IN (expr, ...)`. The `IN` keyword has already been consumed.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // `IN UNNEST(array_expression)` form.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; `maybe_parse` rewinds on failure so we can
        // fall back to parsing an expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query_body(p.dialect.prec_unknown()))? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects permit an empty list: `IN ()`.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
3839
3840 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3842 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3845 self.expect_keyword_is(Keyword::AND)?;
3846 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3847 Ok(Expr::Between {
3848 expr: Box::new(expr),
3849 negated,
3850 low: Box::new(low),
3851 high: Box::new(high),
3852 })
3853 }
3854
3855 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3857 Ok(Expr::Cast {
3858 kind: CastKind::DoubleColon,
3859 expr: Box::new(expr),
3860 data_type: self.parse_data_type()?,
3861 format: None,
3862 })
3863 }
3864
    /// Returns the precedence of the next (lookahead) token, delegating to
    /// the dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
3869
    /// Returns the token at `index`, or the shared EOF token when `index` is
    /// out of range.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
3875
    /// Returns a clone of the next non-whitespace token without consuming it.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
3883
    /// Returns a reference to the next non-whitespace token without
    /// consuming it (allocation-free variant of `peek_token`).
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
3889
    /// Returns the next N non-whitespace tokens (spans dropped), padding with
    /// EOF when the input runs out; does not advance the parser.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
3916
3917 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
3922 let mut index = self.index;
3923 core::array::from_fn(|_| loop {
3924 let token = self.tokens.get(index);
3925 index += 1;
3926 if let Some(TokenWithSpan {
3927 token: Token::Whitespace(_),
3928 span: _,
3929 }) = token
3930 {
3931 continue;
3932 }
3933 break token.cloned().unwrap_or(TokenWithSpan {
3934 token: Token::EOF,
3935 span: Span::empty(),
3936 });
3937 })
3938 }
3939
3940 pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
3945 let mut index = self.index;
3946 core::array::from_fn(|_| loop {
3947 let token = self.tokens.get(index);
3948 index += 1;
3949 if let Some(TokenWithSpan {
3950 token: Token::Whitespace(_),
3951 span: _,
3952 }) = token
3953 {
3954 continue;
3955 }
3956 break token.unwrap_or(&EOF_TOKEN);
3957 })
3958 }
3959
    /// Returns a clone of the nth (0-based) upcoming non-whitespace token.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
3964
3965 pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
3967 let mut index = self.index;
3968 loop {
3969 index += 1;
3970 match self.tokens.get(index - 1) {
3971 Some(TokenWithSpan {
3972 token: Token::Whitespace(_),
3973 span: _,
3974 }) => continue,
3975 non_whitespace => {
3976 if n == 0 {
3977 return non_whitespace.unwrap_or(&EOF_TOKEN);
3978 }
3979 n -= 1;
3980 }
3981 }
3982 }
3983 }
3984
    /// Returns the immediately-next token, including whitespace, without
    /// consuming it.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
3990
3991 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
3993 self.tokens
3994 .get(self.index + n)
3995 .cloned()
3996 .unwrap_or(TokenWithSpan {
3997 token: Token::EOF,
3998 span: Span::empty(),
3999 })
4000 }
4001
4002 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4006 let index = self.index;
4007 let matched = self.parse_keywords(expected);
4008 self.index = index;
4009 matched
4010 }
4011
    /// Consumes and returns the next non-whitespace token (EOF at end of
    /// input).
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4020
    /// Returns the index of the current (most recently consumed) token;
    /// saturates at 0 before any token has been consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4028
4029 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4031 self.index += 1;
4032 self.tokens.get(self.index - 1)
4033 }
4034
4035 pub fn advance_token(&mut self) {
4039 loop {
4040 self.index += 1;
4041 match self.tokens.get(self.index - 1) {
4042 Some(TokenWithSpan {
4043 token: Token::Whitespace(_),
4044 span: _,
4045 }) => continue,
4046 _ => break,
4047 }
4048 }
4049 }
4050
    /// Returns the most recently consumed token (EOF before the first
    /// advance, thanks to the saturating subtraction).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4057
    /// Returns the token consumed immediately before the current one.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4064
    /// Returns the token that would be consumed next, including whitespace.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4071
4072 pub fn prev_token(&mut self) {
4079 loop {
4080 assert!(self.index > 0);
4081 self.index -= 1;
4082 if let Some(TokenWithSpan {
4083 token: Token::Whitespace(_),
4084 span: _,
4085 }) = self.tokens.get(self.index)
4086 {
4087 continue;
4088 }
4089 return;
4090 }
4091 }
4092
    /// Builds an `Expected: {expected}, found: {found}` parser error located
    /// at the found token's starting position.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4100
    /// Borrowing variant of [`Self::expected`]: same error, but takes the
    /// found token by reference.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4108
    /// Builds an "Expected" error against the token stored at `index`
    /// (EOF when `index` is out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4117
4118 #[must_use]
4121 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4122 if self.peek_keyword(expected) {
4123 self.advance_token();
4124 true
4125 } else {
4126 false
4127 }
4128 }
4129
    /// Returns true if the next non-whitespace token is a word whose keyword
    /// classification matches `expected`; does not consume anything.
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4134
4135 pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4143 match &self.peek_token_ref().token {
4144 Token::Word(w) if expected == w.keyword => {
4145 for (idx, token) in tokens.iter().enumerate() {
4146 if self.peek_nth_token_ref(idx + 1).token != *token {
4147 return false;
4148 }
4149 }
4150 for _ in 0..(tokens.len() + 1) {
4152 self.advance_token();
4153 }
4154 true
4155 }
4156 _ => false,
4157 }
4158 }
4159
4160 #[must_use]
4164 pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4165 let index = self.index;
4166 for &keyword in keywords {
4167 if !self.parse_keyword(keyword) {
4168 self.index = index;
4171 return false;
4172 }
4173 }
4174 true
4175 }
4176
4177 #[must_use]
4180 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4181 for keyword in keywords {
4182 if self.peek_keyword(*keyword) {
4183 return Some(*keyword);
4184 }
4185 }
4186 None
4187 }
4188
    /// If the next token is a word matching one of `keywords`, consumes it
    /// and returns the matched keyword; otherwise leaves the stream untouched
    /// and returns `None`.
    #[must_use]
    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
        match &self.peek_token_ref().token {
            Token::Word(w) => {
                keywords
                    .iter()
                    .find(|keyword| **keyword == w.keyword)
                    .map(|keyword| {
                        self.advance_token();
                        *keyword
                    })
            }
            _ => None,
        }
    }
4207
4208 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4211 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4212 Ok(keyword)
4213 } else {
4214 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4215 self.expected_ref(
4216 &format!("one of {}", keywords.join(" or ")),
4217 self.peek_token_ref(),
4218 )
4219 }
4220 }
4221
4222 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4227 if self.parse_keyword(expected) {
4228 Ok(self.get_current_token().clone())
4229 } else {
4230 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4231 }
4232 }
4233
4234 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4240 if self.parse_keyword(expected) {
4241 Ok(())
4242 } else {
4243 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4244 }
4245 }
4246
4247 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4250 for &kw in expected {
4251 self.expect_keyword_is(kw)?;
4252 }
4253 Ok(())
4254 }
4255
4256 #[must_use]
4260 pub fn consume_token(&mut self, expected: &Token) -> bool {
4261 if self.peek_token_ref() == expected {
4262 self.advance_token();
4263 true
4264 } else {
4265 false
4266 }
4267 }
4268
4269 #[must_use]
4273 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4274 let index = self.index;
4275 for token in tokens {
4276 if !self.consume_token(token) {
4277 self.index = index;
4278 return false;
4279 }
4280 }
4281 true
4282 }
4283
4284 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4286 if self.peek_token_ref() == expected {
4287 Ok(self.next_token())
4288 } else {
4289 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4290 }
4291 }
4292
4293 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4294 where
4295 <T as FromStr>::Err: Display,
4296 {
4297 s.parse::<T>().map_err(|e| {
4298 ParserError::ParserError(format!(
4299 "Could not parse '{s}' as {}: {e}{loc}",
4300 core::any::type_name::<T>()
4301 ))
4302 })
4303 }
4304
4305 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4307 let trailing_commas =
4313 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4314
4315 self.parse_comma_separated_with_trailing_commas(
4316 |p| p.parse_select_item(),
4317 trailing_commas,
4318 Self::is_reserved_for_column_alias,
4319 )
4320 }
4321
    /// Parses the comma-separated list of privileges in a GRANT/REVOKE
    /// statement, tolerating a trailing comma when the options allow it.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // A comma directly followed by a list terminator (ON, a
                // closing delimiter, `;`, or EOF) is a trailing comma.
                match self.peek_token().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4344
4345 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4347 let trailing_commas = self.dialect.supports_from_trailing_commas();
4348
4349 self.parse_comma_separated_with_trailing_commas(
4350 Parser::parse_table_and_joins,
4351 trailing_commas,
4352 |kw, _parser| {
4353 self.dialect
4354 .get_reserved_keywords_for_table_factor()
4355 .contains(kw)
4356 },
4357 )
4358 }
4359
    /// Returns true when the comma-separated list currently being parsed has
    /// ended.
    ///
    /// Consumes the separating comma when one is present. With
    /// `trailing_commas` enabled, a comma followed by a reserved keyword or a
    /// list terminator also ends the list (the lookahead token is pushed
    /// back so the caller sees it again).
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Peek via next_token/prev_token rather than peek_token_ref so
            // `is_reserved_keyword` can be handed `&mut self`.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4392
    /// List-end check using the parser's own trailing-comma option and the
    /// default column-alias reserved-word rule.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4401
    /// Parses one or more items produced by `f`, separated by commas, using
    /// the parser's trailing-comma option.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4413
4414 fn parse_comma_separated_with_trailing_commas<T, F, R>(
4419 &mut self,
4420 mut f: F,
4421 trailing_commas: bool,
4422 is_reserved_keyword: R,
4423 ) -> Result<Vec<T>, ParserError>
4424 where
4425 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4426 R: Fn(&Keyword, &mut Parser) -> bool,
4427 {
4428 let mut values = vec![];
4429 loop {
4430 values.push(f(self)?);
4431 if self.is_parse_comma_separated_end_with_trailing_commas(
4432 trailing_commas,
4433 &is_reserved_keyword,
4434 ) {
4435 break;
4436 }
4437 }
4438 Ok(values)
4439 }
4440
4441 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4443 where
4444 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4445 {
4446 let mut values = vec![];
4447 loop {
4448 values.push(f(self)?);
4449 if !self.consume_token(&Token::Period) {
4450 break;
4451 }
4452 }
4453 Ok(values)
4454 }
4455
4456 pub fn parse_keyword_separated<T, F>(
4458 &mut self,
4459 keyword: Keyword,
4460 mut f: F,
4461 ) -> Result<Vec<T>, ParserError>
4462 where
4463 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4464 {
4465 let mut values = vec![];
4466 loop {
4467 values.push(f(self)?);
4468 if !self.parse_keyword(keyword) {
4469 break;
4470 }
4471 }
4472 Ok(values)
4473 }
4474
4475 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4476 where
4477 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4478 {
4479 self.expect_token(&Token::LParen)?;
4480 let res = f(self)?;
4481 self.expect_token(&Token::RParen)?;
4482 Ok(res)
4483 }
4484
    /// Like [`Self::parse_comma_separated`], but also accepts an empty list
    /// (the next token already being `end_token`).
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        if self.peek_token().token == end_token {
            return Ok(vec![]);
        }

        // With trailing commas enabled, a lone `,` directly before the end
        // token also denotes an empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
4506
    /// Parses semicolon-terminated statements until EOF or until one of
    /// `terminal_keywords` appears (unquoted) at statement position.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // Quoted words are identifiers, never list terminators.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
4531
    /// Default list-termination rule: a keyword the dialect does not accept
    /// as a column alias ends a comma-separated list.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
4538
4539 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4543 where
4544 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4545 {
4546 match self.try_parse(f) {
4547 Ok(t) => Ok(Some(t)),
4548 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4549 _ => Ok(None),
4550 }
4551 }
4552
4553 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4555 where
4556 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4557 {
4558 let index = self.index;
4559 match f(self) {
4560 Ok(t) => Ok(t),
4561 Err(e) => {
4562 self.index = index;
4564 Err(e)
4565 }
4566 }
4567 }
4568
    /// Parses an optional `ALL` / `DISTINCT [ON (...)]` set quantifier.
    ///
    /// Returns `None` for `ALL` or when neither keyword is present, and
    /// errors if both `ALL` and `DISTINCT` appear.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember the location up front so the both-keywords error points
        // at the start of the quantifier.
        let loc = self.peek_token().span.start;
        let all = self.parse_keyword(Keyword::ALL);
        let distinct = self.parse_keyword(Keyword::DISTINCT);
        if !distinct {
            return Ok(None);
        }
        if all {
            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
        }
        let on = self.parse_keyword(Keyword::ON);
        if !on {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // `DISTINCT ON ()` — empty column list; push the `)` back so the
        // unconditional expect below consumes it.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
4596
    /// Parses a `CREATE ...` statement, dispatching on the object type.
    ///
    /// The order of keyword checks is load-bearing: modifier keywords
    /// (OR REPLACE, OR ALTER, LOCAL/GLOBAL, TRANSIENT, TEMP, PERSISTENT) are
    /// consumed up front, and the `or_replace` fallback error fires before
    /// the object types that cannot follow `CREATE OR REPLACE`.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // GLOBAL/LOCAL fold into a tri-state flag: Some(true)/Some(false)/None.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is only recognized for the DuckDB dialect.
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
        } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
            // Push the keyword back; parse_create_view re-reads it.
            self.prev_token();
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy()
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain()
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(or_alter, or_replace, false)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(or_alter, or_replace, true)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if or_replace {
            self.expected(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension()
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role()
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector()
        } else {
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
4669
    /// Parses the body of a `CREATE [OR REPLACE] [TEMPORARY|PERSISTENT]
    /// SECRET` statement; the `SECRET` keyword was consumed by the caller.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        // Both the secret name and the `IN <storage>` specifier are optional
        // and may appear (in either combination) before the `(` of the body.
        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token() != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A storage specifier may also follow the name: `name IN storage`.
            if storage_specifier.is_none()
                && self.peek_token() != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        // Remaining `key value` option pairs after `TYPE <type>`.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
4728
4729 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4731 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4732 if self.parse_keyword(Keyword::TABLE) {
4733 let table_name = self.parse_object_name(false)?;
4734 if self.peek_token().token != Token::EOF {
4735 if let Token::Word(word) = self.peek_token().token {
4736 if word.keyword == Keyword::OPTIONS {
4737 options = self.parse_options(Keyword::OPTIONS)?
4738 }
4739 };
4740
4741 if self.peek_token().token != Token::EOF {
4742 let (a, q) = self.parse_as_query()?;
4743 has_as = a;
4744 query = Some(q);
4745 }
4746
4747 Ok(Statement::Cache {
4748 table_flag,
4749 table_name,
4750 has_as,
4751 options,
4752 query,
4753 })
4754 } else {
4755 Ok(Statement::Cache {
4756 table_flag,
4757 table_name,
4758 has_as,
4759 options,
4760 query,
4761 })
4762 }
4763 } else {
4764 table_flag = Some(self.parse_object_name(false)?);
4765 if self.parse_keyword(Keyword::TABLE) {
4766 let table_name = self.parse_object_name(false)?;
4767 if self.peek_token() != Token::EOF {
4768 if let Token::Word(word) = self.peek_token().token {
4769 if word.keyword == Keyword::OPTIONS {
4770 options = self.parse_options(Keyword::OPTIONS)?
4771 }
4772 };
4773
4774 if self.peek_token() != Token::EOF {
4775 let (a, q) = self.parse_as_query()?;
4776 has_as = a;
4777 query = Some(q);
4778 }
4779
4780 Ok(Statement::Cache {
4781 table_flag,
4782 table_name,
4783 has_as,
4784 options,
4785 query,
4786 })
4787 } else {
4788 Ok(Statement::Cache {
4789 table_flag,
4790 table_name,
4791 has_as,
4792 options,
4793 query,
4794 })
4795 }
4796 } else {
4797 if self.peek_token() == Token::EOF {
4798 self.prev_token();
4799 }
4800 self.expected("a `TABLE` keyword", self.peek_token())
4801 }
4802 }
4803 }
4804
4805 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4807 match self.peek_token().token {
4808 Token::Word(word) => match word.keyword {
4809 Keyword::AS => {
4810 self.next_token();
4811 Ok((true, self.parse_query()?))
4812 }
4813 _ => Ok((false, self.parse_query()?)),
4814 },
4815 _ => self.expected("a QUERY statement", self.peek_token()),
4816 }
4817 }
4818
4819 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
4821 self.expect_keyword_is(Keyword::TABLE)?;
4822 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4823 let table_name = self.parse_object_name(false)?;
4824 Ok(Statement::UNCache {
4825 table_name,
4826 if_exists,
4827 })
4828 }
4829
    /// Parses `CREATE VIRTUAL TABLE <name> USING <module>[(args)]`;
    /// the `VIRTUAL` keyword was consumed by the caller.
    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(false)?;
        self.expect_keyword_is(Keyword::USING)?;
        let module_name = self.parse_identifier()?;
        // Module arguments are passed through uninterpreted as an optional
        // parenthesized column list.
        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
        Ok(Statement::CreateVirtualTable {
            name: table_name,
            if_not_exists,
            module_name,
            module_args,
        })
    }
4849
    /// Parses the body of `CREATE SCHEMA`, including the optional
    /// `DEFAULT COLLATE <expr>` and `OPTIONS(...)` clauses.
    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let schema_name = self.parse_schema_name()?;

        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let options = if self.peek_keyword(Keyword::OPTIONS) {
            Some(self.parse_options(Keyword::OPTIONS)?)
        } else {
            None
        };

        Ok(Statement::CreateSchema {
            schema_name,
            if_not_exists,
            options,
            default_collate_spec,
        })
    }
4874
4875 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
4876 if self.parse_keyword(Keyword::AUTHORIZATION) {
4877 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
4878 } else {
4879 let name = self.parse_object_name(false)?;
4880
4881 if self.parse_keyword(Keyword::AUTHORIZATION) {
4882 Ok(SchemaName::NamedAuthorization(
4883 name,
4884 self.parse_identifier()?,
4885 ))
4886 } else {
4887 Ok(SchemaName::Simple(name))
4888 }
4889 }
4890 }
4891
4892 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
4893 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4894 let db_name = self.parse_object_name(false)?;
4895 let mut location = None;
4896 let mut managed_location = None;
4897 loop {
4898 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
4899 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
4900 Some(Keyword::MANAGEDLOCATION) => {
4901 managed_location = Some(self.parse_literal_string()?)
4902 }
4903 _ => break,
4904 }
4905 }
4906 Ok(Statement::CreateDatabase {
4907 db_name,
4908 if_not_exists: ine,
4909 location,
4910 managed_location,
4911 })
4912 }
4913
4914 pub fn parse_optional_create_function_using(
4915 &mut self,
4916 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
4917 if !self.parse_keyword(Keyword::USING) {
4918 return Ok(None);
4919 };
4920 let keyword =
4921 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
4922
4923 let uri = self.parse_literal_string()?;
4924
4925 match keyword {
4926 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
4927 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
4928 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
4929 _ => self.expected(
4930 "JAR, FILE or ARCHIVE, got {:?}",
4931 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
4932 ),
4933 }
4934 }
4935
    /// Parses `CREATE FUNCTION` by delegating to the dialect-specific
    /// implementation; dialects without one produce a generic error.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
        } else if dialect_of!(self is DuckDbDialect) {
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
        } else {
            // Rewind so the error points at the FUNCTION keyword itself.
            self.prev_token();
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
4957
    /// Parses a PostgreSQL-style `CREATE FUNCTION` statement:
    /// `CREATE FUNCTION name ( [args] ) [RETURNS type] attribute...`
    ///
    /// The trailing attributes (`AS`, `LANGUAGE`, volatility, null-call
    /// behavior, `PARALLEL`, `RETURN`) may appear in any order, but each may
    /// be specified at most once.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // `()` is a valid, empty parameter list.
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Accumulates the order-independent attribute clauses; `None` means
        // "not seen yet".
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
        }
        let mut body = Body::default();
        loop {
            // Rejects duplicate clauses, e.g. `LANGUAGE a LANGUAGE b`.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
                    self.parse_create_function_body_string()?,
                ));
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                // The three volatility keywords are mutually exclusive and
                // share one slot, hence the shared duplicate-error label.
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::RETURN) {
                // RETURN and AS both provide the function body, so they share
                // the `function_body` slot.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No recognized attribute keyword: the attribute list ends.
                break;
            }
        }

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        }))
    }
5084
5085 fn parse_hive_create_function(
5089 &mut self,
5090 or_replace: bool,
5091 temporary: bool,
5092 ) -> Result<Statement, ParserError> {
5093 let name = self.parse_object_name(false)?;
5094 self.expect_keyword_is(Keyword::AS)?;
5095
5096 let as_ = self.parse_create_function_body_string()?;
5097 let using = self.parse_optional_create_function_using()?;
5098
5099 Ok(Statement::CreateFunction(CreateFunction {
5100 or_alter: false,
5101 or_replace,
5102 temporary,
5103 name,
5104 function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
5105 using,
5106 if_not_exists: false,
5107 args: None,
5108 return_type: None,
5109 behavior: None,
5110 called_on_null: None,
5111 parallel: None,
5112 language: None,
5113 determinism_specifier: None,
5114 options: None,
5115 remote_connection: None,
5116 }))
5117 }
5118
    /// Parses a BigQuery-style `CREATE FUNCTION` statement:
    /// `CREATE FUNCTION [IF NOT EXISTS] name (params) [RETURNS type]
    ///  [[NOT] DETERMINISTIC] [LANGUAGE lang] [REMOTE WITH CONNECTION conn]
    ///  [OPTIONS(...)] [AS body]`
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS(...) may appear either before or after the AS body; try the
        // "before" position first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions carry no AS body. For local functions, record on
        // which side of the body OPTIONS appeared (AsBeforeOptions vs.
        // AsAfterOptions) so the statement can be reproduced faithfully.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions(expr))
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
        }))
    }
5193
5194 fn parse_mssql_create_function(
5198 &mut self,
5199 or_alter: bool,
5200 or_replace: bool,
5201 temporary: bool,
5202 ) -> Result<Statement, ParserError> {
5203 let (name, args) = self.parse_create_function_name_and_params()?;
5204
5205 self.expect_keyword(Keyword::RETURNS)?;
5206 let return_type = Some(self.parse_data_type()?);
5207
5208 self.expect_keyword_is(Keyword::AS)?;
5209
5210 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5211 let statements = self.parse_statement_list(&[Keyword::END])?;
5212 let end_token = self.expect_keyword(Keyword::END)?;
5213
5214 let function_body = Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5215 begin_token: AttachedToken(begin_token),
5216 statements,
5217 end_token: AttachedToken(end_token),
5218 }));
5219
5220 Ok(Statement::CreateFunction(CreateFunction {
5221 or_alter,
5222 or_replace,
5223 temporary,
5224 if_not_exists: false,
5225 name,
5226 args: Some(args),
5227 return_type,
5228 function_body,
5229 language: None,
5230 determinism_specifier: None,
5231 options: None,
5232 remote_connection: None,
5233 using: None,
5234 behavior: None,
5235 called_on_null: None,
5236 parallel: None,
5237 }))
5238 }
5239
5240 fn parse_create_function_name_and_params(
5241 &mut self,
5242 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5243 let name = self.parse_object_name(false)?;
5244 let parse_function_param =
5245 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5246 let name = parser.parse_identifier()?;
5247 let data_type = parser.parse_data_type()?;
5248 Ok(OperateFunctionArg {
5249 mode: None,
5250 name: Some(name),
5251 data_type,
5252 default_expr: None,
5253 })
5254 };
5255 self.expect_token(&Token::LParen)?;
5256 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5257 self.expect_token(&Token::RParen)?;
5258 Ok((name, args))
5259 }
5260
    /// Parses a single PostgreSQL-style function argument:
    /// `[IN | OUT | INOUT] [name] data_type [DEFAULT expr | = expr]`
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        // Optional argument mode.
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else {
            None
        };

        // The argument name is optional and ambiguous with the type. Parse a
        // data type first; if it came back as a custom (unrecognized) type,
        // treat that token as the argument name and parse the real data type
        // that follows it.
        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        if let DataType::Custom(n, _) = &data_type {
            // Only the first part of the parsed object name becomes the
            // argument identifier.
            match n.0[0].clone() {
                ObjectNamePart::Identifier(ident) => name = Some(ident),
            }
            data_type = self.parse_data_type()?;
        }

        // Optional default value, introduced by DEFAULT or `=`.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
5296
5297 pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5303 if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5304 self.prev_token();
5305 return self.expected("an object type after DROP", self.peek_token());
5306 }
5307 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5308 let trigger_name = self.parse_object_name(false)?;
5309 let table_name = if self.parse_keyword(Keyword::ON) {
5310 Some(self.parse_object_name(false)?)
5311 } else {
5312 None
5313 };
5314 let option = self
5315 .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
5316 .map(|keyword| match keyword {
5317 Keyword::CASCADE => ReferentialAction::Cascade,
5318 Keyword::RESTRICT => ReferentialAction::Restrict,
5319 _ => unreachable!(),
5320 });
5321 Ok(Statement::DropTrigger {
5322 if_exists,
5323 trigger_name,
5324 table_name,
5325 option,
5326 })
5327 }
5328
    /// Parses a `CREATE TRIGGER` statement:
    /// `CREATE [CONSTRAINT] TRIGGER name period event [OR event]... ON table
    ///  [FROM table] [REFERENCING ...] FOR [EACH] {ROW | STATEMENT}
    ///  [WHEN condition] EXECUTE {FUNCTION | PROCEDURE} func(...)`
    ///
    /// Only accepted for the Postgres, MySQL, MSSQL and generic dialects.
    pub fn parse_create_trigger(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        is_constraint: bool,
    ) -> Result<Statement, ParserError> {
        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
            // Rewind so the error points at the token following CREATE.
            self.prev_token();
            return self.expected("an object type after CREATE", self.peek_token());
        }

        let name = self.parse_object_name(false)?;
        let period = self.parse_trigger_period()?;

        // Multiple firing events are separated by OR, e.g. `INSERT OR UPDATE`.
        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `FROM referenced_table`; a parse failure here is treated
        // as "no referenced table" rather than an error.
        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
            self.parse_object_name(true).ok()
        } else {
            None
        };

        let characteristics = self.parse_constraint_characteristics()?;

        // Zero or more `{OLD | NEW} TABLE [AS] name` transition relations.
        let mut referencing = vec![];
        if self.parse_keyword(Keyword::REFERENCING) {
            while let Some(refer) = self.parse_trigger_referencing()? {
                referencing.push(refer);
            }
        }

        self.expect_keyword_is(Keyword::FOR)?;
        // EACH is optional but recorded so the statement round-trips.
        let include_each = self.parse_keyword(Keyword::EACH);
        let trigger_object =
            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
                Keyword::ROW => TriggerObject::Row,
                Keyword::STATEMENT => TriggerObject::Statement,
                _ => unreachable!(),
            };

        // Optional firing condition: `WHEN (expr)`.
        let condition = self
            .parse_keyword(Keyword::WHEN)
            .then(|| self.parse_expr())
            .transpose()?;

        self.expect_keyword_is(Keyword::EXECUTE)?;

        let exec_body = self.parse_trigger_exec_body()?;

        Ok(Statement::CreateTrigger {
            or_alter,
            or_replace,
            is_constraint,
            name,
            period,
            events,
            table_name,
            referenced_table_name,
            referencing,
            trigger_object,
            include_each,
            condition,
            exec_body: Some(exec_body),
            statements: None,
            characteristics,
        })
    }
5398
5399 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5400 Ok(
5401 match self.expect_one_of_keywords(&[
5402 Keyword::FOR,
5403 Keyword::BEFORE,
5404 Keyword::AFTER,
5405 Keyword::INSTEAD,
5406 ])? {
5407 Keyword::FOR => TriggerPeriod::For,
5408 Keyword::BEFORE => TriggerPeriod::Before,
5409 Keyword::AFTER => TriggerPeriod::After,
5410 Keyword::INSTEAD => self
5411 .expect_keyword_is(Keyword::OF)
5412 .map(|_| TriggerPeriod::InsteadOf)?,
5413 _ => unreachable!(),
5414 },
5415 )
5416 }
5417
5418 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5419 Ok(
5420 match self.expect_one_of_keywords(&[
5421 Keyword::INSERT,
5422 Keyword::UPDATE,
5423 Keyword::DELETE,
5424 Keyword::TRUNCATE,
5425 ])? {
5426 Keyword::INSERT => TriggerEvent::Insert,
5427 Keyword::UPDATE => {
5428 if self.parse_keyword(Keyword::OF) {
5429 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5430 TriggerEvent::Update(cols)
5431 } else {
5432 TriggerEvent::Update(vec![])
5433 }
5434 }
5435 Keyword::DELETE => TriggerEvent::Delete,
5436 Keyword::TRUNCATE => TriggerEvent::Truncate,
5437 _ => unreachable!(),
5438 },
5439 )
5440 }
5441
5442 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5443 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5444 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5445 TriggerReferencingType::OldTable
5446 }
5447 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5448 TriggerReferencingType::NewTable
5449 }
5450 _ => {
5451 return Ok(None);
5452 }
5453 };
5454
5455 let is_as = self.parse_keyword(Keyword::AS);
5456 let transition_relation_name = self.parse_object_name(false)?;
5457 Ok(Some(TriggerReferencing {
5458 refer_type,
5459 is_as,
5460 transition_relation_name,
5461 }))
5462 }
5463
5464 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5465 Ok(TriggerExecBody {
5466 exec_type: match self
5467 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5468 {
5469 Keyword::FUNCTION => TriggerExecBodyType::Function,
5470 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5471 _ => unreachable!(),
5472 },
5473 func_desc: self.parse_function_desc()?,
5474 })
5475 }
5476
5477 pub fn parse_create_macro(
5478 &mut self,
5479 or_replace: bool,
5480 temporary: bool,
5481 ) -> Result<Statement, ParserError> {
5482 if dialect_of!(self is DuckDbDialect | GenericDialect) {
5483 let name = self.parse_object_name(false)?;
5484 self.expect_token(&Token::LParen)?;
5485 let args = if self.consume_token(&Token::RParen) {
5486 self.prev_token();
5487 None
5488 } else {
5489 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5490 };
5491
5492 self.expect_token(&Token::RParen)?;
5493 self.expect_keyword_is(Keyword::AS)?;
5494
5495 Ok(Statement::CreateMacro {
5496 or_replace,
5497 temporary,
5498 name,
5499 args,
5500 definition: if self.parse_keyword(Keyword::TABLE) {
5501 MacroDefinition::Table(self.parse_query()?)
5502 } else {
5503 MacroDefinition::Expr(self.parse_expr()?)
5504 },
5505 })
5506 } else {
5507 self.prev_token();
5508 self.expected("an object type after CREATE", self.peek_token())
5509 }
5510 }
5511
5512 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5513 let name = self.parse_identifier()?;
5514
5515 let default_expr =
5516 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5517 Some(self.parse_expr()?)
5518 } else {
5519 None
5520 };
5521 Ok(MacroArg { name, default_expr })
5522 }
5523
5524 pub fn parse_create_external_table(
5525 &mut self,
5526 or_replace: bool,
5527 ) -> Result<Statement, ParserError> {
5528 self.expect_keyword_is(Keyword::TABLE)?;
5529 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5530 let table_name = self.parse_object_name(false)?;
5531 let (columns, constraints) = self.parse_columns()?;
5532
5533 let hive_distribution = self.parse_hive_distribution()?;
5534 let hive_formats = self.parse_hive_formats()?;
5535
5536 let file_format = if let Some(ff) = &hive_formats.storage {
5537 match ff {
5538 HiveIOFormat::FileFormat { format } => Some(*format),
5539 _ => None,
5540 }
5541 } else {
5542 None
5543 };
5544 let location = hive_formats.location.clone();
5545 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5546 let table_options = if !table_properties.is_empty() {
5547 CreateTableOptions::TableProperties(table_properties)
5548 } else {
5549 CreateTableOptions::None
5550 };
5551 Ok(CreateTableBuilder::new(table_name)
5552 .columns(columns)
5553 .constraints(constraints)
5554 .hive_distribution(hive_distribution)
5555 .hive_formats(Some(hive_formats))
5556 .table_options(table_options)
5557 .or_replace(or_replace)
5558 .if_not_exists(if_not_exists)
5559 .external(true)
5560 .file_format(file_format)
5561 .location(location)
5562 .build())
5563 }
5564
5565 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5566 let next_token = self.next_token();
5567 match &next_token.token {
5568 Token::Word(w) => match w.keyword {
5569 Keyword::AVRO => Ok(FileFormat::AVRO),
5570 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5571 Keyword::ORC => Ok(FileFormat::ORC),
5572 Keyword::PARQUET => Ok(FileFormat::PARQUET),
5573 Keyword::RCFILE => Ok(FileFormat::RCFILE),
5574 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5575 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5576 _ => self.expected("fileformat", next_token),
5577 },
5578 _ => self.expected("fileformat", next_token),
5579 }
5580 }
5581
5582 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5583 let next_token = self.next_token();
5584 match &next_token.token {
5585 Token::Word(w) => match w.keyword {
5586 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5587 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5588 Keyword::JSON => Ok(AnalyzeFormat::JSON),
5589 _ => self.expected("fileformat", next_token),
5590 },
5591 _ => self.expected("fileformat", next_token),
5592 }
5593 }
5594
    /// Parses a `CREATE [MATERIALIZED] VIEW` statement after the leading
    /// `CREATE [OR REPLACE] [TEMPORARY]` (and any MySQL view parameters)
    /// have already been consumed by the caller.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<Statement, ParserError> {
        let materialized = self.parse_keyword(Keyword::MATERIALIZED)
        self.expect_keyword_is(Keyword::VIEW)?;
        // IF NOT EXISTS is only recognized in dialects that support it.
        let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect)
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        // BigQuery allows hyphenated (unquoted) project/dataset names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let columns = self.parse_view_columns()?;
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style OPTIONS(...) replaces any WITH options parsed above
        // (only one of the two forms is expected per statement).
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse `TO table` target for materialized views.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // Snowflake `COMMENT = '...'` clause.
        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift `WITH NO SCHEMA BINDING` trails the defining query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(Statement::CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            to,
            params: create_view_params,
        })
    }
5678
    /// Parses the optional MySQL-style view parameters that precede VIEW:
    /// `[ALGORITHM = {UNDEFINED | MERGE | TEMPTABLE}] [DEFINER = user]
    ///  [SQL SECURITY {DEFINER | INVOKER}]`.
    ///
    /// Returns `Ok(None)` when none of the three clauses is present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    _ => {
                        // Unreachable in practice (expect_one_of_keywords only
                        // returns the listed keywords); rewind to report the
                        // offending token if it ever happens.
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    _ => {
                        // Unreachable in practice; see note above.
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only build a params struct if at least one clause was present.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
5736
    /// Parses `CREATE ROLE [IF NOT EXISTS] name [, ...] [WITH] option...`.
    ///
    /// The recognized options depend on the dialect (AUTHORIZATION for MSSQL,
    /// the LOGIN/PASSWORD/... family for Postgres, none otherwise). Options
    /// may appear in any order; duplicates raise a parser error whose
    /// location is the duplicated keyword.
    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // WITH is optional noise before the option list.
        let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // One slot per option; `None`/empty means "not specified".
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, for duplicate-option
            // error messages; falls back to 0:0 if unavailable.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            // Each arm yields Result<(), ParserError>; `?` after the match
            // propagates the first duplicate-option error.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // IN must be followed by ROLE or GROUP.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected("ROLE or GROUP after IN", self.peek_token())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Any keyword not handled above ends the option list.
                _ => break,
            }?
        }

        Ok(Statement::CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
5960
5961 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
5962 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
5963 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
5964 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
5965 Some(Keyword::SESSION_USER) => Owner::SessionUser,
5966 Some(_) => unreachable!(),
5967 None => {
5968 match self.parse_identifier() {
5969 Ok(ident) => Owner::Ident(ident),
5970 Err(e) => {
5971 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
5972 }
5973 }
5974 }
5975 };
5976 Ok(owner)
5977 }
5978
5979 fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
5981 let name = self.parse_object_name(false)?;
5982 self.expect_keyword_is(Keyword::AS)?;
5983 let data_type = self.parse_data_type()?;
5984 let collation = if self.parse_keyword(Keyword::COLLATE) {
5985 Some(self.parse_identifier()?)
5986 } else {
5987 None
5988 };
5989 let default = if self.parse_keyword(Keyword::DEFAULT) {
5990 Some(self.parse_expr()?)
5991 } else {
5992 None
5993 };
5994 let mut constraints = Vec::new();
5995 while let Some(constraint) = self.parse_optional_table_constraint()? {
5996 constraints.push(constraint);
5997 }
5998
5999 Ok(Statement::CreateDomain(CreateDomain {
6000 name,
6001 data_type,
6002 collation,
6003 default,
6004 constraints,
6005 }))
6006 }
6007
    /// Parses a `CREATE POLICY` statement:
    /// `CREATE POLICY name ON table [AS {PERMISSIVE | RESTRICTIVE}]
    ///  [FOR {ALL | SELECT | INSERT | UPDATE | DELETE}] [TO role, ...]
    ///  [USING (expr)] [WITH CHECK (expr)]`
    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional policy kind.
        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                _ => unreachable!(),
            })
        } else {
            None
        };

        // Optional command the policy applies to.
        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                _ => unreachable!(),
            })
        } else {
            None
        };

        // Optional list of grantees.
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // Optional row-visibility expression: USING (expr).
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // Optional write-check expression: WITH CHECK (expr).
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
6088
6089 pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6099 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6100 let name = self.parse_identifier()?;
6101
6102 let connector_type = if self.parse_keyword(Keyword::TYPE) {
6103 Some(self.parse_literal_string()?)
6104 } else {
6105 None
6106 };
6107
6108 let url = if self.parse_keyword(Keyword::URL) {
6109 Some(self.parse_literal_string()?)
6110 } else {
6111 None
6112 };
6113
6114 let comment = self.parse_optional_inline_comment()?;
6115
6116 let with_dcproperties =
6117 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6118 properties if !properties.is_empty() => Some(properties),
6119 _ => None,
6120 };
6121
6122 Ok(Statement::CreateConnector(CreateConnector {
6123 name,
6124 if_not_exists,
6125 connector_type,
6126 url,
6127 comment,
6128 with_dcproperties,
6129 }))
6130 }
6131
    /// Parse a `DROP` statement.
    ///
    /// Dispatches on the object-kind keyword that follows `DROP`. Object kinds
    /// with their own grammar (FUNCTION, POLICY, CONNECTOR, DOMAIN, PROCEDURE,
    /// SECRET, TRIGGER, EXTENSION) delegate to dedicated sub-parsers and return
    /// early; all other kinds share the generic `Statement::Drop` tail parsed
    /// at the bottom of this function.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // `DROP TEMPORARY ...` (MySQL/DuckDB-style) and `DROP PERSISTENT ...`
        // (DuckDB) must be consumed before the object-kind keyword.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function();
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy();
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain();
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            // Secrets are the only kind that uses the TEMPORARY/PERSISTENT flags.
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger();
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else {
            return self.expected(
                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, or MATERIALIZED VIEW after DROP",
                self.peek_token(),
            );
        };
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Capture the location before the trailing options so errors about
        // their combination point at the options, not past them.
        let loc = self.peek_token().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
        })
    }
6209
6210 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6211 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6212 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6213 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6214 _ => None,
6215 }
6216 }
6217
6218 fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6223 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6224 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6225 let drop_behavior = self.parse_optional_drop_behavior();
6226 Ok(Statement::DropFunction {
6227 if_exists,
6228 func_desc,
6229 drop_behavior,
6230 })
6231 }
6232
6233 fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6239 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6240 let name = self.parse_identifier()?;
6241 self.expect_keyword_is(Keyword::ON)?;
6242 let table_name = self.parse_object_name(false)?;
6243 let drop_behavior = self.parse_optional_drop_behavior();
6244 Ok(Statement::DropPolicy {
6245 if_exists,
6246 name,
6247 table_name,
6248 drop_behavior,
6249 })
6250 }
6251 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6257 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6258 let name = self.parse_identifier()?;
6259 Ok(Statement::DropConnector { if_exists, name })
6260 }
6261
6262 fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6266 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6267 let name = self.parse_object_name(false)?;
6268 let drop_behavior = self.parse_optional_drop_behavior();
6269 Ok(Statement::DropDomain(DropDomain {
6270 if_exists,
6271 name,
6272 drop_behavior,
6273 }))
6274 }
6275
6276 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6281 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6282 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6283 let drop_behavior = self.parse_optional_drop_behavior();
6284 Ok(Statement::DropProcedure {
6285 if_exists,
6286 proc_desc,
6287 drop_behavior,
6288 })
6289 }
6290
6291 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6292 let name = self.parse_object_name(false)?;
6293
6294 let args = if self.consume_token(&Token::LParen) {
6295 if self.consume_token(&Token::RParen) {
6296 None
6297 } else {
6298 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6299 self.expect_token(&Token::RParen)?;
6300 Some(args)
6301 }
6302 } else {
6303 None
6304 };
6305
6306 Ok(FunctionDesc { name, args })
6307 }
6308
6309 fn parse_drop_secret(
6311 &mut self,
6312 temporary: bool,
6313 persistent: bool,
6314 ) -> Result<Statement, ParserError> {
6315 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6316 let name = self.parse_identifier()?;
6317 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
6318 self.parse_identifier().ok()
6319 } else {
6320 None
6321 };
6322 let temp = match (temporary, persistent) {
6323 (true, false) => Some(true),
6324 (false, true) => Some(false),
6325 (false, false) => None,
6326 _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
6327 };
6328
6329 Ok(Statement::DropSecret {
6330 if_exists,
6331 temporary: temp,
6332 name,
6333 storage_specifier,
6334 })
6335 }
6336
    /// Parse a `DECLARE` statement.
    ///
    /// BigQuery, Snowflake and MsSql each have their own DECLARE grammar and
    /// are delegated to dedicated sub-parsers. The fall-through handles the
    /// PostgreSQL-style cursor declaration:
    /// `DECLARE name [BINARY] [ASENSITIVE | INSENSITIVE] [[NO] SCROLL]
    ///  CURSOR [WITH | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // Always Some for this syntax: records whether BINARY was present.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        // NOTE(review): INSENSITIVE maps to Some(true) although the field is
        // named `sensitive` — confirm this encoding is intended.
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // parse_one_of_keywords only returns keywords from its list.
                    _ => unreachable!(),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
6409
6410 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
6418 let names = self.parse_comma_separated(Parser::parse_identifier)?;
6419
6420 let data_type = match self.peek_token().token {
6421 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
6422 _ => Some(self.parse_data_type()?),
6423 };
6424
6425 let expr = if data_type.is_some() {
6426 if self.parse_keyword(Keyword::DEFAULT) {
6427 Some(self.parse_expr()?)
6428 } else {
6429 None
6430 }
6431 } else {
6432 self.expect_keyword_is(Keyword::DEFAULT)?;
6435 Some(self.parse_expr()?)
6436 };
6437
6438 Ok(Statement::Declare {
6439 stmts: vec![Declare {
6440 names,
6441 data_type,
6442 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
6443 declare_type: None,
6444 binary: None,
6445 sensitive: None,
6446 scroll: None,
6447 hold: None,
6448 for_query: None,
6449 }],
6450 })
6451 }
6452
    /// Parse a Snowflake `DECLARE` block: one or more declarations, each of
    /// the form `name CURSOR FOR ...`, `name RESULTSET [...]`,
    /// `name EXCEPTION [(...)]`, or a plain variable
    /// `name [type] [DEFAULT | := expr]`, separated by semicolons.
    ///
    /// The loop stops after the first declaration whose trailing semicolon is
    /// followed by a keyword (i.e. the start of the next statement rather than
    /// another declared name).
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` carries a query; otherwise the
                    // FOR target is an expression (e.g. a variable).
                    match self.peek_token().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // RESULTSET may optionally be initialized unless the
                    // declaration ends right here.
                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // EXCEPTION may carry a parenthesized (code, message) pair.
                    let assigned_expr = if self.peek_token().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: try `DEFAULT/:= expr` first; failing
                    // that, a data type optionally followed by an assignment.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = self.peek_token().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match self.peek_token().token {
                    // A non-keyword word after `;` is the next declared name.
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    _ => {
                        // Not another declaration: push the semicolon back so
                        // the caller sees the statement terminator.
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
6568
6569 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
6581 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
6582
6583 Ok(Statement::Declare { stmts })
6584 }
6585
6586 pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
6597 let name = {
6598 let ident = self.parse_identifier()?;
6599 if !ident.value.starts_with('@')
6600 && !matches!(
6601 self.peek_token().token,
6602 Token::Word(w) if w.keyword == Keyword::CURSOR
6603 )
6604 {
6605 Err(ParserError::TokenizerError(
6606 "Invalid MsSql variable declaration.".to_string(),
6607 ))
6608 } else {
6609 Ok(ident)
6610 }
6611 }?;
6612
6613 let (declare_type, data_type) = match self.peek_token().token {
6614 Token::Word(w) => match w.keyword {
6615 Keyword::CURSOR => {
6616 self.next_token();
6617 (Some(DeclareType::Cursor), None)
6618 }
6619 Keyword::AS => {
6620 self.next_token();
6621 (None, Some(self.parse_data_type()?))
6622 }
6623 _ => (None, Some(self.parse_data_type()?)),
6624 },
6625 _ => (None, Some(self.parse_data_type()?)),
6626 };
6627
6628 let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
6629 self.next_token();
6630 let query = Some(self.parse_query()?);
6631 (query, None)
6632 } else {
6633 let assignment = self.parse_mssql_variable_declaration_expression()?;
6634 (None, assignment)
6635 };
6636
6637 Ok(Declare {
6638 names: vec![name],
6639 data_type,
6640 assignment,
6641 declare_type,
6642 binary: None,
6643 sensitive: None,
6644 scroll: None,
6645 hold: None,
6646 for_query,
6647 })
6648 }
6649
6650 pub fn parse_snowflake_variable_declaration_expression(
6658 &mut self,
6659 ) -> Result<Option<DeclareAssignment>, ParserError> {
6660 Ok(match self.peek_token().token {
6661 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
6662 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
6664 }
6665 Token::Assignment => {
6666 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
6668 self.parse_expr()?,
6669 )))
6670 }
6671 _ => None,
6672 })
6673 }
6674
6675 pub fn parse_mssql_variable_declaration_expression(
6682 &mut self,
6683 ) -> Result<Option<DeclareAssignment>, ParserError> {
6684 Ok(match self.peek_token().token {
6685 Token::Eq => {
6686 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
6688 self.parse_expr()?,
6689 )))
6690 }
6691 _ => None,
6692 })
6693 }
6694
6695 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
6697 let direction = if self.parse_keyword(Keyword::NEXT) {
6698 FetchDirection::Next
6699 } else if self.parse_keyword(Keyword::PRIOR) {
6700 FetchDirection::Prior
6701 } else if self.parse_keyword(Keyword::FIRST) {
6702 FetchDirection::First
6703 } else if self.parse_keyword(Keyword::LAST) {
6704 FetchDirection::Last
6705 } else if self.parse_keyword(Keyword::ABSOLUTE) {
6706 FetchDirection::Absolute {
6707 limit: self.parse_number_value()?.value,
6708 }
6709 } else if self.parse_keyword(Keyword::RELATIVE) {
6710 FetchDirection::Relative {
6711 limit: self.parse_number_value()?.value,
6712 }
6713 } else if self.parse_keyword(Keyword::FORWARD) {
6714 if self.parse_keyword(Keyword::ALL) {
6715 FetchDirection::ForwardAll
6716 } else {
6717 FetchDirection::Forward {
6718 limit: Some(self.parse_number_value()?.value),
6720 }
6721 }
6722 } else if self.parse_keyword(Keyword::BACKWARD) {
6723 if self.parse_keyword(Keyword::ALL) {
6724 FetchDirection::BackwardAll
6725 } else {
6726 FetchDirection::Backward {
6727 limit: Some(self.parse_number_value()?.value),
6729 }
6730 }
6731 } else if self.parse_keyword(Keyword::ALL) {
6732 FetchDirection::All
6733 } else {
6734 FetchDirection::Count {
6735 limit: self.parse_number_value()?.value,
6736 }
6737 };
6738
6739 let position = if self.peek_keyword(Keyword::FROM) {
6740 self.expect_keyword(Keyword::FROM)?;
6741 FetchPosition::From
6742 } else if self.peek_keyword(Keyword::IN) {
6743 self.expect_keyword(Keyword::IN)?;
6744 FetchPosition::In
6745 } else {
6746 return parser_err!("Expected FROM or IN", self.peek_token().span.start);
6747 };
6748
6749 let name = self.parse_identifier()?;
6750
6751 let into = if self.parse_keyword(Keyword::INTO) {
6752 Some(self.parse_object_name(false)?)
6753 } else {
6754 None
6755 };
6756
6757 Ok(Statement::Fetch {
6758 name,
6759 direction,
6760 position,
6761 into,
6762 })
6763 }
6764
6765 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
6766 let object_type = if self.parse_keyword(Keyword::ALL) {
6767 DiscardObject::ALL
6768 } else if self.parse_keyword(Keyword::PLANS) {
6769 DiscardObject::PLANS
6770 } else if self.parse_keyword(Keyword::SEQUENCES) {
6771 DiscardObject::SEQUENCES
6772 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
6773 DiscardObject::TEMP
6774 } else {
6775 return self.expected(
6776 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
6777 self.peek_token(),
6778 );
6779 };
6780 Ok(Statement::Discard { object_type })
6781 }
6782
    /// Parse the tail of `CREATE [UNIQUE] INDEX`; `unique` records whether the
    /// caller already consumed UNIQUE.
    ///
    /// Grammar (roughly PostgreSQL's):
    /// `[CONCURRENTLY] [IF NOT EXISTS] [name] ON table [USING method]
    ///  (expr, ...) [INCLUDE (col, ...)] [NULLS [NOT] DISTINCT]
    ///  [WITH (param, ...)] [WHERE predicate]`.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        // The index name is optional (`CREATE INDEX ON t(...)`), but it is
        // required when IF NOT EXISTS was given.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };
        let table_name = self.parse_object_name(false)?;
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_index_type()?)
        } else {
            None
        };

        self.expect_token(&Token::LParen)?;
        let columns = self.parse_comma_separated(Parser::parse_create_index_expr)?;
        self.expect_token(&Token::RParen)?;

        // Covering columns: `INCLUDE (col, ...)`.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS DISTINCT` => Some(true); `NULLS NOT DISTINCT` => Some(false).
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Partial-index predicate.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::CreateIndex(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
        }))
    }
6852
6853 pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
6854 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6855 let name = self.parse_identifier()?;
6856
6857 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
6858 let schema = if self.parse_keyword(Keyword::SCHEMA) {
6859 Some(self.parse_identifier()?)
6860 } else {
6861 None
6862 };
6863
6864 let version = if self.parse_keyword(Keyword::VERSION) {
6865 Some(self.parse_identifier()?)
6866 } else {
6867 None
6868 };
6869
6870 let cascade = self.parse_keyword(Keyword::CASCADE);
6871
6872 (schema, version, cascade)
6873 } else {
6874 (None, None, false)
6875 };
6876
6877 Ok(Statement::CreateExtension {
6878 name,
6879 if_not_exists,
6880 schema,
6881 version,
6882 cascade,
6883 })
6884 }
6885
6886 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
6888 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6889 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
6890 let cascade_or_restrict =
6891 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
6892 Ok(Statement::DropExtension {
6893 names,
6894 if_exists,
6895 cascade_or_restrict: cascade_or_restrict
6896 .map(|k| match k {
6897 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
6898 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
6899 _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
6900 })
6901 .transpose()?,
6902 })
6903 }
6904
6905 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
6907 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
6908 self.expect_token(&Token::LParen)?;
6909 let columns = self.parse_comma_separated(Parser::parse_column_def)?;
6910 self.expect_token(&Token::RParen)?;
6911 Ok(HiveDistributionStyle::PARTITIONED { columns })
6912 } else {
6913 Ok(HiveDistributionStyle::NONE)
6914 }
6915 }
6916
6917 pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
6918 let mut hive_format = HiveFormat::default();
6919 loop {
6920 match self.parse_one_of_keywords(&[
6921 Keyword::ROW,
6922 Keyword::STORED,
6923 Keyword::LOCATION,
6924 Keyword::WITH,
6925 ]) {
6926 Some(Keyword::ROW) => {
6927 hive_format.row_format = Some(self.parse_row_format()?);
6928 }
6929 Some(Keyword::STORED) => {
6930 self.expect_keyword_is(Keyword::AS)?;
6931 if self.parse_keyword(Keyword::INPUTFORMAT) {
6932 let input_format = self.parse_expr()?;
6933 self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
6934 let output_format = self.parse_expr()?;
6935 hive_format.storage = Some(HiveIOFormat::IOF {
6936 input_format,
6937 output_format,
6938 });
6939 } else {
6940 let format = self.parse_file_format()?;
6941 hive_format.storage = Some(HiveIOFormat::FileFormat { format });
6942 }
6943 }
6944 Some(Keyword::LOCATION) => {
6945 hive_format.location = Some(self.parse_literal_string()?);
6946 }
6947 Some(Keyword::WITH) => {
6948 self.prev_token();
6949 let properties = self
6950 .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
6951 if !properties.is_empty() {
6952 hive_format.serde_properties = Some(properties);
6953 } else {
6954 break;
6955 }
6956 }
6957 None => break,
6958 _ => break,
6959 }
6960 }
6961
6962 Ok(hive_format)
6963 }
6964
    /// Parse a Hive `ROW FORMAT` clause (the `ROW` keyword was consumed by the
    /// caller): either `FORMAT SERDE 'class'` or `FORMAT DELIMITED` followed by
    /// any number of delimiter sub-clauses (`FIELDS TERMINATED BY ...`,
    /// `COLLECTION ITEMS TERMINATED BY ...`, `MAP KEYS TERMINATED BY ...`,
    /// `LINES TERMINATED BY ...`, `NULL DEFINED AS ...`).
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED (or nothing): collect delimiter sub-clauses until
                // one fails to match; each `break` abandons a half-recognized
                // clause and ends the list.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                // `ESCAPED BY` may only follow `FIELDS TERMINATED BY`.
                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
7060
7061 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7062 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7063 Ok(Some(self.parse_identifier()?))
7064 } else {
7065 Ok(None)
7066 }
7067 }
7068
    /// Parse the body of a `CREATE TABLE` statement. The leading
    /// `CREATE [OR REPLACE] [TEMPORARY] [GLOBAL|LOCAL] [TRANSIENT] TABLE`
    /// modifiers were consumed by the caller and are passed in as flags.
    ///
    /// The clause order below is significant: each clause is attempted exactly
    /// once, in sequence, and feeds into a `CreateTableBuilder` at the end.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<Statement, ParserError> {
        // BigQuery permits unquoted hyphenated (project-qualified) names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // `LIKE` / `ILIKE other_table`; a malformed name is treated as absent.
        let like = if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Snowflake-style `CLONE other_table`.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        let (columns, constraints) = self.parse_columns()?;
        // Hive allows a table COMMENT directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // SQLite `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive storage clauses (partitioning, clustering, row/file format).
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        // Dialect-specific table options / partitioning / clustering config.
        let create_table_config = self.parse_optional_create_table_config()?;

        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // `ORDER BY` accepts either a single expression or a (possibly empty)
        // parenthesized list.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token() != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite STRICT tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // `AS query` (CTAS); some dialects also accept a bare SELECT, in
        // which case the SELECT keyword is pushed back for parse_query.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(Some(hive_formats))
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .build())
    }
7190
7191 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
7192 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
7193 Ok(OnCommit::DeleteRows)
7194 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
7195 Ok(OnCommit::PreserveRows)
7196 } else if self.parse_keywords(&[Keyword::DROP]) {
7197 Ok(OnCommit::Drop)
7198 } else {
7199 parser_err!(
7200 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
7201 self.peek_token()
7202 )
7203 }
7204 }
7205
    /// Parse the optional dialect-specific configuration clauses of a
    /// `CREATE TABLE`: `INHERITS (...)`, `WITH (...)`, `TBLPROPERTIES (...)`,
    /// `PARTITION BY`, `CLUSTER BY`, `OPTIONS (...)`, and plain
    /// `key [=] value` options.
    ///
    /// Only one option style ends up in `table_options`: each successfully
    /// parsed style overwrites the previous one (WITH, then TBLPROPERTIES,
    /// then OPTIONS, then plain options as a fallback).
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // PostgreSQL `INHERITS (parent, ...)`; the list is mandatory once the
        // keyword is seen.
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                ));
            };

            // BigQuery `OPTIONS (...)` — peeked first so nothing is consumed
            // when the keyword is absent.
            if let Token::Word(word) = self.peek_token().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fallback: bare `key [=] value` options for non-Hive dialects, only
        // when no other option style matched.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
7270
    /// Parses a single table-level option (MySQL-style `CREATE TABLE` options
    /// such as `ENGINE`, `COMMENT`, `TABLESPACE`, `UNION`, `AUTO_INCREMENT`, ...).
    ///
    /// Options that need structured values (`COMMENT`, `ENGINE`, `TABLESPACE`,
    /// `UNION`) are handled explicitly; everything else falls through to a
    /// generic `KEY [=] value` catch-all at the bottom. Returns `Ok(None)` when
    /// the upcoming tokens do not start any recognized option, which lets
    /// [`Self::parse_plain_options`] use this as its loop condition.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        if self.parse_keywords(&[Keyword::COMMENT]) {
            // The `=` is optional; remember whether it was present so the AST
            // can round-trip the original syntax (`WithEq` vs `WithoutEq`).
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    // Optional parameter list, e.g. `ENGINE=InnoDB(x, y)`.
                    let parameters = if self.peek_token() == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                // Tablespace name may be bare or single-quoted.
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    // Optional `STORAGE [=] DISK|MEMORY` suffix.
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    // `UNION = (tbl1, tbl2, ...)` — possibly empty list.
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Generic `KEY [=] value` options. Multi-word keys are tried before
        // their single-word prefixes (e.g. `DEFAULT CHARSET` before `CHARSET`)
        // so the longest match wins.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option — nothing consumed; caller stops looping.
            return Ok(None);
        };

        let _ = self.consume_token(&Token::Eq);

        // Value is a literal when possible, otherwise a bare identifier
        // (e.g. `ROW_FORMAT = DYNAMIC`).
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
7463
7464 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
7465 let mut options = Vec::new();
7466
7467 while let Some(option) = self.parse_plain_option()? {
7468 options.push(option);
7469 }
7470
7471 Ok(options)
7472 }
7473
7474 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
7475 let comment = if self.parse_keyword(Keyword::COMMENT) {
7476 let has_eq = self.consume_token(&Token::Eq);
7477 let comment = self.parse_comment_value()?;
7478 Some(if has_eq {
7479 CommentDef::WithEq(comment)
7480 } else {
7481 CommentDef::WithoutEq(comment)
7482 })
7483 } else {
7484 None
7485 };
7486 Ok(comment)
7487 }
7488
7489 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
7490 let next_token = self.next_token();
7491 let value = match next_token.token {
7492 Token::SingleQuotedString(str) => str,
7493 Token::DollarQuotedString(str) => str.value,
7494 _ => self.expected("string literal", next_token)?,
7495 };
7496 Ok(value)
7497 }
7498
    /// Parses an optional parenthesized procedure parameter list.
    ///
    /// Returns `Ok(Some(vec![]))` both when no `(` follows and when the list
    /// is empty (`()`); the `Option` wrapper is part of the public signature.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            if let Token::Word(_) = self.peek_token().token {
                params.push(self.parse_procedure_param()?)
            }
            // Consume the comma (if any) before checking for `)`, so a
            // trailing comma before the closing paren is accepted.
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected("',' or ')' after parameter definition", self.peek_token());
            }
        }
        Ok(Some(params))
    }
7520
    /// Parses the parenthesized body of a `CREATE TABLE`: column definitions
    /// and table-level constraints, in source order within their own lists.
    ///
    /// Returns empty lists when there is no `(` at all or the parens are
    /// empty (`()`).
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Constraints are tried first; anything starting with a word that
            // is not a constraint is treated as a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = self.peek_token().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected("column name or constraint definition", self.peek_token());
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token().token == Token::RParen;

            if !comma && !rparen {
                return self.expected("',' or ')' after column definition", self.peek_token());
            };

            // A `)` closes the list; `,)` (trailing comma) is only accepted
            // when the dialect or parser options allow it.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
7556
7557 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
7558 let name = self.parse_identifier()?;
7559 let data_type = self.parse_data_type()?;
7560 Ok(ProcedureParam { name, data_type })
7561 }
7562
7563 pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
7564 let name = self.parse_identifier()?;
7565 let data_type = if self.is_column_type_sqlite_unspecified() {
7566 DataType::Unspecified
7567 } else {
7568 self.parse_data_type()?
7569 };
7570 let mut options = vec![];
7571 loop {
7572 if self.parse_keyword(Keyword::CONSTRAINT) {
7573 let name = Some(self.parse_identifier()?);
7574 if let Some(option) = self.parse_optional_column_option()? {
7575 options.push(ColumnOptionDef { name, option });
7576 } else {
7577 return self.expected(
7578 "constraint details after CONSTRAINT <name>",
7579 self.peek_token(),
7580 );
7581 }
7582 } else if let Some(option) = self.parse_optional_column_option()? {
7583 options.push(ColumnOptionDef { name: None, option });
7584 } else {
7585 break;
7586 };
7587 }
7588 Ok(ColumnDef {
7589 name,
7590 data_type,
7591 options,
7592 })
7593 }
7594
7595 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
7596 if dialect_of!(self is SQLiteDialect) {
7597 match self.peek_token().token {
7598 Token::Word(word) => matches!(
7599 word.keyword,
7600 Keyword::CONSTRAINT
7601 | Keyword::PRIMARY
7602 | Keyword::NOT
7603 | Keyword::UNIQUE
7604 | Keyword::CHECK
7605 | Keyword::DEFAULT
7606 | Keyword::COLLATE
7607 | Keyword::REFERENCES
7608 | Keyword::GENERATED
7609 | Keyword::AS
7610 ),
7611 _ => true, }
7613 } else {
7614 false
7615 }
7616 }
7617
    /// Parses one optional column option (NOT NULL, DEFAULT, UNIQUE,
    /// REFERENCES, GENERATED, dialect-specific flags, ...).
    ///
    /// The dialect gets first refusal via `parse_column_option`; otherwise the
    /// keyword chain below is tried in order. Returns `Ok(None)` when nothing
    /// matches, which ends the caller's option loop.
    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if let Some(option) = self.dialect.parse_column_option(self)? {
            return option;
        }

        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // EPHEMERAL may stand alone (followed by `,` or `)`) or carry a
            // default expression.
            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(ColumnOption::Unique {
                is_primary: true,
                characteristics,
            }))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(ColumnOption::Unique {
                is_primary: false,
                characteristics,
            }))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            // ON DELETE / ON UPDATE may appear in either order, each at most once.
            let mut on_delete = None;
            let mut on_update = None;
            loop {
                if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(ColumnOption::ForeignKey {
                foreign_table,
                referred_columns,
                on_delete,
                on_update,
                characteristics,
            }))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Some(ColumnOption::Check(expr)))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): here the keyword is consumed *before* the dialect
            // check, unlike the ClickHouse branches above — if the dialect
            // test fails the keyword has already been eaten. Presumably
            // harmless for these keywords, but worth confirming.
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            let expr = self.parse_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // Rewind so parse_options can re-consume the OPTIONS keyword.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // MSSQL `IDENTITY[(seed, increment)]`.
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else {
            Ok(None)
        }
    }
7785
7786 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
7787 let name = self.parse_identifier()?;
7788 self.expect_token(&Token::Eq)?;
7789 let value = self.parse_literal_string()?;
7790
7791 Ok(Tag::new(name, value))
7792 }
7793
    /// Parses the remainder of a `GENERATED ...` column option, after the
    /// GENERATED keyword has already been consumed by the caller.
    ///
    /// Handles `ALWAYS AS IDENTITY`, `BY DEFAULT AS IDENTITY` (both with
    /// optional sequence options), and `ALWAYS AS (<expr>) [STORED|VIRTUAL]`.
    /// Returns `Ok(None)` when the tokens after GENERATED match none of these.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            let mut sequence_options = vec![];
            // `expect_token(...).is_ok()` is used as a "consume if present"
            // probe: on mismatch it errors without advancing, so parsing
            // continues with an empty option list.
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                // PostgreSQL requires STORED; other dialects also allow
                // VIRTUAL or no mode at all.
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    self.expected("STORED", self.peek_token())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
7860
7861 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7862 self.expect_token(&Token::LParen)?;
7864 let expr = self.parse_expr()?;
7865 self.expect_token(&Token::RParen)?;
7866
7867 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
7868 (
7869 GeneratedAs::ExpStored,
7870 Some(GeneratedExpressionMode::Stored),
7871 )
7872 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
7873 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
7874 } else {
7875 (GeneratedAs::Always, None)
7876 };
7877
7878 Ok(Some(ColumnOption::Generated {
7879 generated_as: gen_as,
7880 sequence_options: None,
7881 generation_expr: Some(expr),
7882 generation_expr_mode: expr_mode,
7883 generated_keyword: false,
7884 }))
7885 }
7886
7887 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
7888 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
7889 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
7890 {
7891 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7892
7893 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
7894 self.expect_token(&Token::LParen)?;
7895 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
7896 self.expect_token(&Token::RParen)?;
7897 Some(sorted_by_columns)
7898 } else {
7899 None
7900 };
7901
7902 self.expect_keyword_is(Keyword::INTO)?;
7903 let num_buckets = self.parse_number_value()?.value;
7904 self.expect_keyword_is(Keyword::BUCKETS)?;
7905 Some(ClusteredBy {
7906 columns,
7907 sorted_by,
7908 num_buckets,
7909 })
7910 } else {
7911 None
7912 };
7913 Ok(clustered_by)
7914 }
7915
7916 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
7917 if self.parse_keyword(Keyword::RESTRICT) {
7918 Ok(ReferentialAction::Restrict)
7919 } else if self.parse_keyword(Keyword::CASCADE) {
7920 Ok(ReferentialAction::Cascade)
7921 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
7922 Ok(ReferentialAction::SetNull)
7923 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
7924 Ok(ReferentialAction::NoAction)
7925 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
7926 Ok(ReferentialAction::SetDefault)
7927 } else {
7928 self.expected(
7929 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
7930 self.peek_token(),
7931 )
7932 }
7933 }
7934
    /// Parses optional constraint characteristics
    /// (`[NOT] DEFERRABLE`, `INITIALLY DEFERRED|IMMEDIATE`, `[NOT] ENFORCED`),
    /// each accepted at most once and in any order.
    ///
    /// Returns `Ok(None)` when none of the three appeared.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // The `is_none()` guards make each characteristic single-shot;
            // a repeated keyword simply breaks the loop unconsumed.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
7971
    /// Parses an optional table-level constraint (UNIQUE, PRIMARY KEY,
    /// FOREIGN KEY, CHECK, and the MySQL-specific INDEX/KEY and
    /// FULLTEXT/SPATIAL forms), with an optional leading `CONSTRAINT <name>`.
    ///
    /// Returns `Ok(None)` — with the lookahead token restored — when the
    /// next token starts no constraint, so the caller can try a column
    /// definition instead.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE KEY` / `UNIQUE INDEX` display is MySQL-only.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self
                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
                }

                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_indent()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(TableConstraint::Unique {
                    name,
                    index_name,
                    index_type_display,
                    index_type,
                    columns,
                    index_options,
                    characteristics,
                    nulls_distinct,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                self.expect_keyword_is(Keyword::KEY)?;

                let index_name = self.parse_optional_indent()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(TableConstraint::PrimaryKey {
                    name,
                    index_name,
                    index_type,
                    columns,
                    index_options,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                // ON DELETE / ON UPDATE in either order, each at most once.
                let mut on_delete = None;
                let mut on_update = None;
                loop {
                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(TableConstraint::ForeignKey {
                    name,
                    columns,
                    foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    characteristics,
                }))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;
                Ok(Some(TableConstraint::Check { name, expr }))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // `USING` right after INDEX/KEY means there is no index name.
                let name = match self.peek_token().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_indent()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

                Ok(Some(TableConstraint::Index {
                    display_as_key,
                    name,
                    index_type,
                    columns,
                }))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // MySQL does not allow a CONSTRAINT name on these forms.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_indent()?;

                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

                Ok(Some(TableConstraint::FulltextOrSpatial {
                    fulltext,
                    index_type_display,
                    opt_index_name,
                    columns,
                }))
            }
            _ => {
                if name.is_some() {
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint: push the token back for the caller.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
8131
8132 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
8133 Ok(if self.parse_keyword(Keyword::NULLS) {
8134 let not = self.parse_keyword(Keyword::NOT);
8135 self.expect_keyword_is(Keyword::DISTINCT)?;
8136 if not {
8137 NullsDistinctOption::NotDistinct
8138 } else {
8139 NullsDistinctOption::Distinct
8140 }
8141 } else {
8142 NullsDistinctOption::None
8143 })
8144 }
8145
8146 pub fn maybe_parse_options(
8147 &mut self,
8148 keyword: Keyword,
8149 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
8150 if let Token::Word(word) = self.peek_token().token {
8151 if word.keyword == keyword {
8152 return Ok(Some(self.parse_options(keyword)?));
8153 }
8154 };
8155 Ok(None)
8156 }
8157
8158 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
8159 if self.parse_keyword(keyword) {
8160 self.expect_token(&Token::LParen)?;
8161 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
8162 self.expect_token(&Token::RParen)?;
8163 Ok(options)
8164 } else {
8165 Ok(vec![])
8166 }
8167 }
8168
8169 pub fn parse_options_with_keywords(
8170 &mut self,
8171 keywords: &[Keyword],
8172 ) -> Result<Vec<SqlOption>, ParserError> {
8173 if self.parse_keywords(keywords) {
8174 self.expect_token(&Token::LParen)?;
8175 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8176 self.expect_token(&Token::RParen)?;
8177 Ok(options)
8178 } else {
8179 Ok(vec![])
8180 }
8181 }
8182
8183 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
8184 Ok(if self.parse_keyword(Keyword::BTREE) {
8185 IndexType::BTree
8186 } else if self.parse_keyword(Keyword::HASH) {
8187 IndexType::Hash
8188 } else if self.parse_keyword(Keyword::GIN) {
8189 IndexType::GIN
8190 } else if self.parse_keyword(Keyword::GIST) {
8191 IndexType::GiST
8192 } else if self.parse_keyword(Keyword::SPGIST) {
8193 IndexType::SPGiST
8194 } else if self.parse_keyword(Keyword::BRIN) {
8195 IndexType::BRIN
8196 } else if self.parse_keyword(Keyword::BLOOM) {
8197 IndexType::Bloom
8198 } else {
8199 IndexType::Custom(self.parse_identifier()?)
8200 })
8201 }
8202
8203 pub fn parse_optional_using_then_index_type(
8209 &mut self,
8210 ) -> Result<Option<IndexType>, ParserError> {
8211 if self.parse_keyword(Keyword::USING) {
8212 Ok(Some(self.parse_index_type()?))
8213 } else {
8214 Ok(None)
8215 }
8216 }
8217
    /// Parses an optional identifier, consuming nothing and returning `None`
    /// when the next tokens do not form one.
    ///
    /// NOTE(review): the name looks like a misspelling of
    /// `parse_optional_ident`; renaming would break callers, so it is kept.
    pub fn parse_optional_indent(&mut self) -> Result<Option<Ident>, ParserError> {
        self.maybe_parse(|parser| parser.parse_identifier())
    }
8223
8224 #[must_use]
8225 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
8226 if self.parse_keyword(Keyword::KEY) {
8227 KeyOrIndexDisplay::Key
8228 } else if self.parse_keyword(Keyword::INDEX) {
8229 KeyOrIndexDisplay::Index
8230 } else {
8231 KeyOrIndexDisplay::None
8232 }
8233 }
8234
8235 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
8236 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
8237 Ok(Some(IndexOption::Using(index_type)))
8238 } else if self.parse_keyword(Keyword::COMMENT) {
8239 let s = self.parse_literal_string()?;
8240 Ok(Some(IndexOption::Comment(s)))
8241 } else {
8242 Ok(None)
8243 }
8244 }
8245
8246 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
8247 let mut options = Vec::new();
8248
8249 loop {
8250 match self.parse_optional_index_option()? {
8251 Some(index_option) => options.push(index_option),
8252 None => return Ok(options),
8253 }
8254 }
8255 }
8256
    /// Parses one SQL option inside an options list.
    ///
    /// MSSQL (and Generic) get special handling for `HEAP`, `PARTITION`,
    /// and `CLUSTERED`; everything else is a plain `name = expr` pair.
    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);

        match self.peek_token().token {
            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
                Ok(SqlOption::Ident(self.parse_identifier()?))
            }
            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
                self.parse_option_partition()
            }
            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
                self.parse_option_clustered()
            }
            _ => {
                // Generic `name = expr` option; the `=` is mandatory here.
                let name = self.parse_identifier()?;
                self.expect_token(&Token::Eq)?;
                let value = self.parse_expr()?;

                Ok(SqlOption::KeyValue { key: name, value })
            }
        }
    }
8279
    /// Parses an MSSQL `CLUSTERED ...` table option:
    /// `CLUSTERED COLUMNSTORE INDEX [ORDER (cols)]` or
    /// `CLUSTERED INDEX (col [ASC|DESC], ...)`.
    ///
    /// The longest keyword sequence is tried first so that
    /// `... COLUMNSTORE INDEX ORDER` is not mis-parsed as the shorter forms.
    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
        if self.parse_keywords(&[
            Keyword::CLUSTERED,
            Keyword::COLUMNSTORE,
            Keyword::INDEX,
            Keyword::ORDER,
        ]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndexOrder(
                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                ),
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndex,
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
            self.expect_token(&Token::LParen)?;

            // Each entry is a column name with an optional ASC/DESC marker.
            let columns = self.parse_comma_separated(|p| {
                let name = p.parse_identifier()?;
                let asc = p.parse_asc_desc();

                Ok(ClusteredIndex { name, asc })
            })?;

            self.expect_token(&Token::RParen)?;

            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
        } else {
            Err(ParserError::ParserError(
                "invalid CLUSTERED sequence".to_string(),
            ))
        }
    }
8315
8316 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
8317 self.expect_keyword_is(Keyword::PARTITION)?;
8318 self.expect_token(&Token::LParen)?;
8319 let column_name = self.parse_identifier()?;
8320
8321 self.expect_keyword_is(Keyword::RANGE)?;
8322 let range_direction = if self.parse_keyword(Keyword::LEFT) {
8323 Some(PartitionRangeDirection::Left)
8324 } else if self.parse_keyword(Keyword::RIGHT) {
8325 Some(PartitionRangeDirection::Right)
8326 } else {
8327 None
8328 };
8329
8330 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8331 self.expect_token(&Token::LParen)?;
8332
8333 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
8334
8335 self.expect_token(&Token::RParen)?;
8336 self.expect_token(&Token::RParen)?;
8337
8338 Ok(SqlOption::Partition {
8339 column_name,
8340 range_direction,
8341 for_values,
8342 })
8343 }
8344
8345 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
8346 self.expect_token(&Token::LParen)?;
8347 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8348 self.expect_token(&Token::RParen)?;
8349 Ok(Partition::Partitions(partitions))
8350 }
8351
8352 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
8353 self.expect_token(&Token::LParen)?;
8354 self.expect_keyword_is(Keyword::SELECT)?;
8355 let projection = self.parse_projection()?;
8356 let group_by = self.parse_optional_group_by()?;
8357 let order_by = self.parse_optional_order_by()?;
8358 self.expect_token(&Token::RParen)?;
8359 Ok(ProjectionSelect {
8360 projection,
8361 group_by,
8362 order_by,
8363 })
8364 }
8365 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
8366 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8367 let name = self.parse_identifier()?;
8368 let query = self.parse_projection_select()?;
8369 Ok(AlterTableOperation::AddProjection {
8370 if_not_exists,
8371 name,
8372 select: query,
8373 })
8374 }
8375
    /// Parse one operation of an `ALTER TABLE` statement (the statement may
    /// contain several, comma-separated — see `parse_alter`).
    ///
    /// Covers ADD, RENAME, DISABLE/ENABLE, DROP, PARTITION renaming,
    /// CHANGE/MODIFY/ALTER COLUMN, SWAP WITH, OWNER TO, plus several
    /// dialect-gated operations (ClickHouse projections/partitions,
    /// MySQL-style LOCK/ALGORITHM/AUTO_INCREMENT, SET TBLPROPERTIES).
    ///
    /// Branch order matters: each `parse_keyword(s)` call consumes tokens on
    /// success, so more specific alternatives are always tried first.
    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <constraint> | ADD PROJECTION | ADD PARTITION ... | ADD [COLUMN] ...
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                AlterTableOperation::AddConstraint(constraint)
            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
                && self.parse_keyword(Keyword::PROJECTION)
            {
                return self.parse_alter_table_add_projection();
            } else {
                let if_not_exists =
                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
                // Collect any number of `PARTITION (...)` groups.
                let mut new_partitions = vec![];
                loop {
                    if self.parse_keyword(Keyword::PARTITION) {
                        new_partitions.push(self.parse_partition()?);
                    } else {
                        break;
                    }
                }
                if !new_partitions.is_empty() {
                    AlterTableOperation::AddPartitions {
                        if_not_exists,
                        new_partitions,
                    }
                } else {
                    // No partitions seen: this is ADD [COLUMN] <column-def>.
                    let column_keyword = self.parse_keyword(Keyword::COLUMN);

                    // Some dialects also allow IF NOT EXISTS after COLUMN;
                    // elsewhere the flag is forced to false.
                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
                    {
                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
                            || if_not_exists
                    } else {
                        false
                    };

                    let column_def = self.parse_column_def()?;

                    // Optional MySQL-style FIRST / AFTER <col> placement.
                    let column_position = self.parse_column_position()?;

                    AlterTableOperation::AddColumn {
                        column_keyword,
                        if_not_exists,
                        column_def,
                        column_position,
                    }
                }
            }
        } else if self.parse_keyword(Keyword::RENAME) {
            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
                let old_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_name = self.parse_identifier()?;
                AlterTableOperation::RenameConstraint { old_name, new_name }
            } else if self.parse_keyword(Keyword::TO) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable { table_name }
            } else {
                // RENAME [COLUMN] <old> TO <new>; the COLUMN keyword is optional.
                let _ = self.parse_keyword(Keyword::COLUMN);
                let old_column_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_column_name = self.parse_identifier()?;
                AlterTableOperation::RenameColumn {
                    old_column_name,
                    new_column_name,
                }
            }
        } else if self.parse_keyword(Keyword::DISABLE) {
            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::DisableRowLevelSecurity {}
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableTrigger { name }
            } else {
                return self.expected(
                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keyword(Keyword::ENABLE) {
            // Two-keyword forms (ALWAYS/REPLICA ...) must be tried before the
            // bare RULE/TRIGGER forms.
            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysRule { name }
            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysTrigger { name }
            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::EnableRowLevelSecurity {}
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaRule { name }
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaTrigger { name }
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableTrigger { name }
            } else {
                return self.expected(
                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse: remove projection data while keeping the definition.
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::ClearProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse: (re)build projection data for existing rows.
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::MaterializeProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keyword(Keyword::DROP) {
            // `IF EXISTS PARTITION` must be checked before the bare PARTITION.
            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: true,
                }
            } else if self.parse_keyword(Keyword::PARTITION) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: false,
                }
            } else if self.parse_keyword(Keyword::CONSTRAINT) {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropConstraint {
                    if_exists,
                    name,
                    drop_behavior,
                }
            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
                AlterTableOperation::DropPrimaryKey
            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DropForeignKey { name }
            } else if self.parse_keyword(Keyword::PROJECTION)
                && dialect_of!(self is ClickHouseDialect|GenericDialect)
            {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                AlterTableOperation::DropProjection { if_exists, name }
            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
                AlterTableOperation::DropClusteringKey
            } else {
                // Fallback: DROP [COLUMN] [IF EXISTS] <name> [<drop behavior>].
                let _ = self.parse_keyword(Keyword::COLUMN);
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let column_name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropColumn {
                    column_name,
                    if_exists,
                    drop_behavior,
                }
            }
        } else if self.parse_keyword(Keyword::PARTITION) {
            // PARTITION (...) RENAME TO PARTITION (...) — Hive-style rename.
            self.expect_token(&Token::LParen)?;
            let before = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::RENAME)?;
            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
            self.expect_token(&Token::LParen)?;
            let renames = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::RenamePartitions {
                old_partitions: before,
                new_partitions: renames,
            }
        } else if self.parse_keyword(Keyword::CHANGE) {
            // CHANGE [COLUMN] <old> <new> <type> [options] [position] (MySQL-style).
            let _ = self.parse_keyword(Keyword::COLUMN);
            let old_name = self.parse_identifier()?;
            let new_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ChangeColumn {
                old_name,
                new_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::MODIFY) {
            // MODIFY [COLUMN] <name> <type> [options] [position] (MySQL-style).
            let _ = self.parse_keyword(Keyword::COLUMN);
            let col_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ModifyColumn {
                col_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::ALTER) {
            // ALTER [COLUMN] <name> <column operation>.
            let _ = self.parse_keyword(Keyword::COLUMN);
            let column_name = self.parse_identifier()?;
            let is_postgresql = dialect_of!(self is PostgreSqlDialect);

            let op: AlterColumnOperation = if self.parse_keywords(&[
                Keyword::SET,
                Keyword::NOT,
                Keyword::NULL,
            ]) {
                AlterColumnOperation::SetNotNull {}
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
                AlterColumnOperation::DropNotNull {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
                AlterColumnOperation::SetDefault {
                    value: self.parse_expr()?,
                }
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
                AlterColumnOperation::DropDefault {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE])
                || (is_postgresql && self.parse_keyword(Keyword::TYPE))
            {
                let data_type = self.parse_data_type()?;
                // Postgres-only: USING <expr> converts existing values.
                let using = if is_postgresql && self.parse_keyword(Keyword::USING) {
                    Some(self.parse_expr()?)
                } else {
                    None
                };
                AlterColumnOperation::SetDataType { data_type, using }
            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
                // ADD GENERATED [ALWAYS | BY DEFAULT] AS IDENTITY [(seq options)].
                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
                    Some(GeneratedAs::Always)
                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
                    Some(GeneratedAs::ByDefault)
                } else {
                    None
                };

                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;

                let mut sequence_options: Option<Vec<SequenceOptions>> = None;

                if self.peek_token().token == Token::LParen {
                    self.expect_token(&Token::LParen)?;
                    sequence_options = Some(self.parse_create_sequence_options()?);
                    self.expect_token(&Token::RParen)?;
                }

                AlterColumnOperation::AddGenerated {
                    generated_as,
                    sequence_options,
                }
            } else {
                let message = if is_postgresql {
                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
                } else {
                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
                };

                return self.expected(message, self.peek_token());
            };
            AlterTableOperation::AlterColumn { column_name, op }
        } else if self.parse_keyword(Keyword::SWAP) {
            // SWAP WITH <table> (Snowflake-style atomic table swap).
            self.expect_keyword_is(Keyword::WITH)?;
            let table_name = self.parse_object_name(false)?;
            AlterTableOperation::SwapWith { table_name }
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
        {
            let new_owner = self.parse_owner()?;
            AlterTableOperation::OwnerTo { new_owner }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::ATTACH)
        {
            AlterTableOperation::AttachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::DETACH)
        {
            AlterTableOperation::DetachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::FREEZE)
        {
            // FREEZE PART|PARTITION ... [WITH NAME <backup-name>].
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::FreezePartition {
                partition,
                with_name,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::UNFREEZE)
        {
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::UnfreezePartition {
                partition,
                with_name,
            }
        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::ClusterBy { exprs }
        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
            AlterTableOperation::SuspendRecluster
        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
            AlterTableOperation::ResumeRecluster
        } else if self.parse_keyword(Keyword::LOCK) {
            // LOCK [=] DEFAULT|EXCLUSIVE|NONE|SHARED; the `=` is optional.
            let equals = self.consume_token(&Token::Eq);
            let lock = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::EXCLUSIVE,
                Keyword::NONE,
                Keyword::SHARED,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableLock::Default,
                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
                Some(Keyword::NONE) => AlterTableLock::None,
                Some(Keyword::SHARED) => AlterTableLock::Shared,
                _ => self.expected(
                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Lock { equals, lock }
        } else if self.parse_keyword(Keyword::ALGORITHM) {
            // ALGORITHM [=] DEFAULT|INSTANT|INPLACE|COPY; the `=` is optional.
            let equals = self.consume_token(&Token::Eq);
            let algorithm = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::INSTANT,
                Keyword::INPLACE,
                Keyword::COPY,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
                _ => self.expected(
                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Algorithm { equals, algorithm }
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            // AUTO_INCREMENT [=] <number>.
            let equals = self.consume_token(&Token::Eq);
            let value = self.parse_number_value()?;
            AlterTableOperation::AutoIncrement { equals, value }
        } else {
            // Last resort: SET TBLPROPERTIES (...); otherwise report an error.
            let options: Vec<SqlOption> =
                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
            if !options.is_empty() {
                AlterTableOperation::SetTblProperties {
                    table_properties: options,
                }
            } else {
                return self.expected(
                    "ADD, RENAME, PARTITION, SWAP, DROP, or SET TBLPROPERTIES after ALTER TABLE",
                    self.peek_token(),
                );
            }
        };
        Ok(operation)
    }
8789
8790 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
8791 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
8792 match keyword {
8793 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
8794 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
8795 _ => unreachable!(),
8797 }
8798 }
8799
    /// Parse an `ALTER` statement, dispatching on the object kind
    /// (VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR).
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
        ])?;
        match object_type {
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::TABLE => {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                // [ ONLY ] — restricts the change to this table only.
                let only = self.parse_keyword(Keyword::ONLY);
                let table_name = self.parse_object_name(false)?;
                let on_cluster = self.parse_optional_on_cluster()?;
                // One or more comma-separated operations.
                let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;

                // Optional trailing [SET] LOCATION clause; `has_set` records
                // which spelling was used so it can round-trip on display.
                let mut location = None;
                if self.parse_keyword(Keyword::LOCATION) {
                    location = Some(HiveSetLocation {
                        has_set: false,
                        location: self.parse_identifier()?,
                    });
                } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
                    location = Some(HiveSetLocation {
                        has_set: true,
                        location: self.parse_identifier()?,
                    });
                }

                Ok(Statement::AlterTable {
                    name: table_name,
                    if_exists,
                    only,
                    operations,
                    location,
                    on_cluster,
                })
            }
            Keyword::INDEX => {
                let index_name = self.parse_object_name(false)?;
                // Only `ALTER INDEX <name> RENAME TO <new>` is supported here.
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected("TO after RENAME", self.peek_token());
                    }
                } else {
                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy(),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            // `expect_one_of_keywords` only returns keywords from the list above.
            _ => unreachable!(),
        }
    }
8867
8868 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
8869 let name = self.parse_object_name(false)?;
8870 let columns = self.parse_parenthesized_column_list(Optional, false)?;
8871
8872 let with_options = self.parse_options(Keyword::WITH)?;
8873
8874 self.expect_keyword_is(Keyword::AS)?;
8875 let query = self.parse_query()?;
8876
8877 Ok(Statement::AlterView {
8878 name,
8879 columns,
8880 query,
8881 with_options,
8882 })
8883 }
8884
8885 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
8887 let name = self.parse_object_name(false)?;
8888
8889 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
8890 let new_name = self.parse_identifier()?;
8891 Ok(Statement::AlterType(AlterType {
8892 name,
8893 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
8894 }))
8895 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
8896 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8897 let new_enum_value = self.parse_identifier()?;
8898 let position = if self.parse_keyword(Keyword::BEFORE) {
8899 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
8900 } else if self.parse_keyword(Keyword::AFTER) {
8901 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
8902 } else {
8903 None
8904 };
8905
8906 Ok(Statement::AlterType(AlterType {
8907 name,
8908 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
8909 if_not_exists,
8910 value: new_enum_value,
8911 position,
8912 }),
8913 }))
8914 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
8915 let existing_enum_value = self.parse_identifier()?;
8916 self.expect_keyword(Keyword::TO)?;
8917 let new_enum_value = self.parse_identifier()?;
8918
8919 Ok(Statement::AlterType(AlterType {
8920 name,
8921 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
8922 from: existing_enum_value,
8923 to: new_enum_value,
8924 }),
8925 }))
8926 } else {
8927 return self.expected_ref(
8928 "{RENAME TO | { RENAME | ADD } VALUE}",
8929 self.peek_token_ref(),
8930 );
8931 }
8932 }
8933
8934 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
8937 let object_name = self.parse_object_name(false)?;
8938 if self.peek_token().token == Token::LParen {
8939 match self.parse_function(object_name)? {
8940 Expr::Function(f) => Ok(Statement::Call(f)),
8941 other => parser_err!(
8942 format!("Expected a simple procedure call but found: {other}"),
8943 self.peek_token().span.start
8944 ),
8945 }
8946 } else {
8947 Ok(Statement::Call(Function {
8948 name: object_name,
8949 uses_odbc_syntax: false,
8950 parameters: FunctionArguments::None,
8951 args: FunctionArguments::None,
8952 over: None,
8953 filter: None,
8954 null_treatment: None,
8955 within_group: vec![],
8956 }))
8957 }
8958 }
8959
    /// Parse a PostgreSQL-style `COPY` statement.
    ///
    /// Handles both `COPY <table> [(cols)]` and `COPY (<query>)` sources,
    /// the FROM/TO direction, STDIN/STDOUT/PROGRAM/file targets, the modern
    /// parenthesized option list, legacy (pre-9.0) options, and — for
    /// `COPY ... FROM STDIN` — the inline tab-separated data that follows.
    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
        let source;
        if self.consume_token(&Token::LParen) {
            // COPY (<query>) ...
            source = CopySource::Query(self.parse_query()?);
            self.expect_token(&Token::RParen)?;
        } else {
            // COPY <table> [(col, ...)] ...
            let table_name = self.parse_object_name(false)?;
            let columns = self.parse_parenthesized_column_list(Optional, false)?;
            source = CopySource::Table {
                table_name,
                columns,
            };
        }
        // Direction: FROM loads into the source, TO exports from it.
        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
            Some(Keyword::FROM) => false,
            Some(Keyword::TO) => true,
            _ => self.expected("FROM or TO", self.peek_token())?,
        };
        if !to {
            // A query source is export-only; COPY FROM needs a real table.
            if let CopySource::Query(_) = source {
                return Err(ParserError::ParserError(
                    "COPY ... FROM does not support query as a source".to_string(),
                ));
            }
        }
        let target = if self.parse_keyword(Keyword::STDIN) {
            CopyTarget::Stdin
        } else if self.parse_keyword(Keyword::STDOUT) {
            CopyTarget::Stdout
        } else if self.parse_keyword(Keyword::PROGRAM) {
            CopyTarget::Program {
                command: self.parse_literal_string()?,
            }
        } else {
            CopyTarget::File {
                filename: self.parse_literal_string()?,
            }
        };
        // Optional WITH before the parenthesized option list.
        let _ = self.parse_keyword(Keyword::WITH);
        let mut options = vec![];
        if self.consume_token(&Token::LParen) {
            options = self.parse_comma_separated(Parser::parse_copy_option)?;
            self.expect_token(&Token::RParen)?;
        }
        // Legacy (unparenthesized) options may follow; `maybe_parse`
        // backtracks when the next tokens are not an option.
        let mut legacy_options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            legacy_options.push(opt);
        }
        // For COPY ... FROM STDIN the inline data follows the `;`.
        let values = if let CopyTarget::Stdin = target {
            self.expect_token(&Token::SemiColon)?;
            self.parse_tsv()
        } else {
            vec![]
        };
        Ok(Statement::Copy {
            source,
            to,
            target,
            options,
            legacy_options,
            values,
        })
    }
9026
9027 fn parse_open(&mut self) -> Result<Statement, ParserError> {
9029 self.expect_keyword(Keyword::OPEN)?;
9030 Ok(Statement::Open(OpenStatement {
9031 cursor_name: self.parse_identifier()?,
9032 }))
9033 }
9034
9035 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
9036 let cursor = if self.parse_keyword(Keyword::ALL) {
9037 CloseCursor::All
9038 } else {
9039 let name = self.parse_identifier()?;
9040
9041 CloseCursor::Specific { name }
9042 };
9043
9044 Ok(Statement::Close { cursor })
9045 }
9046
    /// Parse one option from a modern parenthesized `COPY ... WITH (...)`
    /// option list.
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
            // Boolean options: a bare keyword means true; only an explicit
            // FALSE disables them.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected("option", self.peek_token())?,
        };
        Ok(ret)
    }
9088
    /// Parse one legacy (pre-9.0, unparenthesized) `COPY` option.
    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::BINARY,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::CSV,
        ]) {
            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
            Some(Keyword::DELIMITER) => {
                // The AS keyword is optional in the legacy syntax.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
            }
            Some(Keyword::NULL) => {
                // The AS keyword is optional in the legacy syntax.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyOption::Null(self.parse_literal_string()?)
            }
            Some(Keyword::CSV) => CopyLegacyOption::Csv({
                // CSV may be followed by any number of CSV sub-options;
                // `maybe_parse` backtracks when the next tokens are not one.
                let mut opts = vec![];
                while let Some(opt) =
                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
                {
                    opts.push(opt);
                }
                opts
            }),
            _ => self.expected("option", self.peek_token())?,
        };
        Ok(ret)
    }
9118
    /// Parse one sub-option of the legacy `COPY ... CSV` option.
    ///
    /// The two FORCE arms use side-effecting match guards: `FORCE NOT NULL`
    /// must be tried before `FORCE QUOTE`, and each guard consumes its
    /// keywords only when it matches.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                // The AS keyword is optional in the legacy syntax.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                // The AS keyword is optional in the legacy syntax.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            _ => self.expected("csv option", self.peek_token())?,
        };
        Ok(ret)
    }
9149
9150 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
9151 let s = self.parse_literal_string()?;
9152 if s.len() != 1 {
9153 let loc = self
9154 .tokens
9155 .get(self.index - 1)
9156 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
9157 return parser_err!(format!("Expect a char, found {s:?}"), loc);
9158 }
9159 Ok(s.chars().next().unwrap())
9160 }
9161
    /// Parse the inline tab-separated data that follows
    /// `COPY ... FROM STDIN;`. Thin alias for `parse_tab_value`.
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
9167
9168 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
9169 let mut values = vec![];
9170 let mut content = String::from("");
9171 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
9172 match t {
9173 Token::Whitespace(Whitespace::Tab) => {
9174 values.push(Some(content.to_string()));
9175 content.clear();
9176 }
9177 Token::Whitespace(Whitespace::Newline) => {
9178 values.push(Some(content.to_string()));
9179 content.clear();
9180 }
9181 Token::Backslash => {
9182 if self.consume_token(&Token::Period) {
9183 return values;
9184 }
9185 if let Token::Word(w) = self.next_token().token {
9186 if w.value == "N" {
9187 values.push(None);
9188 }
9189 }
9190 }
9191 _ => {
9192 content.push_str(&t.to_string());
9193 }
9194 }
9195 }
9196 values
9197 }
9198
    /// Parse a literal value (number, string, boolean, NULL, placeholder,
    /// ...), returning it together with its source span.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper that attaches the consumed token's span to a `Value`.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are values only in dialects with boolean literals.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a string literal.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            // Numbers are normalized through `Self::parse`; `l` flags a
            // trailing `L` (long) suffix.
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // String-like tokens map 1:1 onto their `Value` counterparts.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(s.to_string())),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(s.to_string())),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the sigil followed by a word or
            // an integer, concatenated into one placeholder string.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::new(w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                let placeholder = tok.to_string() + &ident.value;
                ok_value(Value::Placeholder(placeholder))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
9301
9302 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
9304 let value_wrapper = self.parse_value()?;
9305 match &value_wrapper.value {
9306 Value::Number(_, _) => Ok(value_wrapper),
9307 Value::Placeholder(_) => Ok(value_wrapper),
9308 _ => {
9309 self.prev_token();
9310 self.expected("literal number", self.peek_token())
9311 }
9312 }
9313 }
9314
9315 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
9318 let next_token = self.next_token();
9319 match next_token.token {
9320 Token::Plus => Ok(Expr::UnaryOp {
9321 op: UnaryOperator::Plus,
9322 expr: Box::new(Expr::Value(self.parse_number_value()?)),
9323 }),
9324 Token::Minus => Ok(Expr::UnaryOp {
9325 op: UnaryOperator::Minus,
9326 expr: Box::new(Expr::Value(self.parse_number_value()?)),
9327 }),
9328 _ => {
9329 self.prev_token();
9330 Ok(Expr::Value(self.parse_number_value()?))
9331 }
9332 }
9333 }
9334
9335 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
9336 let next_token = self.next_token();
9337 let span = next_token.span;
9338 match next_token.token {
9339 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
9340 Value::SingleQuotedString(s.to_string()).with_span(span),
9341 )),
9342 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
9343 Value::DoubleQuotedString(s.to_string()).with_span(span),
9344 )),
9345 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
9346 Value::HexStringLiteral(s.to_string()).with_span(span),
9347 )),
9348 unexpected => self.expected(
9349 "a string value",
9350 TokenWithSpan {
9351 token: unexpected,
9352 span,
9353 },
9354 ),
9355 }
9356 }
9357
9358 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
9360 let next_token = self.next_token();
9361 match next_token.token {
9362 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
9363 _ => self.expected("literal int", next_token),
9364 }
9365 }
9366
    /// Parses a `CREATE FUNCTION` body given as a string.
    ///
    /// On Postgres-like dialects the body may be dollar-quoted
    /// (`$$ ... $$`); otherwise it falls back to an ordinary string literal.
    fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
        // Peek first: the dollar-quoted token is only consumed when the
        // dialect actually supports that form.
        let peek_token = self.peek_token();
        let span = peek_token.span;
        match peek_token.token {
            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                self.next_token();
                Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
            }
            _ => Ok(Expr::Value(
                Value::SingleQuotedString(self.parse_literal_string()?).with_span(span),
            )),
        }
    }
9383
    /// Parses a string literal, also accepting a bare non-keyword word as
    /// the string value.
    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            // A plain identifier-like word (not a reserved keyword) is
            // accepted as an unquoted string value.
            Token::Word(Word {
                value,
                keyword: Keyword::NoKeyword,
                ..
            }) => Ok(value),
            Token::SingleQuotedString(s) => Ok(s),
            Token::DoubleQuotedString(s) => Ok(s),
            // `E'...'` escaped strings are Postgres-specific.
            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s)
            }
            Token::UnicodeStringLiteral(s) => Ok(s),
            _ => self.expected("literal string", next_token),
        }
    }
9402
9403 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
9405 let neg = self.parse_keyword(Keyword::NOT);
9406 let normalized_form = self.maybe_parse(|parser| {
9407 match parser.parse_one_of_keywords(&[
9408 Keyword::NFC,
9409 Keyword::NFD,
9410 Keyword::NFKC,
9411 Keyword::NFKD,
9412 ]) {
9413 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
9414 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
9415 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
9416 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
9417 _ => parser.expected("unicode normalization form", parser.peek_token()),
9418 }
9419 })?;
9420 if self.parse_keyword(Keyword::NORMALIZED) {
9421 return Ok(Expr::IsNormalized {
9422 expr: Box::new(expr),
9423 form: normalized_form,
9424 negated: neg,
9425 });
9426 }
9427 self.expected("unicode normalization form", self.peek_token())
9428 }
9429
9430 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
9431 self.expect_token(&Token::LParen)?;
9432 let values = self.parse_comma_separated(|parser| {
9433 let name = parser.parse_literal_string()?;
9434 let e = if parser.consume_token(&Token::Eq) {
9435 let value = parser.parse_number()?;
9436 EnumMember::NamedValue(name, value)
9437 } else {
9438 EnumMember::Name(name)
9439 };
9440 Ok(e)
9441 })?;
9442 self.expect_token(&Token::RParen)?;
9443
9444 Ok(values)
9445 }
9446
9447 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
9449 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
9450 if trailing_bracket.0 {
9451 return parser_err!(
9452 format!("unmatched > after parsing data type {ty}"),
9453 self.peek_token()
9454 );
9455 }
9456
9457 Ok(ty)
9458 }
9459
    /// Workhorse behind [`Parser::parse_data_type`].
    ///
    /// Returns the parsed type together with a flag telling the caller
    /// whether a trailing `>` (consumed as part of a `>>` while parsing a
    /// nested angle-bracketed type) is still "owed" to an enclosing type.
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        let next_token_index = self.get_current_index();

        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                // Boolean and floating-point types.
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                Keyword::FLOAT => Ok(DataType::Float(self.parse_optional_precision()?)),
                Keyword::REAL => Ok(DataType::Real),
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    if self.parse_keyword(Keyword::PRECISION) {
                        Ok(DataType::DoublePrecision)
                    } else {
                        Ok(DataType::Double(
                            self.parse_exact_number_optional_precision_scale()?,
                        ))
                    }
                }
                // Integer types; a trailing MySQL-style UNSIGNED keyword
                // selects the unsigned variant. Note the precision is
                // parsed BEFORE checking UNSIGNED but its Result is only
                // unwrapped afterwards, preserving error ordering.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                // DuckDB/ClickHouse-style fixed-width integer aliases.
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // Character/string types.
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                // Binary/blob types.
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                // Date/time types.
                Keyword::UUID => Ok(DataType::Uuid),
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // DateTime64 has its own argument grammar; back up so
                    // the dedicated helper sees the keyword again.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => Ok(DataType::TimestampNtz),
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => Ok(DataType::Interval),
                // Semi-structured and miscellaneous types.
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // Exact-numeric types with optional (precision, scale).
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => Ok(DataType::Decimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DEC => Ok(DataType::Dec(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                // Composite types — these are where a `>>` may leave a
                // trailing bracket owed to an enclosing type.
                Keyword::ARRAY => {
                    if dialect_of!(self is SnowflakeDialect) {
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                Keyword::TABLE => {
                    let columns = self.parse_returns_table_columns()?;
                    Ok(DataType::Table(columns))
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                // Anything else is treated as a custom (user-defined)
                // type name with optional modifiers.
                _ => {
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Dialects like Postgres allow `INT[]` / `INT[5]` suffix notation,
        // possibly nested (`INT[][]`).
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
9809
9810 fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
9811 let name = self.parse_identifier()?;
9812 let data_type = self.parse_data_type()?;
9813 Ok(ColumnDef {
9814 name,
9815 data_type,
9816 options: Vec::new(), })
9818 }
9819
9820 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
9821 self.expect_token(&Token::LParen)?;
9822 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
9823 self.expect_token(&Token::RParen)?;
9824 Ok(columns)
9825 }
9826
9827 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
9828 self.expect_token(&Token::LParen)?;
9829 let mut values = Vec::new();
9830 loop {
9831 let next_token = self.next_token();
9832 match next_token.token {
9833 Token::SingleQuotedString(value) => values.push(value),
9834 _ => self.expected("a string", next_token)?,
9835 }
9836 let next_token = self.next_token();
9837 match next_token.token {
9838 Token::Comma => (),
9839 Token::RParen => break,
9840 _ => self.expected(", or }", next_token)?,
9841 }
9842 }
9843 Ok(values)
9844 }
9845
9846 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
9848 let ident = self.parse_identifier()?;
9849 self.expect_keyword_is(Keyword::AS)?;
9850 let alias = self.parse_identifier()?;
9851 Ok(IdentWithAlias { ident, alias })
9852 }
9853
9854 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
9856 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
9857 parser.dialect.is_select_item_alias(explicit, kw, parser)
9858 }
9859 self.parse_optional_alias_inner(None, validator)
9860 }
9861
9862 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
9866 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
9867 parser.dialect.is_table_factor_alias(explicit, kw, parser)
9868 }
9869 match self.parse_optional_alias_inner(None, validator)? {
9870 Some(name) => {
9871 let columns = self.parse_table_alias_column_defs()?;
9872 Ok(Some(TableAlias { name, columns }))
9873 }
9874 None => Ok(None),
9875 }
9876 }
9877
    /// Parses zero or more MySQL-style table index hints, e.g.
    /// `USE INDEX (idx1) FOR JOIN`, `IGNORE KEY (idx2)`.
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                // Unreachable given the keyword list above; kept defensive.
                _ => {
                    return self.expected(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token(),
                    )
                }
            };
            // INDEX or KEY must follow the hint type.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
                }
            };
            // Optional `FOR JOIN | ORDER BY | GROUP BY` scope clause.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // The parenthesized index-name list may be empty: `USE INDEX ()`.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
9935
9936 pub fn parse_optional_alias(
9940 &mut self,
9941 reserved_kwds: &[Keyword],
9942 ) -> Result<Option<Ident>, ParserError> {
9943 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
9944 false
9945 }
9946 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
9947 }
9948
    /// Shared implementation for optional-alias parsing.
    ///
    /// When `reserved_kwds` is given, its keywords may NOT be used as a
    /// bare (non-`AS`) alias; after an explicit `AS`, any word is accepted.
    /// The `validator` callback lets a dialect admit additional words.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Accept the word when `AS` was seen, or when it is not in the
            // caller's reserved list. This arm must stay before the
            // validator arm so the reserved-list rule takes precedence.
            Token::Word(w)
                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Otherwise defer to the dialect-specific validator.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                // An explicit `AS` must be followed by an identifier.
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                // No alias present: rewind the token we consumed.
                self.prev_token();
                Ok(None) }
        }
    }
9992
    /// Parses an optional `GROUP BY` clause, including `GROUP BY ALL`,
    /// `WITH ROLLUP/CUBE/TOTALS` modifiers (dialect-gated), and
    /// `GROUPING SETS (...)`.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `None` means `GROUP BY ALL`; `Some(..)` an explicit expr list.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Multiple `WITH <modifier>` clauses may be chained.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords limits the set.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token().span.start
                            )
                        }
                    });
                }
            }
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
10042
    /// Parses an optional `ORDER BY` clause, supporting `ORDER BY ALL`
    /// (dialect-gated) and ClickHouse `INTERPOLATE` extensions.
    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            let order_by =
                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
                    let order_by_options = self.parse_order_by_options()?;
                    OrderBy {
                        kind: OrderByKind::All(order_by_options),
                        interpolate: None,
                    }
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    // INTERPOLATE is a ClickHouse extension.
                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
                        self.parse_interpolations()?
                    } else {
                        None
                    };
                    OrderBy {
                        kind: OrderByKind::Expressions(exprs),
                        interpolate,
                    }
                };
            Ok(Some(order_by))
        } else {
            Ok(None)
        }
    }
10069
    /// Parses an optional LIMIT/OFFSET clause in any of its accepted
    /// shapes: `OFFSET .. LIMIT ..`, `LIMIT .. OFFSET ..`,
    /// MySQL's `LIMIT <offset>, <limit>`, and ClickHouse's `LIMIT .. BY ..`.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may legally precede LIMIT.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL `LIMIT <offset>, <limit>` — only when no OFFSET was
            // already parsed and the first expression is present.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                // `is_some()` was checked above, so this cannot fail;
                // the error is a defensive message.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // ClickHouse `LIMIT n BY expr, ...`.
            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::BY)
            {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT (but not appear twice).
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit == Some(None)` means `LIMIT` with no expression (LIMIT ALL
        // style); alone it does not warrant emitting a clause.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
10123
10124 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
10127 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
10128 let fn_name = self.parse_object_name(false)?;
10129 self.parse_function_call(fn_name)
10130 .map(TableObject::TableFunction)
10131 } else {
10132 self.parse_object_name(false).map(TableObject::TableName)
10133 }
10134 }
10135
    /// Parses a possibly-qualified object name (`a.b.c`), optionally
    /// allowing `*` as a path segment.
    ///
    /// In a BigQuery table clause, segments may be unquoted hyphenated
    /// identifiers (e.g. `my-project.dataset.table`).
    fn parse_object_name_with_wildcards(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut idents = vec![];

        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                idents.push(ident);
                // A segment that itself swallowed a trailing period also
                // continues the loop.
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                let ident = if allow_wildcards && self.peek_token().token == Token::Mul {
                    let span = self.next_token().span;
                    Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }
                } else {
                    // Double-dot notation (`db..table`) inserts an empty
                    // middle segment on dialects that support it.
                    if self.dialect.supports_object_name_double_dot_notation()
                        && idents.len() == 1
                        && self.consume_token(&Token::Period)
                    {
                        idents.push(Ident::new(""));
                    }
                    self.parse_identifier()?
                };
                idents.push(ident);
                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }
        Ok(ObjectName::from(idents))
    }
10180
    /// Parses a possibly-qualified object name (no wildcards).
    ///
    /// On BigQuery, quoted identifiers may themselves contain periods
    /// (e.g. `` `proj.dataset.table` ``); such parts are split into
    /// separate path segments here.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        let ObjectName(mut idents) =
            self.parse_object_name_with_wildcards(in_table_clause, false)?;

        if dialect_of!(self is BigQueryDialect)
            && idents.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            // Split every dotted identifier part; each resulting segment
            // inherits the original part's quote style and span.
            idents = idents
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(idents))
    }
10220
    /// Collects every word token up to (but not consuming past) the first
    /// `=` or EOF, returning them as identifiers.
    ///
    /// Non-word tokens other than `=`/EOF (e.g. periods, commas) are
    /// silently skipped.
    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];
        loop {
            match &self.peek_token_ref().token {
                Token::Word(w) => {
                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
                }
                // Stop before `=` / EOF so the caller can still see them.
                Token::EOF | Token::Eq => break,
                _ => {}
            }
            self.advance_token();
        }
        Ok(idents)
    }
10236
10237 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
10277 let mut idents = vec![];
10278
10279 let next_token = self.next_token();
10281 match next_token.token {
10282 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
10283 Token::EOF => {
10284 return Err(ParserError::ParserError(
10285 "Empty input when parsing identifier".to_string(),
10286 ))?
10287 }
10288 token => {
10289 return Err(ParserError::ParserError(format!(
10290 "Unexpected token in identifier: {token}"
10291 )))?
10292 }
10293 };
10294
10295 loop {
10297 match self.next_token().token {
10298 Token::Period => {
10300 let next_token = self.next_token();
10301 match next_token.token {
10302 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
10303 Token::EOF => {
10304 return Err(ParserError::ParserError(
10305 "Trailing period in identifier".to_string(),
10306 ))?
10307 }
10308 token => {
10309 return Err(ParserError::ParserError(format!(
10310 "Unexpected token following period in identifier: {token}"
10311 )))?
10312 }
10313 }
10314 }
10315 Token::EOF => break,
10316 token => {
10317 return Err(ParserError::ParserError(format!(
10318 "Unexpected token in identifier: {token}"
10319 )))?
10320 }
10321 }
10322 }
10323
10324 Ok(idents)
10325 }
10326
10327 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
10329 let next_token = self.next_token();
10330 match next_token.token {
10331 Token::Word(w) => Ok(w.into_ident(next_token.span)),
10332 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
10333 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
10334 _ => self.expected("identifier", next_token),
10335 }
10336 }
10337
    /// Parse an identifier that may contain unquoted hyphens (e.g. BigQuery
    /// project ids such as `my-project`).
    ///
    /// Returns the assembled identifier plus a boolean that is `true` only
    /// when a trailing `<digits>.` segment was absorbed (the early return
    /// below) — presumably signalling the caller that the period has been
    /// consumed here; TODO confirm against the caller.
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted words may absorb `-` continuations.
                if quote_style_is_none {
                    // No-skip peeking so that whitespace terminates the
                    // hyphen chain.
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // A number ending in '.' means the tokenizer
                                // swallowed the period separating the
                                // identifier from what follows (e.g. in
                                // `foo-123.bar`): keep only the digits
                                // before the dot and return early.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A numeric tail must be followed by
                                // whitespace unless a period comes next.
                                !matches!(self.peek_token().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement recorded above.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to the regular identifier parser.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
10415
10416 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
10418 if self.consume_token(&Token::LParen) {
10419 if self.peek_token().token == Token::RParen {
10420 self.next_token();
10421 Ok(vec![])
10422 } else {
10423 let cols = self.parse_comma_separated_with_trailing_commas(
10424 Parser::parse_view_column,
10425 self.dialect.supports_column_definition_trailing_commas(),
10426 Self::is_reserved_for_column_alias,
10427 )?;
10428 self.expect_token(&Token::RParen)?;
10429 Ok(cols)
10430 }
10431 } else {
10432 Ok(vec![])
10433 }
10434 }
10435
    /// Parse a single column in a view definition's column list.
    ///
    /// Optionally parses BigQuery-style `OPTIONS(...)` or Snowflake-style
    /// `COMMENT` column options and, for ClickHouse, an explicit data type.
    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
        let name = self.parse_identifier()?;
        let options = if (dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS))
            || (dialect_of!(self is SnowflakeDialect | GenericDialect)
                && self.parse_keyword(Keyword::COMMENT))
        {
            // Push the just-consumed keyword back so the generic
            // column-option parser can re-read it.
            self.prev_token();
            self.parse_optional_column_option()?
                .map(|option| vec![option])
        } else {
            None
        };
        let data_type = if dialect_of!(self is ClickHouseDialect) {
            Some(self.parse_data_type()?)
        } else {
            None
        };
        Ok(ViewColumnDef {
            name,
            data_type,
            options,
        })
    }
10461
10462 pub fn parse_parenthesized_column_list(
10465 &mut self,
10466 optional: IsOptional,
10467 allow_empty: bool,
10468 ) -> Result<Vec<Ident>, ParserError> {
10469 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
10470 }
10471
10472 pub fn parse_parenthesized_qualified_column_list(
10475 &mut self,
10476 optional: IsOptional,
10477 allow_empty: bool,
10478 ) -> Result<Vec<ObjectName>, ParserError> {
10479 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
10480 p.parse_object_name(true)
10481 })
10482 }
10483
10484 fn parse_parenthesized_column_list_inner<F, T>(
10487 &mut self,
10488 optional: IsOptional,
10489 allow_empty: bool,
10490 mut f: F,
10491 ) -> Result<Vec<T>, ParserError>
10492 where
10493 F: FnMut(&mut Parser) -> Result<T, ParserError>,
10494 {
10495 if self.consume_token(&Token::LParen) {
10496 if allow_empty && self.peek_token().token == Token::RParen {
10497 self.next_token();
10498 Ok(vec![])
10499 } else {
10500 let cols = self.parse_comma_separated(|p| f(p))?;
10501 self.expect_token(&Token::RParen)?;
10502 Ok(cols)
10503 }
10504 } else if optional == Optional {
10505 Ok(vec![])
10506 } else {
10507 self.expected("a list of columns in parentheses", self.peek_token())
10508 }
10509 }
10510
10511 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
10513 if self.consume_token(&Token::LParen) {
10514 let cols = self.parse_comma_separated(|p| {
10515 let name = p.parse_identifier()?;
10516 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
10517 Ok(TableAliasColumnDef { name, data_type })
10518 })?;
10519 self.expect_token(&Token::RParen)?;
10520 Ok(cols)
10521 } else {
10522 Ok(vec![])
10523 }
10524 }
10525
10526 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
10527 self.expect_token(&Token::LParen)?;
10528 let n = self.parse_literal_uint()?;
10529 self.expect_token(&Token::RParen)?;
10530 Ok(n)
10531 }
10532
10533 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
10534 if self.consume_token(&Token::LParen) {
10535 let n = self.parse_literal_uint()?;
10536 self.expect_token(&Token::RParen)?;
10537 Ok(Some(n))
10538 } else {
10539 Ok(None)
10540 }
10541 }
10542
10543 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
10551 self.expect_keyword_is(Keyword::DATETIME64)?;
10552 self.expect_token(&Token::LParen)?;
10553 let precision = self.parse_literal_uint()?;
10554 let time_zone = if self.consume_token(&Token::Comma) {
10555 Some(self.parse_literal_string()?)
10556 } else {
10557 None
10558 };
10559 self.expect_token(&Token::RParen)?;
10560 Ok((precision, time_zone))
10561 }
10562
10563 pub fn parse_optional_character_length(
10564 &mut self,
10565 ) -> Result<Option<CharacterLength>, ParserError> {
10566 if self.consume_token(&Token::LParen) {
10567 let character_length = self.parse_character_length()?;
10568 self.expect_token(&Token::RParen)?;
10569 Ok(Some(character_length))
10570 } else {
10571 Ok(None)
10572 }
10573 }
10574
10575 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
10576 if self.consume_token(&Token::LParen) {
10577 let binary_length = self.parse_binary_length()?;
10578 self.expect_token(&Token::RParen)?;
10579 Ok(Some(binary_length))
10580 } else {
10581 Ok(None)
10582 }
10583 }
10584
10585 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
10586 if self.parse_keyword(Keyword::MAX) {
10587 return Ok(CharacterLength::Max);
10588 }
10589 let length = self.parse_literal_uint()?;
10590 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
10591 Some(CharLengthUnits::Characters)
10592 } else if self.parse_keyword(Keyword::OCTETS) {
10593 Some(CharLengthUnits::Octets)
10594 } else {
10595 None
10596 };
10597 Ok(CharacterLength::IntegerLength { length, unit })
10598 }
10599
10600 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
10601 if self.parse_keyword(Keyword::MAX) {
10602 return Ok(BinaryLength::Max);
10603 }
10604 let length = self.parse_literal_uint()?;
10605 Ok(BinaryLength::IntegerLength { length })
10606 }
10607
10608 pub fn parse_optional_precision_scale(
10609 &mut self,
10610 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
10611 if self.consume_token(&Token::LParen) {
10612 let n = self.parse_literal_uint()?;
10613 let scale = if self.consume_token(&Token::Comma) {
10614 Some(self.parse_literal_uint()?)
10615 } else {
10616 None
10617 };
10618 self.expect_token(&Token::RParen)?;
10619 Ok((Some(n), scale))
10620 } else {
10621 Ok((None, None))
10622 }
10623 }
10624
10625 pub fn parse_exact_number_optional_precision_scale(
10626 &mut self,
10627 ) -> Result<ExactNumberInfo, ParserError> {
10628 if self.consume_token(&Token::LParen) {
10629 let precision = self.parse_literal_uint()?;
10630 let scale = if self.consume_token(&Token::Comma) {
10631 Some(self.parse_literal_uint()?)
10632 } else {
10633 None
10634 };
10635
10636 self.expect_token(&Token::RParen)?;
10637
10638 match scale {
10639 None => Ok(ExactNumberInfo::Precision(precision)),
10640 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
10641 }
10642 } else {
10643 Ok(ExactNumberInfo::None)
10644 }
10645 }
10646
10647 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
10648 if self.consume_token(&Token::LParen) {
10649 let mut modifiers = Vec::new();
10650 loop {
10651 let next_token = self.next_token();
10652 match next_token.token {
10653 Token::Word(w) => modifiers.push(w.to_string()),
10654 Token::Number(n, _) => modifiers.push(n),
10655 Token::SingleQuotedString(s) => modifiers.push(s),
10656
10657 Token::Comma => {
10658 continue;
10659 }
10660 Token::RParen => {
10661 break;
10662 }
10663 _ => self.expected("type modifiers", next_token)?,
10664 }
10665 }
10666
10667 Ok(Some(modifiers))
10668 } else {
10669 Ok(None)
10670 }
10671 }
10672
10673 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
10675 where
10676 F: FnOnce(Box<DataType>) -> DataType,
10677 {
10678 self.expect_token(&Token::LParen)?;
10679 let inside_type = self.parse_data_type()?;
10680 self.expect_token(&Token::RParen)?;
10681 Ok(parent_type(inside_type.into()))
10682 }
10683
10684 fn parse_delete_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
10688 Ok(Box::new(SetExpr::Delete(self.parse_delete()?)))
10689 }
10690
    /// Parse a `DELETE` statement (the `DELETE` keyword has already been
    /// consumed by the caller).
    ///
    /// Handles the multi-table form `DELETE t1, t2 FROM ...`, BigQuery's
    /// omitted `FROM` keyword, and the optional `USING`, `WHERE`,
    /// `RETURNING`, `ORDER BY` and `LIMIT` clauses.
    pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            // BigQuery allows `DELETE <table>` with no FROM; other dialects
            // reaching this branch must be `DELETE t1, t2 FROM ...`.
            if dialect_of!(self is BigQueryDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            tables,
            // Record whether FROM was spelled out so the AST can be
            // round-tripped to the original text.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            order_by,
            limit,
        }))
    }
10747
10748 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
10750 let modifier_keyword =
10751 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
10752
10753 let id = self.parse_literal_uint()?;
10754
10755 let modifier = match modifier_keyword {
10756 Some(Keyword::CONNECTION) => Some(KillType::Connection),
10757 Some(Keyword::QUERY) => Some(KillType::Query),
10758 Some(Keyword::MUTATION) => {
10759 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
10760 Some(KillType::Mutation)
10761 } else {
10762 self.expected(
10763 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
10764 self.peek_token(),
10765 )?
10766 }
10767 }
10768 _ => None,
10769 };
10770
10771 Ok(Statement::Kill { modifier, id })
10772 }
10773
    /// Parse an `EXPLAIN`/`DESCRIBE` statement; the alias keyword itself has
    /// already been consumed and is passed in as `describe_alias`.
    ///
    /// If a full statement follows, an `Explain` is produced; otherwise the
    /// input is treated as `DESCRIBE <table>` and an `ExplainTable` is
    /// returned. Nested EXPLAINs are rejected.
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Postgres-style utility options, e.g. `EXPLAIN (ANALYZE, FORMAT
        // JSON)`, are mutually exclusive with the bare keyword flags below.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format()?);
            }
        }

        // Try to parse a full statement; on failure fall through to the
        // DESCRIBE-a-table form.
        match self.maybe_parse(|parser| parser.parse_statement())? {
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // Hive allows `DESCRIBE [EXTENDED | FORMATTED] <table>`.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                // Some dialects require `DESCRIBE TABLE <name>`.
                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
10843
    /// Parse a complete query expression: `[WITH ...] <body> [ORDER BY]
    /// [LIMIT ...] [SETTINGS] [FETCH] [FOR ...] [FORMAT ...] [|> ...]`.
    ///
    /// Guarded by the recursion counter because query bodies nest
    /// (subqueries, CTEs, set operations).
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // `WITH ... INSERT/UPDATE/DELETE`: the DML statement forms the whole
        // query body and none of the trailing query clauses apply.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed()?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed()?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed()?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            // ClickHouse `SETTINGS key = value, ...`.
            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // Each `FOR` introduces either a lock clause (repeatable) or a
            // single MSSQL for-clause (XML/JSON/BROWSE), which ends the loop.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // ClickHouse output format: `FORMAT <ident>` or `FORMAT NULL`.
            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::FORMAT)
            {
                if self.parse_keyword(Keyword::NULL) {
                    Some(FormatClause::Null)
                } else {
                    let ident = self.parse_identifier()?;
                    Some(FormatClause::Identifier(ident))
                }
            } else {
                None
            };

            // BigQuery-style `|>` pipe operators, when the dialect has them.
            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
10961
    /// Parse a chain of `|>` pipe operators, each introduced by a keyword
    /// selecting the operator kind (SELECT, EXTEND, SET, DROP, AS, WHERE,
    /// LIMIT, AGGREGATE, ORDER BY).
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // `AGGREGATE [exprs] [GROUP BY exprs]`: the leading
                    // expression list is omitted when GROUP BY follows
                    // immediately.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                // Defensive: every keyword accepted above is handled, so
                // this arm only fires if the list and the match drift apart.
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
11047
11048 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
11049 let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
11050 && self.parse_keyword(Keyword::SETTINGS)
11051 {
11052 let key_values = self.parse_comma_separated(|p| {
11053 let key = p.parse_identifier()?;
11054 p.expect_token(&Token::Eq)?;
11055 let value = p.parse_value()?.value;
11056 Ok(Setting { key, value })
11057 })?;
11058 Some(key_values)
11059 } else {
11060 None
11061 };
11062 Ok(settings)
11063 }
11064
11065 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
11067 if self.parse_keyword(Keyword::XML) {
11068 Ok(Some(self.parse_for_xml()?))
11069 } else if self.parse_keyword(Keyword::JSON) {
11070 Ok(Some(self.parse_for_json()?))
11071 } else if self.parse_keyword(Keyword::BROWSE) {
11072 Ok(Some(ForClause::Browse))
11073 } else {
11074 Ok(None)
11075 }
11076 }
11077
    /// Parse the body of an MSSQL `FOR XML` clause: the mode
    /// (`RAW [(elem)] | AUTO | EXPLICIT | PATH [(elem)]`) followed by
    /// comma-separated modifiers (`ELEMENTS`, `BINARY BASE64`,
    /// `ROOT('name')`, `TYPE`).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            // RAW takes an optional parenthesized element name.
            let mut element_name = None;
            if self.peek_token().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            // PATH takes an optional parenthesized element name, like RAW.
            let mut element_name = None;
            if self.peek_token().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        // Modifiers may appear in any order, each preceded by a comma; an
        // unrecognized word after a comma is left unconsumed, so the next
        // peek is no longer a comma and the loop exits.
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        while self.peek_token().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                self.expect_keyword_is(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
11132
11133 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
11135 let for_json = if self.parse_keyword(Keyword::AUTO) {
11136 ForJson::Auto
11137 } else if self.parse_keyword(Keyword::PATH) {
11138 ForJson::Path
11139 } else {
11140 return Err(ParserError::ParserError(
11141 "Expected FOR JSON [AUTO | PATH ]".to_string(),
11142 ));
11143 };
11144 let mut root = None;
11145 let mut include_null_values = false;
11146 let mut without_array_wrapper = false;
11147 while self.peek_token().token == Token::Comma {
11148 self.next_token();
11149 if self.parse_keyword(Keyword::ROOT) {
11150 self.expect_token(&Token::LParen)?;
11151 root = Some(self.parse_literal_string()?);
11152 self.expect_token(&Token::RParen)?;
11153 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
11154 include_null_values = true;
11155 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
11156 without_array_wrapper = true;
11157 }
11158 }
11159 Ok(ForClause::Json {
11160 for_json,
11161 root,
11162 include_null_values,
11163 without_array_wrapper,
11164 })
11165 }
11166
11167 pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
11169 let name = self.parse_identifier()?;
11170
11171 let mut cte = if self.parse_keyword(Keyword::AS) {
11172 let mut is_materialized = None;
11173 if dialect_of!(self is PostgreSqlDialect) {
11174 if self.parse_keyword(Keyword::MATERIALIZED) {
11175 is_materialized = Some(CteAsMaterialized::Materialized);
11176 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
11177 is_materialized = Some(CteAsMaterialized::NotMaterialized);
11178 }
11179 }
11180 self.expect_token(&Token::LParen)?;
11181
11182 let query = self.parse_query()?;
11183 let closing_paren_token = self.expect_token(&Token::RParen)?;
11184
11185 let alias = TableAlias {
11186 name,
11187 columns: vec![],
11188 };
11189 Cte {
11190 alias,
11191 query,
11192 from: None,
11193 materialized: is_materialized,
11194 closing_paren_token: closing_paren_token.into(),
11195 }
11196 } else {
11197 let columns = self.parse_table_alias_column_defs()?;
11198 self.expect_keyword_is(Keyword::AS)?;
11199 let mut is_materialized = None;
11200 if dialect_of!(self is PostgreSqlDialect) {
11201 if self.parse_keyword(Keyword::MATERIALIZED) {
11202 is_materialized = Some(CteAsMaterialized::Materialized);
11203 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
11204 is_materialized = Some(CteAsMaterialized::NotMaterialized);
11205 }
11206 }
11207 self.expect_token(&Token::LParen)?;
11208
11209 let query = self.parse_query()?;
11210 let closing_paren_token = self.expect_token(&Token::RParen)?;
11211
11212 let alias = TableAlias { name, columns };
11213 Cte {
11214 alias,
11215 query,
11216 from: None,
11217 materialized: is_materialized,
11218 closing_paren_token: closing_paren_token.into(),
11219 }
11220 };
11221 if self.parse_keyword(Keyword::FROM) {
11222 cte.from = Some(self.parse_identifier()?);
11223 }
11224 Ok(cte)
11225 }
11226
    /// Parse a "query body": a `SELECT`, parenthesized subquery, `VALUES`
    /// list or `TABLE` reference, followed by any trailing set operators
    /// (`UNION`/`EXCEPT`/`INTERSECT`/`MINUS`) binding more tightly than
    /// `precedence`.
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // Parenthesized subquery: `( <query> )`.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
11261
11262 fn parse_remaining_set_exprs(
11266 &mut self,
11267 mut expr: SetExpr,
11268 precedence: u8,
11269 ) -> Result<Box<SetExpr>, ParserError> {
11270 loop {
11271 let op = self.parse_set_operator(&self.peek_token().token);
11273 let next_precedence = match op {
11274 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
11276 10
11277 }
11278 Some(SetOperator::Intersect) => 20,
11280 None => break,
11282 };
11283 if precedence >= next_precedence {
11284 break;
11285 }
11286 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
11288 expr = SetExpr::SetOperation {
11289 left: Box::new(expr),
11290 op: op.unwrap(),
11291 set_quantifier,
11292 right: self.parse_query_body(next_precedence)?,
11293 };
11294 }
11295
11296 Ok(expr.into())
11297 }
11298
11299 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
11300 match token {
11301 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
11302 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
11303 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
11304 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
11305 _ => None,
11306 }
11307 }
11308
11309 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
11310 match op {
11311 Some(
11312 SetOperator::Except
11313 | SetOperator::Intersect
11314 | SetOperator::Union
11315 | SetOperator::Minus,
11316 ) => {
11317 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
11318 SetQuantifier::DistinctByName
11319 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
11320 SetQuantifier::ByName
11321 } else if self.parse_keyword(Keyword::ALL) {
11322 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
11323 SetQuantifier::AllByName
11324 } else {
11325 SetQuantifier::All
11326 }
11327 } else if self.parse_keyword(Keyword::DISTINCT) {
11328 SetQuantifier::Distinct
11329 } else {
11330 SetQuantifier::None
11331 }
11332 }
11333 _ => SetQuantifier::None,
11334 }
11335 }
11336
11337 pub fn parse_select(&mut self) -> Result<Select, ParserError> {
11339 let mut from_first = None;
11340
11341 if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
11342 let from_token = self.expect_keyword(Keyword::FROM)?;
11343 let from = self.parse_table_with_joins()?;
11344 if !self.peek_keyword(Keyword::SELECT) {
11345 return Ok(Select {
11346 select_token: AttachedToken(from_token),
11347 distinct: None,
11348 top: None,
11349 top_before_distinct: false,
11350 projection: vec![],
11351 into: None,
11352 from,
11353 lateral_views: vec![],
11354 prewhere: None,
11355 selection: None,
11356 group_by: GroupByExpr::Expressions(vec![], vec![]),
11357 cluster_by: vec![],
11358 distribute_by: vec![],
11359 sort_by: vec![],
11360 having: None,
11361 named_window: vec![],
11362 window_before_qualify: false,
11363 qualify: None,
11364 value_table_mode: None,
11365 connect_by: None,
11366 flavor: SelectFlavor::FromFirstNoSelect,
11367 });
11368 }
11369 from_first = Some(from);
11370 }
11371
11372 let select_token = self.expect_keyword(Keyword::SELECT)?;
11373 let value_table_mode =
11374 if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) {
11375 if self.parse_keyword(Keyword::VALUE) {
11376 Some(ValueTableMode::AsValue)
11377 } else if self.parse_keyword(Keyword::STRUCT) {
11378 Some(ValueTableMode::AsStruct)
11379 } else {
11380 self.expected("VALUE or STRUCT", self.peek_token())?
11381 }
11382 } else {
11383 None
11384 };
11385
11386 let mut top_before_distinct = false;
11387 let mut top = None;
11388 if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
11389 top = Some(self.parse_top()?);
11390 top_before_distinct = true;
11391 }
11392 let distinct = self.parse_all_or_distinct()?;
11393 if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
11394 top = Some(self.parse_top()?);
11395 }
11396
11397 let projection =
11398 if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
11399 vec![]
11400 } else {
11401 self.parse_projection()?
11402 };
11403
11404 let into = if self.parse_keyword(Keyword::INTO) {
11405 Some(self.parse_select_into()?)
11406 } else {
11407 None
11408 };
11409
11410 let (from, from_first) = if let Some(from) = from_first.take() {
11416 (from, true)
11417 } else if self.parse_keyword(Keyword::FROM) {
11418 (self.parse_table_with_joins()?, false)
11419 } else {
11420 (vec![], false)
11421 };
11422
11423 let mut lateral_views = vec![];
11424 loop {
11425 if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
11426 let outer = self.parse_keyword(Keyword::OUTER);
11427 let lateral_view = self.parse_expr()?;
11428 let lateral_view_name = self.parse_object_name(false)?;
11429 let lateral_col_alias = self
11430 .parse_comma_separated(|parser| {
11431 parser.parse_optional_alias(&[
11432 Keyword::WHERE,
11433 Keyword::GROUP,
11434 Keyword::CLUSTER,
11435 Keyword::HAVING,
11436 Keyword::LATERAL,
11437 ]) })?
11439 .into_iter()
11440 .flatten()
11441 .collect();
11442
11443 lateral_views.push(LateralView {
11444 lateral_view,
11445 lateral_view_name,
11446 lateral_col_alias,
11447 outer,
11448 });
11449 } else {
11450 break;
11451 }
11452 }
11453
11454 let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
11455 && self.parse_keyword(Keyword::PREWHERE)
11456 {
11457 Some(self.parse_expr()?)
11458 } else {
11459 None
11460 };
11461
11462 let selection = if self.parse_keyword(Keyword::WHERE) {
11463 Some(self.parse_expr()?)
11464 } else {
11465 None
11466 };
11467
11468 let group_by = self
11469 .parse_optional_group_by()?
11470 .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
11471
11472 let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
11473 self.parse_comma_separated(Parser::parse_expr)?
11474 } else {
11475 vec![]
11476 };
11477
11478 let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
11479 self.parse_comma_separated(Parser::parse_expr)?
11480 } else {
11481 vec![]
11482 };
11483
11484 let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
11485 self.parse_comma_separated(Parser::parse_expr)?
11486 } else {
11487 vec![]
11488 };
11489
11490 let having = if self.parse_keyword(Keyword::HAVING) {
11491 Some(self.parse_expr()?)
11492 } else {
11493 None
11494 };
11495
11496 let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
11498 {
11499 let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
11500 if self.parse_keyword(Keyword::QUALIFY) {
11501 (named_windows, Some(self.parse_expr()?), true)
11502 } else {
11503 (named_windows, None, true)
11504 }
11505 } else if self.parse_keyword(Keyword::QUALIFY) {
11506 let qualify = Some(self.parse_expr()?);
11507 if self.parse_keyword(Keyword::WINDOW) {
11508 (
11509 self.parse_comma_separated(Parser::parse_named_window)?,
11510 qualify,
11511 false,
11512 )
11513 } else {
11514 (Default::default(), qualify, false)
11515 }
11516 } else {
11517 Default::default()
11518 };
11519
11520 let connect_by = if self.dialect.supports_connect_by()
11521 && self
11522 .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
11523 .is_some()
11524 {
11525 self.prev_token();
11526 Some(self.parse_connect_by()?)
11527 } else {
11528 None
11529 };
11530
11531 Ok(Select {
11532 select_token: AttachedToken(select_token),
11533 distinct,
11534 top,
11535 top_before_distinct,
11536 projection,
11537 into,
11538 from,
11539 lateral_views,
11540 prewhere,
11541 selection,
11542 group_by,
11543 cluster_by,
11544 distribute_by,
11545 sort_by,
11546 having,
11547 named_window: named_windows,
11548 window_before_qualify,
11549 qualify,
11550 value_table_mode,
11551 connect_by,
11552 flavor: if from_first {
11553 SelectFlavor::FromFirst
11554 } else {
11555 SelectFlavor::Standard
11556 },
11557 })
11558 }
11559
11560 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
11564 where
11565 F: FnMut(&mut Parser) -> Result<T, ParserError>,
11566 {
11567 let current_state = self.state;
11568 self.state = state;
11569 let res = f(self);
11570 self.state = current_state;
11571 res
11572 }
11573
11574 pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
11575 let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
11576 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
11577 parser.parse_comma_separated(Parser::parse_expr)
11578 })?;
11579 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
11580 let condition = self.parse_expr()?;
11581 (condition, relationships)
11582 } else {
11583 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
11584 let condition = self.parse_expr()?;
11585 self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
11586 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
11587 parser.parse_comma_separated(Parser::parse_expr)
11588 })?;
11589 (condition, relationships)
11590 };
11591 Ok(ConnectBy {
11592 condition,
11593 relationships,
11594 })
11595 }
11596
11597 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
11599 let token1 = self.next_token();
11600 let token2 = self.next_token();
11601 let token3 = self.next_token();
11602
11603 let table_name;
11604 let schema_name;
11605 if token2 == Token::Period {
11606 match token1.token {
11607 Token::Word(w) => {
11608 schema_name = w.value;
11609 }
11610 _ => {
11611 return self.expected("Schema name", token1);
11612 }
11613 }
11614 match token3.token {
11615 Token::Word(w) => {
11616 table_name = w.value;
11617 }
11618 _ => {
11619 return self.expected("Table name", token3);
11620 }
11621 }
11622 Ok(Table {
11623 table_name: Some(table_name),
11624 schema_name: Some(schema_name),
11625 })
11626 } else {
11627 match token1.token {
11628 Token::Word(w) => {
11629 table_name = w.value;
11630 }
11631 _ => {
11632 return self.expected("Table name", token1);
11633 }
11634 }
11635 Ok(Table {
11636 table_name: Some(table_name),
11637 schema_name: None,
11638 })
11639 }
11640 }
11641
11642 fn parse_set_role(
11644 &mut self,
11645 modifier: Option<ContextModifier>,
11646 ) -> Result<Statement, ParserError> {
11647 self.expect_keyword_is(Keyword::ROLE)?;
11648
11649 let role_name = if self.parse_keyword(Keyword::NONE) {
11650 None
11651 } else {
11652 Some(self.parse_identifier()?)
11653 };
11654 Ok(Statement::Set(Set::SetRole {
11655 context_modifier: modifier,
11656 role_name,
11657 }))
11658 }
11659
11660 fn parse_set_values(
11661 &mut self,
11662 parenthesized_assignment: bool,
11663 ) -> Result<Vec<Expr>, ParserError> {
11664 let mut values = vec![];
11665
11666 if parenthesized_assignment {
11667 self.expect_token(&Token::LParen)?;
11668 }
11669
11670 loop {
11671 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
11672 expr
11673 } else if let Ok(expr) = self.parse_expr() {
11674 expr
11675 } else {
11676 self.expected("variable value", self.peek_token())?
11677 };
11678
11679 values.push(value);
11680 if self.consume_token(&Token::Comma) {
11681 continue;
11682 }
11683
11684 if parenthesized_assignment {
11685 self.expect_token(&Token::RParen)?;
11686 }
11687 return Ok(values);
11688 }
11689 }
11690
11691 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
11692 let modifier =
11693 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
11694
11695 Self::keyword_to_modifier(modifier)
11696 }
11697
11698 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
11700 let scope = self.parse_context_modifier();
11701
11702 let name = if self.dialect.supports_parenthesized_set_variables()
11703 && self.consume_token(&Token::LParen)
11704 {
11705 self.expected("Unparenthesized assignment", self.peek_token())?
11709 } else {
11710 self.parse_object_name(false)?
11711 };
11712
11713 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
11714 return self.expected("assignment operator", self.peek_token());
11715 }
11716
11717 let value = self.parse_expr()?;
11718
11719 Ok(SetAssignment { scope, name, value })
11720 }
11721
    /// Parse a `SET` statement (the `SET` keyword is already consumed).
    ///
    /// Dispatches among the many `SET` forms: `SET ROLE`, `SET TIME ZONE`,
    /// `SET NAMES`, `SET CHARACTERISTICS AS TRANSACTION`, `SET TRANSACTION`,
    /// comma-separated assignment lists, single or parenthesized variable
    /// assignments, and dialect-specific session parameters.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive-style `SET HIVEVAR:name = value`.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // An optional SESSION/LOCAL/GLOBAL scope may precede the variable,
        // but never together with HIVEVAR.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET [scope] ROLE ...` — tried speculatively so that failure
        // backtracks and the other forms below are still attempted.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`.
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // With an explicit `=`/`TO` this is represented as an
                // ordinary assignment to the pseudo-variable TIMEZONE.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // `SET TIME ZONE <expr>` without an operator.
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // `SET NAMES DEFAULT` or `SET NAMES <charset> [COLLATE <coll>]`.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET CHARACTERISTICS AS TRANSACTION <modes>` (session-level).
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT <id>` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?.value;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            // Re-expose the scope keyword: parse_set_assignment parses a
            // per-assignment scope itself.
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: emit the simpler single form.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a parenthesized list of variables, `SET (a, b) = ...`,
        // or a single (possibly qualified) variable name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        if self.dialect.supports_set_stmt_without_operator() {
            // Operator-less forms (e.g. `SET STATISTICS IO ON`): re-expose
            // the name token and delegate to the session-parameter parser.
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected("equals sign or TO", self.peek_token())
    }
11870
11871 pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
11872 if self.parse_keyword(Keyword::STATISTICS) {
11873 let topic = match self.parse_one_of_keywords(&[
11874 Keyword::IO,
11875 Keyword::PROFILE,
11876 Keyword::TIME,
11877 Keyword::XML,
11878 ]) {
11879 Some(Keyword::IO) => SessionParamStatsTopic::IO,
11880 Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
11881 Some(Keyword::TIME) => SessionParamStatsTopic::Time,
11882 Some(Keyword::XML) => SessionParamStatsTopic::Xml,
11883 _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
11884 };
11885 let value = self.parse_session_param_value()?;
11886 Ok(
11887 Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
11888 topic,
11889 value,
11890 }))
11891 .into(),
11892 )
11893 } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
11894 let obj = self.parse_object_name(false)?;
11895 let value = self.parse_session_param_value()?;
11896 Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
11897 SetSessionParamIdentityInsert { obj, value },
11898 ))
11899 .into())
11900 } else if self.parse_keyword(Keyword::OFFSETS) {
11901 let keywords = self.parse_comma_separated(|parser| {
11902 let next_token = parser.next_token();
11903 match &next_token.token {
11904 Token::Word(w) => Ok(w.to_string()),
11905 _ => parser.expected("SQL keyword", next_token),
11906 }
11907 })?;
11908 let value = self.parse_session_param_value()?;
11909 Ok(
11910 Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
11911 keywords,
11912 value,
11913 }))
11914 .into(),
11915 )
11916 } else {
11917 let names = self.parse_comma_separated(|parser| {
11918 let next_token = parser.next_token();
11919 match next_token.token {
11920 Token::Word(w) => Ok(w.to_string()),
11921 _ => parser.expected("Session param name", next_token),
11922 }
11923 })?;
11924 let value = self.parse_expr()?.to_string();
11925 Ok(
11926 Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
11927 names,
11928 value,
11929 }))
11930 .into(),
11931 )
11932 }
11933 }
11934
11935 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
11936 if self.parse_keyword(Keyword::ON) {
11937 Ok(SessionParamValue::On)
11938 } else if self.parse_keyword(Keyword::OFF) {
11939 Ok(SessionParamValue::Off)
11940 } else {
11941 self.expected("ON or OFF", self.peek_token())
11942 }
11943 }
11944
    /// Parse a `SHOW ...` statement, dispatching on the object keyword that
    /// follows the optional TERSE/EXTENDED/FULL/SESSION/GLOBAL/EXTERNAL
    /// modifiers.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        // Leading modifiers; each is optional, and the consumption order
        // here fixes the accepted modifier order.
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if extended || full {
            // EXTENDED/FULL only combine with the object kinds above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): VARIABLES is consumed *before* the dialect check,
            // so in other dialects the keyword is eaten and parsing falls
            // through to a later arm — confirm this ordering is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): same keyword-before-dialect-check ordering as
            // the VARIABLES arm above.
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else {
            // Fallback: `SHOW <ident>[.<ident>...]` as a generic variable.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
11999
12000 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
12001 let history = self.parse_keyword(Keyword::HISTORY);
12002 let show_options = self.parse_show_stmt_options()?;
12003 Ok(Statement::ShowDatabases {
12004 terse,
12005 history,
12006 show_options,
12007 })
12008 }
12009
12010 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
12011 let history = self.parse_keyword(Keyword::HISTORY);
12012 let show_options = self.parse_show_stmt_options()?;
12013 Ok(Statement::ShowSchemas {
12014 terse,
12015 history,
12016 show_options,
12017 })
12018 }
12019
12020 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
12021 let obj_type = match self.expect_one_of_keywords(&[
12022 Keyword::TABLE,
12023 Keyword::TRIGGER,
12024 Keyword::FUNCTION,
12025 Keyword::PROCEDURE,
12026 Keyword::EVENT,
12027 Keyword::VIEW,
12028 ])? {
12029 Keyword::TABLE => Ok(ShowCreateObject::Table),
12030 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
12031 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
12032 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
12033 Keyword::EVENT => Ok(ShowCreateObject::Event),
12034 Keyword::VIEW => Ok(ShowCreateObject::View),
12035 keyword => Err(ParserError::ParserError(format!(
12036 "Unable to map keyword to ShowCreateObject: {keyword:?}"
12037 ))),
12038 }?;
12039
12040 let obj_name = self.parse_object_name(false)?;
12041
12042 Ok(Statement::ShowCreate { obj_type, obj_name })
12043 }
12044
12045 pub fn parse_show_columns(
12046 &mut self,
12047 extended: bool,
12048 full: bool,
12049 ) -> Result<Statement, ParserError> {
12050 let show_options = self.parse_show_stmt_options()?;
12051 Ok(Statement::ShowColumns {
12052 extended,
12053 full,
12054 show_options,
12055 })
12056 }
12057
12058 fn parse_show_tables(
12059 &mut self,
12060 terse: bool,
12061 extended: bool,
12062 full: bool,
12063 external: bool,
12064 ) -> Result<Statement, ParserError> {
12065 let history = !external && self.parse_keyword(Keyword::HISTORY);
12066 let show_options = self.parse_show_stmt_options()?;
12067 Ok(Statement::ShowTables {
12068 terse,
12069 history,
12070 extended,
12071 full,
12072 external,
12073 show_options,
12074 })
12075 }
12076
12077 fn parse_show_views(
12078 &mut self,
12079 terse: bool,
12080 materialized: bool,
12081 ) -> Result<Statement, ParserError> {
12082 let show_options = self.parse_show_stmt_options()?;
12083 Ok(Statement::ShowViews {
12084 materialized,
12085 terse,
12086 show_options,
12087 })
12088 }
12089
12090 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
12091 let filter = self.parse_show_statement_filter()?;
12092 Ok(Statement::ShowFunctions { filter })
12093 }
12094
12095 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
12096 let filter = self.parse_show_statement_filter()?;
12097 Ok(Statement::ShowCollation { filter })
12098 }
12099
12100 pub fn parse_show_statement_filter(
12101 &mut self,
12102 ) -> Result<Option<ShowStatementFilter>, ParserError> {
12103 if self.parse_keyword(Keyword::LIKE) {
12104 Ok(Some(ShowStatementFilter::Like(
12105 self.parse_literal_string()?,
12106 )))
12107 } else if self.parse_keyword(Keyword::ILIKE) {
12108 Ok(Some(ShowStatementFilter::ILike(
12109 self.parse_literal_string()?,
12110 )))
12111 } else if self.parse_keyword(Keyword::WHERE) {
12112 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
12113 } else {
12114 self.maybe_parse(|parser| -> Result<String, ParserError> {
12115 parser.parse_literal_string()
12116 })?
12117 .map_or(Ok(None), |filter| {
12118 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
12119 })
12120 }
12121 }
12122
    /// Parse a `USE ...` statement (the `USE` keyword is already consumed).
    ///
    /// Some dialects allow an object-kind keyword (CATALOG/DATABASE/SCHEMA/
    /// WAREHOUSE/ROLE/SECONDARY) between `USE` and the object name; which
    /// keywords are accepted depends on the active dialect.
    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
        let parsed_keyword = if dialect_of!(self is HiveDialect) {
            // Hive: `USE DEFAULT` is a complete statement of its own.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Statement::Use(Use::Default));
            }
            None
        } else if dialect_of!(self is DatabricksDialect) {
            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
        } else if dialect_of!(self is SnowflakeDialect) {
            self.parse_one_of_keywords(&[
                Keyword::DATABASE,
                Keyword::SCHEMA,
                Keyword::WAREHOUSE,
                Keyword::ROLE,
                Keyword::SECONDARY,
            ])
        } else {
            // All other dialects: no object-kind keyword is recognized.
            None
        };

        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
            // `USE SECONDARY ROLES ...` has its own grammar (no object name).
            self.parse_secondary_roles()?
        } else {
            let obj_name = self.parse_object_name(false)?;
            match parsed_keyword {
                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
                Some(Keyword::DATABASE) => Use::Database(obj_name),
                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
                Some(Keyword::ROLE) => Use::Role(obj_name),
                // No (or unmapped) keyword: a plain `USE <name>`.
                _ => Use::Object(obj_name),
            }
        };

        Ok(Statement::Use(result))
    }
12161
12162 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
12163 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
12164 if self.parse_keyword(Keyword::NONE) {
12165 Ok(Use::SecondaryRoles(SecondaryRoles::None))
12166 } else if self.parse_keyword(Keyword::ALL) {
12167 Ok(Use::SecondaryRoles(SecondaryRoles::All))
12168 } else {
12169 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
12170 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
12171 }
12172 }
12173
12174 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
12175 let relation = self.parse_table_factor()?;
12176 let joins = self.parse_joins()?;
12180 Ok(TableWithJoins { relation, joins })
12181 }
12182
    /// Parse zero or more JOIN clauses following a table factor.
    ///
    /// Handles CROSS JOIN/APPLY, OUTER APPLY, ASOF JOIN, the
    /// NATURAL/INNER/LEFT/RIGHT/FULL/SEMI/ANTI/STRAIGHT_JOIN family, and
    /// parenthesis-less nested joins. Returns when the next token is not a
    /// join keyword.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix before the join keyword.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
                };
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // `OUTER` here can only start `OUTER APPLY`.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // `ASOF JOIN <table> MATCH_CONDITION (<expr>) <constraint>`.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) the next keyword to select the
                // join operator; `break` below exits when it isn't one.
                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm evaluates to a JoinOperator tuple-variant
                // constructor, applied to the join constraint further down.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        // Optional INNER, then a mandatory JOIN.
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume LEFT/RIGHT, then the join sub-type.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        // Consume ANTI, then a mandatory JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        // Consume SEMI, then a mandatory JOIN.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // Consume FULL, an optional OUTER, then JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // Bare OUTER without LEFT/RIGHT/FULL is an error.
                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        // NATURAL must be followed by a join keyword.
                        return self.expected("a join type after NATURAL", self.peek_token());
                    }
                    // Not a join keyword: no more joins to parse.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // Parenthesis-less nested join: the relation is itself
                // immediately followed by another join keyword.
                if self.peek_parens_less_nested_join() {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
12337
12338 fn peek_parens_less_nested_join(&self) -> bool {
12339 matches!(
12340 self.peek_token_ref().token,
12341 Token::Word(Word {
12342 keyword: Keyword::JOIN
12343 | Keyword::INNER
12344 | Keyword::LEFT
12345 | Keyword::RIGHT
12346 | Keyword::FULL,
12347 ..
12348 })
12349 )
12350 }
12351
12352 pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12354 if self.parse_keyword(Keyword::LATERAL) {
12355 if self.consume_token(&Token::LParen) {
12357 self.parse_derived_table_factor(Lateral)
12358 } else {
12359 let name = self.parse_object_name(false)?;
12360 self.expect_token(&Token::LParen)?;
12361 let args = self.parse_optional_args()?;
12362 let alias = self.maybe_parse_table_alias()?;
12363 Ok(TableFactor::Function {
12364 lateral: true,
12365 name,
12366 args,
12367 alias,
12368 })
12369 }
12370 } else if self.parse_keyword(Keyword::TABLE) {
12371 self.expect_token(&Token::LParen)?;
12373 let expr = self.parse_expr()?;
12374 self.expect_token(&Token::RParen)?;
12375 let alias = self.maybe_parse_table_alias()?;
12376 Ok(TableFactor::TableFunction { expr, alias })
12377 } else if self.consume_token(&Token::LParen) {
12378 if let Some(mut table) =
12400 self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
12401 {
12402 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
12403 {
12404 table = match kw {
12405 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
12406 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
12407 _ => unreachable!(),
12408 }
12409 }
12410 return Ok(table);
12411 }
12412
12413 let mut table_and_joins = self.parse_table_and_joins()?;
12420
12421 #[allow(clippy::if_same_then_else)]
12422 if !table_and_joins.joins.is_empty() {
12423 self.expect_token(&Token::RParen)?;
12424 let alias = self.maybe_parse_table_alias()?;
12425 Ok(TableFactor::NestedJoin {
12426 table_with_joins: Box::new(table_and_joins),
12427 alias,
12428 }) } else if let TableFactor::NestedJoin {
12430 table_with_joins: _,
12431 alias: _,
12432 } = &table_and_joins.relation
12433 {
12434 self.expect_token(&Token::RParen)?;
12437 let alias = self.maybe_parse_table_alias()?;
12438 Ok(TableFactor::NestedJoin {
12439 table_with_joins: Box::new(table_and_joins),
12440 alias,
12441 })
12442 } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
12443 self.expect_token(&Token::RParen)?;
12450
12451 if let Some(outer_alias) = self.maybe_parse_table_alias()? {
12452 match &mut table_and_joins.relation {
12455 TableFactor::Derived { alias, .. }
12456 | TableFactor::Table { alias, .. }
12457 | TableFactor::Function { alias, .. }
12458 | TableFactor::UNNEST { alias, .. }
12459 | TableFactor::JsonTable { alias, .. }
12460 | TableFactor::XmlTable { alias, .. }
12461 | TableFactor::OpenJsonTable { alias, .. }
12462 | TableFactor::TableFunction { alias, .. }
12463 | TableFactor::Pivot { alias, .. }
12464 | TableFactor::Unpivot { alias, .. }
12465 | TableFactor::MatchRecognize { alias, .. }
12466 | TableFactor::NestedJoin { alias, .. } => {
12467 if let Some(inner_alias) = alias {
12469 return Err(ParserError::ParserError(format!(
12470 "duplicate alias {inner_alias}"
12471 )));
12472 }
12473 alias.replace(outer_alias);
12477 }
12478 };
12479 }
12480 Ok(table_and_joins.relation)
12482 } else {
12483 self.expected("joined table", self.peek_token())
12486 }
12487 } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
12488 && matches!(
12489 self.peek_tokens(),
12490 [
12491 Token::Word(Word {
12492 keyword: Keyword::VALUES,
12493 ..
12494 }),
12495 Token::LParen
12496 ]
12497 )
12498 {
12499 self.expect_keyword_is(Keyword::VALUES)?;
12500
12501 let values = SetExpr::Values(self.parse_values(false)?);
12505 let alias = self.maybe_parse_table_alias()?;
12506 Ok(TableFactor::Derived {
12507 lateral: false,
12508 subquery: Box::new(Query {
12509 with: None,
12510 body: Box::new(values),
12511 order_by: None,
12512 limit_clause: None,
12513 fetch: None,
12514 locks: vec![],
12515 for_clause: None,
12516 settings: None,
12517 format_clause: None,
12518 pipe_operators: vec![],
12519 }),
12520 alias,
12521 })
12522 } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
12523 && self.parse_keyword(Keyword::UNNEST)
12524 {
12525 self.expect_token(&Token::LParen)?;
12526 let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
12527 self.expect_token(&Token::RParen)?;
12528
12529 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
12530 let alias = match self.maybe_parse_table_alias() {
12531 Ok(Some(alias)) => Some(alias),
12532 Ok(None) => None,
12533 Err(e) => return Err(e),
12534 };
12535
12536 let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
12537 Ok(()) => true,
12538 Err(_) => false,
12539 };
12540
12541 let with_offset_alias = if with_offset {
12542 match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
12543 Ok(Some(alias)) => Some(alias),
12544 Ok(None) => None,
12545 Err(e) => return Err(e),
12546 }
12547 } else {
12548 None
12549 };
12550
12551 Ok(TableFactor::UNNEST {
12552 alias,
12553 array_exprs,
12554 with_offset,
12555 with_offset_alias,
12556 with_ordinality,
12557 })
12558 } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
12559 let json_expr = self.parse_expr()?;
12560 self.expect_token(&Token::Comma)?;
12561 let json_path = self.parse_value()?.value;
12562 self.expect_keyword_is(Keyword::COLUMNS)?;
12563 self.expect_token(&Token::LParen)?;
12564 let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
12565 self.expect_token(&Token::RParen)?;
12566 self.expect_token(&Token::RParen)?;
12567 let alias = self.maybe_parse_table_alias()?;
12568 Ok(TableFactor::JsonTable {
12569 json_expr,
12570 json_path,
12571 columns,
12572 alias,
12573 })
12574 } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
12575 self.prev_token();
12576 self.parse_open_json_table_factor()
12577 } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
12578 self.prev_token();
12579 self.parse_xml_table_factor()
12580 } else {
12581 let name = self.parse_object_name(true)?;
12582
12583 let json_path = match self.peek_token().token {
12584 Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
12585 _ => None,
12586 };
12587
12588 let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
12589 && self.parse_keyword(Keyword::PARTITION)
12590 {
12591 self.parse_parenthesized_identifiers()?
12592 } else {
12593 vec![]
12594 };
12595
12596 let version = self.maybe_parse_table_version()?;
12598
12599 let args = if self.consume_token(&Token::LParen) {
12601 Some(self.parse_table_function_args()?)
12602 } else {
12603 None
12604 };
12605
12606 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
12607
12608 let mut sample = None;
12609 if self.dialect.supports_table_sample_before_alias() {
12610 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
12611 sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
12612 }
12613 }
12614
12615 let alias = self.maybe_parse_table_alias()?;
12616
12617 let index_hints = if self.dialect.supports_table_hints() {
12619 self.maybe_parse(|p| p.parse_table_index_hints())?
12620 .unwrap_or(vec![])
12621 } else {
12622 vec![]
12623 };
12624
12625 let mut with_hints = vec![];
12627 if self.parse_keyword(Keyword::WITH) {
12628 if self.consume_token(&Token::LParen) {
12629 with_hints = self.parse_comma_separated(Parser::parse_expr)?;
12630 self.expect_token(&Token::RParen)?;
12631 } else {
12632 self.prev_token();
12634 }
12635 };
12636
12637 if !self.dialect.supports_table_sample_before_alias() {
12638 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
12639 sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
12640 }
12641 }
12642
12643 let mut table = TableFactor::Table {
12644 name,
12645 alias,
12646 args,
12647 with_hints,
12648 version,
12649 partitions,
12650 with_ordinality,
12651 json_path,
12652 sample,
12653 index_hints,
12654 };
12655
12656 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
12657 table = match kw {
12658 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
12659 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
12660 _ => unreachable!(),
12661 }
12662 }
12663
12664 if self.dialect.supports_match_recognize()
12665 && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
12666 {
12667 table = self.parse_match_recognize(table)?;
12668 }
12669
12670 Ok(table)
12671 }
12672 }
12673
    /// Parses an optional `TABLESAMPLE`/`SAMPLE` clause following a table
    /// factor. Returns `Ok(None)` without consuming anything when neither
    /// introducing keyword is present.
    ///
    /// Handles an optional sampling method (BERNOULLI/ROW/SYSTEM/BLOCK),
    /// either a Hive-style `BUCKET x OUT OF y [ON expr]` spec or a quantity
    /// (`<value> [ROWS | PERCENT]`, optionally parenthesized), an optional
    /// `REPEATABLE(<seed>)`/`SEED(<seed>)` suffix, and an optional
    /// `OFFSET <expr>`.
    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
        // Record which introducing keyword was used.
        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
            TableSampleModifier::TableSample
        } else if self.parse_keyword(Keyword::SAMPLE) {
            TableSampleModifier::Sample
        } else {
            return Ok(None);
        };

        // Optional sampling method name.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        let parenthesized = self.consume_token(&Token::LParen);

        // Exactly one of `quantity` / `bucket` is populated: a BUCKET spec
        // (only valid inside parentheses) or a plain quantity.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?.value;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?.value;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    // Not a regular expression: accept a bare word such as a
                    // byte-length literal (e.g. `100M`) as a placeholder value.
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token().span.start
                        );
                    }
                }
            };
            // Optional unit for the quantity.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        // Close the parenthesis opened above, if any.
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Some(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        })))
    }
12773
12774 fn parse_table_sample_seed(
12775 &mut self,
12776 modifier: TableSampleSeedModifier,
12777 ) -> Result<TableSampleSeed, ParserError> {
12778 self.expect_token(&Token::LParen)?;
12779 let value = self.parse_number_value()?.value;
12780 self.expect_token(&Token::RParen)?;
12781 Ok(TableSampleSeed { modifier, value })
12782 }
12783
12784 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12787 self.expect_token(&Token::LParen)?;
12788 let json_expr = self.parse_expr()?;
12789 let json_path = if self.consume_token(&Token::Comma) {
12790 Some(self.parse_value()?.value)
12791 } else {
12792 None
12793 };
12794 self.expect_token(&Token::RParen)?;
12795 let columns = if self.parse_keyword(Keyword::WITH) {
12796 self.expect_token(&Token::LParen)?;
12797 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
12798 self.expect_token(&Token::RParen)?;
12799 columns
12800 } else {
12801 Vec::new()
12802 };
12803 let alias = self.maybe_parse_table_alias()?;
12804 Ok(TableFactor::OpenJsonTable {
12805 json_expr,
12806 json_path,
12807 columns,
12808 alias,
12809 })
12810 }
12811
12812 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12813 self.expect_token(&Token::LParen)?;
12814 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
12815 self.expect_token(&Token::LParen)?;
12816 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
12817 self.expect_token(&Token::RParen)?;
12818 self.expect_token(&Token::Comma)?;
12819 namespaces
12820 } else {
12821 vec![]
12822 };
12823 let row_expression = self.parse_expr()?;
12824 let passing = self.parse_xml_passing_clause()?;
12825 self.expect_keyword_is(Keyword::COLUMNS)?;
12826 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
12827 self.expect_token(&Token::RParen)?;
12828 let alias = self.maybe_parse_table_alias()?;
12829 Ok(TableFactor::XmlTable {
12830 namespaces,
12831 row_expression,
12832 passing,
12833 columns,
12834 alias,
12835 })
12836 }
12837
12838 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
12839 let uri = self.parse_expr()?;
12840 self.expect_keyword_is(Keyword::AS)?;
12841 let name = self.parse_identifier()?;
12842 Ok(XmlNamespaceDefinition { uri, name })
12843 }
12844
12845 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
12846 let name = self.parse_identifier()?;
12847
12848 let option = if self.parse_keyword(Keyword::FOR) {
12849 self.expect_keyword(Keyword::ORDINALITY)?;
12850 XmlTableColumnOption::ForOrdinality
12851 } else {
12852 let r#type = self.parse_data_type()?;
12853 let mut path = None;
12854 let mut default = None;
12855
12856 if self.parse_keyword(Keyword::PATH) {
12857 path = Some(self.parse_expr()?);
12858 }
12859
12860 if self.parse_keyword(Keyword::DEFAULT) {
12861 default = Some(self.parse_expr()?);
12862 }
12863
12864 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
12865 if !not_null {
12866 let _ = self.parse_keyword(Keyword::NULL);
12868 }
12869
12870 XmlTableColumnOption::NamedInfo {
12871 r#type,
12872 path,
12873 default,
12874 nullable: !not_null,
12875 }
12876 };
12877 Ok(XmlTableColumn { name, option })
12878 }
12879
12880 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
12881 let mut arguments = vec![];
12882 if self.parse_keyword(Keyword::PASSING) {
12883 loop {
12884 let by_value =
12885 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
12886 let expr = self.parse_expr()?;
12887 let alias = if self.parse_keyword(Keyword::AS) {
12888 Some(self.parse_identifier()?)
12889 } else {
12890 None
12891 };
12892 arguments.push(XmlPassingArgument {
12893 expr,
12894 alias,
12895 by_value,
12896 });
12897 if !self.consume_token(&Token::Comma) {
12898 break;
12899 }
12900 }
12901 }
12902 Ok(XmlPassingClause { arguments })
12903 }
12904
    /// Parses the body of a `MATCH_RECOGNIZE (...)` clause and wraps the
    /// given table factor in [`TableFactor::MatchRecognize`].
    ///
    /// The caller has already consumed the `MATCH_RECOGNIZE` keyword; this
    /// parses, in order: optional PARTITION BY, ORDER BY and MEASURES
    /// sections, the rows-per-match mode, AFTER MATCH SKIP, the mandatory
    /// PATTERN and DEFINE sections, the closing `)`, and an optional alias.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Each measure is `<expr> [AS] <alias>` — the AS is optional but the
        // alias itself is mandatory.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // ONE ROW PER MATCH | ALL ROWS PER MATCH [empty-match mode]
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // AFTER MATCH SKIP {PAST LAST ROW | TO NEXT ROW | TO [FIRST|LAST] <sym>}
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    // AFTER MATCH SKIP was seen but no valid option followed.
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        self.expect_keyword_is(Keyword::DEFINE)?;

        // DEFINE entries are `<symbol> AS <boolean expr>`, comma-separated.
        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
13009
    /// Parses a primary (non-quantified) element of a MATCH_RECOGNIZE row
    /// pattern: the `^`/`$` anchors, an exclusion `{- sym -}`,
    /// `PERMUTE(sym, ...)`, a parenthesized group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // The `$` end anchor reaches us as a placeholder token from the
            // tokenizer.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}` excludes the symbol's rows from the output.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // PERMUTE is matched as an unquoted word rather than a keyword.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: back up and try to read a bare symbol name.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
13048
    /// Parses a base pattern followed by zero or more repetition quantifiers:
    /// `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`. Each quantifier
    /// wraps the accumulated pattern in another `Repetition` node.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` reaches us as a placeholder token from the tokenizer.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,` — either `{n,m}` or `{n,}`.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: push the token back and stop looping.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
13103
13104 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13105 let mut patterns = vec![self.parse_repetition_pattern()?];
13106 while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
13107 patterns.push(self.parse_repetition_pattern()?);
13108 }
13109 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
13110 Ok([pattern]) => Ok(pattern),
13111 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
13112 }
13113 }
13114
13115 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13116 let pattern = self.parse_concat_pattern()?;
13117 if self.consume_token(&Token::Pipe) {
13118 match self.parse_pattern()? {
13119 MatchRecognizePattern::Alternation(mut patterns) => {
13121 patterns.insert(0, pattern);
13122 Ok(MatchRecognizePattern::Alternation(patterns))
13123 }
13124 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
13125 }
13126 } else {
13127 Ok(pattern)
13128 }
13129 }
13130
13131 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
13133 if self.dialect.supports_timestamp_versioning() {
13134 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
13135 {
13136 let expr = self.parse_expr()?;
13137 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
13138 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
13139 let func_name = self.parse_object_name(true)?;
13140 let func = self.parse_function(func_name)?;
13141 return Ok(Some(TableVersion::Function(func)));
13142 }
13143 }
13144 Ok(None)
13145 }
13146
    /// Parses one column definition inside `JSON_TABLE(... COLUMNS(...))`.
    ///
    /// Supports `NESTED [PATH] '<path>' COLUMNS (...)`,
    /// `<name> FOR ORDINALITY`, and
    /// `<name> <type> [EXISTS] PATH '<path>'` with optional
    /// `{NULL | ERROR | DEFAULT <value>} ON {EMPTY | ERROR}` handlers.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional before the path literal.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?.value;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            // Nested column lists are parsed recursively.
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?.value;
        let mut on_empty = None;
        let mut on_error = None;
        // Both ON EMPTY and ON ERROR handlers may appear, in either order;
        // a later duplicate overwrites the earlier one.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
13190
13191 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
13199 let name = self.parse_identifier()?;
13200 let r#type = self.parse_data_type()?;
13201 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
13202 self.next_token();
13203 Some(path)
13204 } else {
13205 None
13206 };
13207 let as_json = self.parse_keyword(Keyword::AS);
13208 if as_json {
13209 self.expect_keyword_is(Keyword::JSON)?;
13210 }
13211 Ok(OpenJsonTableColumn {
13212 name,
13213 r#type,
13214 path,
13215 as_json,
13216 })
13217 }
13218
13219 fn parse_json_table_column_error_handling(
13220 &mut self,
13221 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
13222 let res = if self.parse_keyword(Keyword::NULL) {
13223 JsonTableColumnErrorHandling::Null
13224 } else if self.parse_keyword(Keyword::ERROR) {
13225 JsonTableColumnErrorHandling::Error
13226 } else if self.parse_keyword(Keyword::DEFAULT) {
13227 JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
13228 } else {
13229 return Ok(None);
13230 };
13231 self.expect_keyword_is(Keyword::ON)?;
13232 Ok(Some(res))
13233 }
13234
13235 pub fn parse_derived_table_factor(
13236 &mut self,
13237 lateral: IsLateral,
13238 ) -> Result<TableFactor, ParserError> {
13239 let subquery = self.parse_query()?;
13240 self.expect_token(&Token::RParen)?;
13241 let alias = self.maybe_parse_table_alias()?;
13242 Ok(TableFactor::Derived {
13243 lateral: match lateral {
13244 Lateral => true,
13245 NotLateral => false,
13246 },
13247 subquery,
13248 alias,
13249 })
13250 }
13251
13252 fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
13253 let function_name = match self.next_token().token {
13254 Token::Word(w) => Ok(w.value),
13255 _ => self.expected("a function identifier", self.peek_token()),
13256 }?;
13257 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
13258 let alias = if self.parse_keyword(Keyword::AS) {
13259 Some(self.parse_identifier()?)
13260 } else {
13261 None
13262 };
13263
13264 Ok(ExprWithAlias { expr, alias })
13265 }
13266 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
13289 let expr = self.parse_expr()?;
13290 let alias = if self.parse_keyword(Keyword::AS) {
13291 Some(self.parse_identifier()?)
13292 } else {
13293 None
13294 };
13295
13296 Ok(ExprWithAlias { expr, alias })
13297 }
13298
    /// Parses the body of a `PIVOT (...)` clause — the `PIVOT` keyword was
    /// consumed by the caller — and wraps the given table factor in
    /// [`TableFactor::Pivot`].
    ///
    /// Shape: `( <aggregates> FOR <value column> IN ( <source> )
    /// [DEFAULT ON NULL ( <expr> )] ) [alias]`.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // The value column may be a dotted (qualified) name.
        let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
        self.expect_keyword_is(Keyword::IN)?;

        self.expect_token(&Token::LParen)?;
        // The pivot source is one of: `ANY [ORDER BY ...]`, a subquery, or an
        // explicit value list.
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
        };
        self.expect_token(&Token::RParen)?;

        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
13345
13346 pub fn parse_unpivot_table_factor(
13347 &mut self,
13348 table: TableFactor,
13349 ) -> Result<TableFactor, ParserError> {
13350 self.expect_token(&Token::LParen)?;
13351 let value = self.parse_identifier()?;
13352 self.expect_keyword_is(Keyword::FOR)?;
13353 let name = self.parse_identifier()?;
13354 self.expect_keyword_is(Keyword::IN)?;
13355 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
13356 self.expect_token(&Token::RParen)?;
13357 let alias = self.maybe_parse_table_alias()?;
13358 Ok(TableFactor::Unpivot {
13359 table: Box::new(table),
13360 value,
13361 name,
13362 columns,
13363 alias,
13364 })
13365 }
13366
13367 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
13368 if natural {
13369 Ok(JoinConstraint::Natural)
13370 } else if self.parse_keyword(Keyword::ON) {
13371 let constraint = self.parse_expr()?;
13372 Ok(JoinConstraint::On(constraint))
13373 } else if self.parse_keyword(Keyword::USING) {
13374 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
13375 Ok(JoinConstraint::Using(columns))
13376 } else {
13377 Ok(JoinConstraint::None)
13378 }
13380 }
13381
13382 pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
13384 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
13385
13386 self.expect_keyword_is(Keyword::TO)?;
13387 let grantees = self.parse_grantees()?;
13388
13389 let with_grant_option =
13390 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
13391
13392 let granted_by = self
13393 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
13394 .then(|| self.parse_identifier().unwrap());
13395
13396 Ok(Statement::Grant {
13397 privileges,
13398 objects,
13399 grantees,
13400 with_grant_option,
13401 granted_by,
13402 })
13403 }
13404
13405 fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
13406 let mut values = vec![];
13407 let mut grantee_type = GranteesType::None;
13408 loop {
13409 grantee_type = if self.parse_keyword(Keyword::ROLE) {
13410 GranteesType::Role
13411 } else if self.parse_keyword(Keyword::USER) {
13412 GranteesType::User
13413 } else if self.parse_keyword(Keyword::SHARE) {
13414 GranteesType::Share
13415 } else if self.parse_keyword(Keyword::GROUP) {
13416 GranteesType::Group
13417 } else if self.parse_keyword(Keyword::PUBLIC) {
13418 GranteesType::Public
13419 } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
13420 GranteesType::DatabaseRole
13421 } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
13422 GranteesType::ApplicationRole
13423 } else if self.parse_keyword(Keyword::APPLICATION) {
13424 GranteesType::Application
13425 } else {
13426 grantee_type };
13428
13429 let grantee = if grantee_type == GranteesType::Public {
13430 Grantee {
13431 grantee_type: grantee_type.clone(),
13432 name: None,
13433 }
13434 } else {
13435 let mut name = self.parse_grantee_name()?;
13436 if self.consume_token(&Token::Colon) {
13437 let ident = self.parse_identifier()?;
13441 if let GranteeName::ObjectName(namespace) = name {
13442 name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
13443 format!("{}:{}", namespace, ident),
13444 )]));
13445 };
13446 }
13447 Grantee {
13448 grantee_type: grantee_type.clone(),
13449 name: Some(name),
13450 }
13451 };
13452
13453 values.push(grantee);
13454
13455 if !self.consume_token(&Token::Comma) {
13456 break;
13457 }
13458 }
13459
13460 Ok(values)
13461 }
13462
13463 pub fn parse_grant_revoke_privileges_objects(
13464 &mut self,
13465 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
13466 let privileges = if self.parse_keyword(Keyword::ALL) {
13467 Privileges::All {
13468 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
13469 }
13470 } else {
13471 let actions = self.parse_actions_list()?;
13472 Privileges::Actions(actions)
13473 };
13474
13475 let objects = if self.parse_keyword(Keyword::ON) {
13476 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
13477 Some(GrantObjects::AllTablesInSchema {
13478 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13479 })
13480 } else if self.parse_keywords(&[
13481 Keyword::ALL,
13482 Keyword::SEQUENCES,
13483 Keyword::IN,
13484 Keyword::SCHEMA,
13485 ]) {
13486 Some(GrantObjects::AllSequencesInSchema {
13487 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13488 })
13489 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
13490 Some(GrantObjects::ResourceMonitors(self.parse_comma_separated(
13491 |p| p.parse_object_name_with_wildcards(false, true),
13492 )?))
13493 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
13494 Some(GrantObjects::ComputePools(self.parse_comma_separated(
13495 |p| p.parse_object_name_with_wildcards(false, true),
13496 )?))
13497 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
13498 Some(GrantObjects::FailoverGroup(self.parse_comma_separated(
13499 |p| p.parse_object_name_with_wildcards(false, true),
13500 )?))
13501 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
13502 Some(GrantObjects::ReplicationGroup(self.parse_comma_separated(
13503 |p| p.parse_object_name_with_wildcards(false, true),
13504 )?))
13505 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
13506 Some(GrantObjects::ExternalVolumes(self.parse_comma_separated(
13507 |p| p.parse_object_name_with_wildcards(false, true),
13508 )?))
13509 } else {
13510 let object_type = self.parse_one_of_keywords(&[
13511 Keyword::SEQUENCE,
13512 Keyword::DATABASE,
13513 Keyword::DATABASE,
13514 Keyword::SCHEMA,
13515 Keyword::TABLE,
13516 Keyword::VIEW,
13517 Keyword::WAREHOUSE,
13518 Keyword::INTEGRATION,
13519 Keyword::VIEW,
13520 Keyword::WAREHOUSE,
13521 Keyword::INTEGRATION,
13522 Keyword::USER,
13523 Keyword::CONNECTION,
13524 ]);
13525 let objects =
13526 self.parse_comma_separated(|p| p.parse_object_name_with_wildcards(false, true));
13527 match object_type {
13528 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
13529 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
13530 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
13531 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
13532 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
13533 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
13534 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
13535 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
13536 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
13537 _ => unreachable!(),
13538 }
13539 }
13540 } else {
13541 None
13542 };
13543
13544 Ok((privileges, objects))
13545 }
13546
13547 pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
13548 fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
13549 let columns = parser.parse_parenthesized_column_list(Optional, false)?;
13550 if columns.is_empty() {
13551 Ok(None)
13552 } else {
13553 Ok(Some(columns))
13554 }
13555 }
13556
13557 if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
13559 Ok(Action::ImportedPrivileges)
13560 } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
13561 Ok(Action::AddSearchOptimization)
13562 } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
13563 Ok(Action::AttachListing)
13564 } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
13565 Ok(Action::AttachPolicy)
13566 } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
13567 Ok(Action::BindServiceEndpoint)
13568 } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
13569 let role = self.parse_object_name(false)?;
13570 Ok(Action::DatabaseRole { role })
13571 } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
13572 Ok(Action::EvolveSchema)
13573 } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
13574 Ok(Action::ImportShare)
13575 } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
13576 Ok(Action::ManageVersions)
13577 } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
13578 Ok(Action::ManageReleases)
13579 } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
13580 Ok(Action::OverrideShareRestrictions)
13581 } else if self.parse_keywords(&[
13582 Keyword::PURCHASE,
13583 Keyword::DATA,
13584 Keyword::EXCHANGE,
13585 Keyword::LISTING,
13586 ]) {
13587 Ok(Action::PurchaseDataExchangeListing)
13588 } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
13589 Ok(Action::ResolveAll)
13590 } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
13591 Ok(Action::ReadSession)
13592
13593 } else if self.parse_keyword(Keyword::APPLY) {
13595 let apply_type = self.parse_action_apply_type()?;
13596 Ok(Action::Apply { apply_type })
13597 } else if self.parse_keyword(Keyword::APPLYBUDGET) {
13598 Ok(Action::ApplyBudget)
13599 } else if self.parse_keyword(Keyword::AUDIT) {
13600 Ok(Action::Audit)
13601 } else if self.parse_keyword(Keyword::CONNECT) {
13602 Ok(Action::Connect)
13603 } else if self.parse_keyword(Keyword::CREATE) {
13604 let obj_type = self.maybe_parse_action_create_object_type();
13605 Ok(Action::Create { obj_type })
13606 } else if self.parse_keyword(Keyword::DELETE) {
13607 Ok(Action::Delete)
13608 } else if self.parse_keyword(Keyword::EXECUTE) {
13609 let obj_type = self.maybe_parse_action_execute_obj_type();
13610 Ok(Action::Execute { obj_type })
13611 } else if self.parse_keyword(Keyword::FAILOVER) {
13612 Ok(Action::Failover)
13613 } else if self.parse_keyword(Keyword::INSERT) {
13614 Ok(Action::Insert {
13615 columns: parse_columns(self)?,
13616 })
13617 } else if self.parse_keyword(Keyword::MANAGE) {
13618 let manage_type = self.parse_action_manage_type()?;
13619 Ok(Action::Manage { manage_type })
13620 } else if self.parse_keyword(Keyword::MODIFY) {
13621 let modify_type = self.parse_action_modify_type();
13622 Ok(Action::Modify { modify_type })
13623 } else if self.parse_keyword(Keyword::MONITOR) {
13624 let monitor_type = self.parse_action_monitor_type();
13625 Ok(Action::Monitor { monitor_type })
13626 } else if self.parse_keyword(Keyword::OPERATE) {
13627 Ok(Action::Operate)
13628 } else if self.parse_keyword(Keyword::REFERENCES) {
13629 Ok(Action::References {
13630 columns: parse_columns(self)?,
13631 })
13632 } else if self.parse_keyword(Keyword::READ) {
13633 Ok(Action::Read)
13634 } else if self.parse_keyword(Keyword::REPLICATE) {
13635 Ok(Action::Replicate)
13636 } else if self.parse_keyword(Keyword::ROLE) {
13637 let role = self.parse_identifier()?;
13638 Ok(Action::Role { role })
13639 } else if self.parse_keyword(Keyword::SELECT) {
13640 Ok(Action::Select {
13641 columns: parse_columns(self)?,
13642 })
13643 } else if self.parse_keyword(Keyword::TEMPORARY) {
13644 Ok(Action::Temporary)
13645 } else if self.parse_keyword(Keyword::TRIGGER) {
13646 Ok(Action::Trigger)
13647 } else if self.parse_keyword(Keyword::TRUNCATE) {
13648 Ok(Action::Truncate)
13649 } else if self.parse_keyword(Keyword::UPDATE) {
13650 Ok(Action::Update {
13651 columns: parse_columns(self)?,
13652 })
13653 } else if self.parse_keyword(Keyword::USAGE) {
13654 Ok(Action::Usage)
13655 } else if self.parse_keyword(Keyword::OWNERSHIP) {
13656 Ok(Action::Ownership)
13657 } else {
13658 self.expected("a privilege keyword", self.peek_token())?
13659 }
13660 }
13661
13662 fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
13663 if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
13665 Some(ActionCreateObjectType::ApplicationPackage)
13666 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
13667 Some(ActionCreateObjectType::ComputePool)
13668 } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
13669 Some(ActionCreateObjectType::DataExchangeListing)
13670 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
13671 Some(ActionCreateObjectType::ExternalVolume)
13672 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
13673 Some(ActionCreateObjectType::FailoverGroup)
13674 } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
13675 Some(ActionCreateObjectType::NetworkPolicy)
13676 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
13677 Some(ActionCreateObjectType::OrganiationListing)
13678 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
13679 Some(ActionCreateObjectType::ReplicationGroup)
13680 }
13681 else if self.parse_keyword(Keyword::ACCOUNT) {
13683 Some(ActionCreateObjectType::Account)
13684 } else if self.parse_keyword(Keyword::APPLICATION) {
13685 Some(ActionCreateObjectType::Application)
13686 } else if self.parse_keyword(Keyword::DATABASE) {
13687 Some(ActionCreateObjectType::Database)
13688 } else if self.parse_keyword(Keyword::INTEGRATION) {
13689 Some(ActionCreateObjectType::Integration)
13690 } else if self.parse_keyword(Keyword::ROLE) {
13691 Some(ActionCreateObjectType::Role)
13692 } else if self.parse_keyword(Keyword::SHARE) {
13693 Some(ActionCreateObjectType::Share)
13694 } else if self.parse_keyword(Keyword::USER) {
13695 Some(ActionCreateObjectType::User)
13696 } else if self.parse_keyword(Keyword::WAREHOUSE) {
13697 Some(ActionCreateObjectType::Warehouse)
13698 } else {
13699 None
13700 }
13701 }
13702
13703 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
13704 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
13705 Ok(ActionApplyType::AggregationPolicy)
13706 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
13707 Ok(ActionApplyType::AuthenticationPolicy)
13708 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
13709 Ok(ActionApplyType::JoinPolicy)
13710 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
13711 Ok(ActionApplyType::MaskingPolicy)
13712 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
13713 Ok(ActionApplyType::PackagesPolicy)
13714 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
13715 Ok(ActionApplyType::PasswordPolicy)
13716 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
13717 Ok(ActionApplyType::ProjectionPolicy)
13718 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
13719 Ok(ActionApplyType::RowAccessPolicy)
13720 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
13721 Ok(ActionApplyType::SessionPolicy)
13722 } else if self.parse_keyword(Keyword::TAG) {
13723 Ok(ActionApplyType::Tag)
13724 } else {
13725 self.expected("GRANT APPLY type", self.peek_token())
13726 }
13727 }
13728
13729 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
13730 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
13731 Some(ActionExecuteObjectType::DataMetricFunction)
13732 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
13733 Some(ActionExecuteObjectType::ManagedAlert)
13734 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
13735 Some(ActionExecuteObjectType::ManagedTask)
13736 } else if self.parse_keyword(Keyword::ALERT) {
13737 Some(ActionExecuteObjectType::Alert)
13738 } else if self.parse_keyword(Keyword::TASK) {
13739 Some(ActionExecuteObjectType::Task)
13740 } else {
13741 None
13742 }
13743 }
13744
13745 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
13746 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
13747 Ok(ActionManageType::AccountSupportCases)
13748 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
13749 Ok(ActionManageType::EventSharing)
13750 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
13751 Ok(ActionManageType::ListingAutoFulfillment)
13752 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
13753 Ok(ActionManageType::OrganizationSupportCases)
13754 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
13755 Ok(ActionManageType::UserSupportCases)
13756 } else if self.parse_keyword(Keyword::GRANTS) {
13757 Ok(ActionManageType::Grants)
13758 } else if self.parse_keyword(Keyword::WAREHOUSES) {
13759 Ok(ActionManageType::Warehouses)
13760 } else {
13761 self.expected("GRANT MANAGE type", self.peek_token())
13762 }
13763 }
13764
13765 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
13766 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
13767 Some(ActionModifyType::LogLevel)
13768 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
13769 Some(ActionModifyType::TraceLevel)
13770 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
13771 Some(ActionModifyType::SessionLogLevel)
13772 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
13773 Some(ActionModifyType::SessionTraceLevel)
13774 } else {
13775 None
13776 }
13777 }
13778
13779 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
13780 if self.parse_keyword(Keyword::EXECUTION) {
13781 Some(ActionMonitorType::Execution)
13782 } else if self.parse_keyword(Keyword::SECURITY) {
13783 Some(ActionMonitorType::Security)
13784 } else if self.parse_keyword(Keyword::USAGE) {
13785 Some(ActionMonitorType::Usage)
13786 } else {
13787 None
13788 }
13789 }
13790
    /// Parses the grantee in a GRANT/REVOKE statement.
    ///
    /// For dialects that support `user@host` grantees, a single-part plain
    /// identifier followed by `@` is split into user and host parts;
    /// otherwise the whole object name is returned as-is.
    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
        let mut name = self.parse_object_name(false)?;
        // `consume_token` is deliberately last in the `&&` chain: the `@` is
        // only consumed once the name is known to be a single plain ident.
        if self.dialect.supports_user_host_grantee()
            && name.0.len() == 1
            && name.0[0].as_ident().is_some()
            && self.consume_token(&Token::AtSign)
        {
            // Both unwraps are safe: guarded by the len/as_ident checks above.
            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
            let host = self.parse_identifier()?;
            Ok(GranteeName::UserHost { user, host })
        } else {
            Ok(GranteeName::ObjectName(name))
        }
    }
13805
13806 pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
13808 let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
13809
13810 self.expect_keyword_is(Keyword::FROM)?;
13811 let grantees = self.parse_grantees()?;
13812
13813 let granted_by = self
13814 .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
13815 .then(|| self.parse_identifier().unwrap());
13816
13817 let cascade = self.parse_cascade_option();
13818
13819 Ok(Statement::Revoke {
13820 privileges,
13821 objects,
13822 grantees,
13823 granted_by,
13824 cascade,
13825 })
13826 }
13827
13828 pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
13830 if !dialect_of!(self is MySqlDialect | GenericDialect) {
13831 return parser_err!(
13832 "Unsupported statement REPLACE",
13833 self.peek_token().span.start
13834 );
13835 }
13836
13837 let mut insert = self.parse_insert()?;
13838 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
13839 *replace_into = true;
13840 }
13841
13842 Ok(insert)
13843 }
13844
13845 fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
13849 Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
13850 }
13851
    /// Parses everything after the `INSERT` keyword: the target (or Hive
    /// `DIRECTORY`), optional column lists, the source query or `SET`
    /// assignments, and trailing clauses (`ON CONFLICT` / `ON DUPLICATE KEY
    /// UPDATE`, `RETURNING`, ...). Token consumption order is significant
    /// throughout; each step below consumes only what it matched.
    pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
        // SQLite-style `INSERT OR <action>` clause; `None` when absent.
        let or = self.parse_conflict_clause();
        // MySQL priority modifier (LOW_PRIORITY / DELAYED / HIGH_PRIORITY).
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Always false here; `parse_replace` flips this after the fact.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        if self.parse_keyword(Keyword::DIRECTORY) {
            // Hive: INSERT [OVERWRITE] [LOCAL] DIRECTORY '<path>'
            //       [STORED AS <format>] <query>
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Regular INSERT [TABLE] <target>.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Postgres allows aliasing the insert target with AS.
            let table_alias =
                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `DEFAULT VALUES`: no columns, no source, no assignments.
                (vec![], None, vec![], None, vec![])
            } else {
                // A leading `( SELECT` means the parentheses open a subquery
                // source, not a column list — skip column-list parsing.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                // Source is either absent (FORMAT/SETTINGS follows), a
                // `SET col = val, ...` list, or a full query.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (columns, partitioned, after_columns, source, assignments)
            };

            // Optional SETTINGS then FORMAT, for dialects that support an
            // insert-format clause.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL row/column aliases: `AS row_alias (col_aliases)`.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ...` (Postgres-style) or `ON DUPLICATE KEY UPDATE`
            // (MySQL-style) — distinguished by the keyword after ON.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token() == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Statement::Insert(Insert {
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
            }))
        }
    }
14044
14045 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
14049 let ident = self.parse_identifier()?;
14050 let values = self
14051 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
14052 .unwrap_or_default();
14053
14054 Ok(InputFormatClause { ident, values })
14055 }
14056
14057 fn peek_subquery_start(&mut self) -> bool {
14060 let [maybe_lparen, maybe_select] = self.peek_tokens();
14061 Token::LParen == maybe_lparen
14062 && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
14063 }
14064
14065 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
14066 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
14067 Some(SqliteOnConflict::Replace)
14068 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
14069 Some(SqliteOnConflict::Rollback)
14070 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
14071 Some(SqliteOnConflict::Abort)
14072 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
14073 Some(SqliteOnConflict::Fail)
14074 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
14075 Some(SqliteOnConflict::Ignore)
14076 } else if self.parse_keyword(Keyword::REPLACE) {
14077 Some(SqliteOnConflict::Replace)
14078 } else {
14079 None
14080 }
14081 }
14082
14083 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
14084 if self.parse_keyword(Keyword::PARTITION) {
14085 self.expect_token(&Token::LParen)?;
14086 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
14087 self.expect_token(&Token::RParen)?;
14088 Ok(partition_cols)
14089 } else {
14090 Ok(None)
14091 }
14092 }
14093
14094 pub fn parse_load_data_table_format(
14095 &mut self,
14096 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
14097 if self.parse_keyword(Keyword::INPUTFORMAT) {
14098 let input_format = self.parse_expr()?;
14099 self.expect_keyword_is(Keyword::SERDE)?;
14100 let serde = self.parse_expr()?;
14101 Ok(Some(HiveLoadDataFormat {
14102 input_format,
14103 serde,
14104 }))
14105 } else {
14106 Ok(None)
14107 }
14108 }
14109
14110 fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
14114 Ok(Box::new(SetExpr::Update(self.parse_update()?)))
14115 }
14116
14117 pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
14118 let or = self.parse_conflict_clause();
14119 let table = self.parse_table_and_joins()?;
14120 let from_before_set = if self.parse_keyword(Keyword::FROM) {
14121 Some(UpdateTableFromKind::BeforeSet(
14122 self.parse_table_with_joins()?,
14123 ))
14124 } else {
14125 None
14126 };
14127 self.expect_keyword(Keyword::SET)?;
14128 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
14129 let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
14130 Some(UpdateTableFromKind::AfterSet(
14131 self.parse_table_with_joins()?,
14132 ))
14133 } else {
14134 from_before_set
14135 };
14136 let selection = if self.parse_keyword(Keyword::WHERE) {
14137 Some(self.parse_expr()?)
14138 } else {
14139 None
14140 };
14141 let returning = if self.parse_keyword(Keyword::RETURNING) {
14142 Some(self.parse_comma_separated(Parser::parse_select_item)?)
14143 } else {
14144 None
14145 };
14146 Ok(Statement::Update {
14147 table,
14148 assignments,
14149 from,
14150 selection,
14151 returning,
14152 or,
14153 })
14154 }
14155
14156 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
14158 let target = self.parse_assignment_target()?;
14159 self.expect_token(&Token::Eq)?;
14160 let value = self.parse_expr()?;
14161 Ok(Assignment { target, value })
14162 }
14163
14164 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
14166 if self.consume_token(&Token::LParen) {
14167 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
14168 self.expect_token(&Token::RParen)?;
14169 Ok(AssignmentTarget::Tuple(columns))
14170 } else {
14171 let column = self.parse_object_name(false)?;
14172 Ok(AssignmentTarget::ColumnName(column))
14173 }
14174 }
14175
    /// Parses a single function-call argument, trying the named form first
    /// (`name <op> value`) and falling back to an unnamed expression.
    /// `maybe_parse` rewinds the token stream on failure, so the fallback
    /// re-parses from the same position.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // Dialects where the argument name may itself be an expression.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // Otherwise the argument name must be a plain identifier.
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Not a named argument: parse as a plain (possibly wildcard) expr.
        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
    }
14205
    /// Parses the separator between a named argument's name and its value
    /// (`=>`, `=`, `:=`, `:`, or the VALUE keyword), gated per dialect.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        // The VALUE keyword form is accepted unconditionally.
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Rewind the consumed token before reporting the error so
                // callers using `maybe_parse` see an unmodified stream.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
14234
14235 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
14236 if self.consume_token(&Token::RParen) {
14237 Ok(vec![])
14238 } else {
14239 let args = self.parse_comma_separated(Parser::parse_function_args)?;
14240 self.expect_token(&Token::RParen)?;
14241 Ok(args)
14242 }
14243 }
14244
    /// Parses a table function's argument list (the opening `(` was already
    /// consumed), including an optional trailing SETTINGS list that
    /// terminates the arguments.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        if self.consume_token(&Token::RParen) {
            // Empty argument list: `fn()`.
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        let settings = loop {
            // A SETTINGS clause, when present, ends the argument list.
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            // Stop when no comma follows (end of the comma-separated list).
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
14265
    /// Parses the contents of a function call's argument list (after the
    /// opening `(`): optional ALL/DISTINCT, the arguments themselves, and
    /// the various trailing clauses (null treatment, ORDER BY, LIMIT,
    /// HAVING, SEPARATOR, ON OVERFLOW, JSON null clause). The parse order
    /// below is the accepted clause order.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // `{ABSENT|NULL} ON NULL` may appear before the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if self.consume_token(&Token::RParen) {
            // Empty argument list.
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // `IGNORE NULLS` / `RESPECT NULLS` in argument position.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // Aggregate `HAVING {MIN|MAX} <expr>` bound.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                _ => unreachable!(),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL GROUP_CONCAT-style `SEPARATOR <value>`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // The JSON null clause may also follow the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
14344
14345 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
14347 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
14348 Some(JsonNullClause::AbsentOnNull)
14349 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
14350 Some(JsonNullClause::NullOnNull)
14351 } else {
14352 None
14353 }
14354 }
14355
14356 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
14357 let loc = self.peek_token().span.start;
14358 match (
14359 self.parse_keyword(Keyword::ALL),
14360 self.parse_keyword(Keyword::DISTINCT),
14361 ) {
14362 (true, false) => Ok(Some(DuplicateTreatment::All)),
14363 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
14364 (false, false) => Ok(None),
14365 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
14366 }
14367 }
14368
    /// Parses one item of a SELECT projection list: a wildcard, a qualified
    /// wildcard, an `alias = expr` assignment (where supported), or an
    /// expression with an optional alias.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Optional dialect-reserved operator keyword prefixed to the item;
        // the keyword is rendered via Debug to recover its identifier form.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{:?}", keyword)));

        match self.parse_wildcard_expr()? {
            // `qualifier.*` with optional wildcard modifiers.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*` with optional wildcard modifiers.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted bare `from` here almost certainly means the
            // projection expression is missing — report it as an error
            // rather than treating FROM as an identifier.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token().span.start
                )
            }
            // `alias = expr` projection items, for dialects that allow them.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The guard above guarantees `left` is an identifier.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` where the qualifier is an arbitrary expression; the
            // guard consumes `.` and `*` only when both are present.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // Plain expression, optionally followed by an alias; the prefix
            // keyword (if any) is folded into the expression.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
14430
    /// Parses the dialect-specific modifiers that may follow a `*` in a
    /// projection — ILIKE, EXCLUDE, EXCEPT, REPLACE, RENAME — tried in that
    /// fixed order.
    ///
    /// `wildcard_token` is the already-consumed `*` token, kept for spans.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // ILIKE and EXCLUDE are mutually exclusive: EXCLUDE is only tried
        // when no ILIKE was parsed.
        let opt_exclude = if opt_ilike.is_none()
            && dialect_of!(self is GenericDialect | DuckDbDialect | SnowflakeDialect)
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
        {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
        })
    }
14476
14477 pub fn parse_optional_select_item_ilike(
14481 &mut self,
14482 ) -> Result<Option<IlikeSelectItem>, ParserError> {
14483 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
14484 let next_token = self.next_token();
14485 let pattern = match next_token.token {
14486 Token::SingleQuotedString(s) => s,
14487 _ => return self.expected("ilike pattern", next_token),
14488 };
14489 Some(IlikeSelectItem { pattern })
14490 } else {
14491 None
14492 };
14493 Ok(opt_ilike)
14494 }
14495
14496 pub fn parse_optional_select_item_exclude(
14500 &mut self,
14501 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
14502 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
14503 if self.consume_token(&Token::LParen) {
14504 let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14505 self.expect_token(&Token::RParen)?;
14506 Some(ExcludeSelectItem::Multiple(columns))
14507 } else {
14508 let column = self.parse_identifier()?;
14509 Some(ExcludeSelectItem::Single(column))
14510 }
14511 } else {
14512 None
14513 };
14514
14515 Ok(opt_exclude)
14516 }
14517
14518 pub fn parse_optional_select_item_except(
14522 &mut self,
14523 ) -> Result<Option<ExceptSelectItem>, ParserError> {
14524 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
14525 if self.peek_token().token == Token::LParen {
14526 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
14527 match &idents[..] {
14528 [] => {
14529 return self.expected(
14530 "at least one column should be parsed by the expect clause",
14531 self.peek_token(),
14532 )?;
14533 }
14534 [first, idents @ ..] => Some(ExceptSelectItem {
14535 first_element: first.clone(),
14536 additional_elements: idents.to_vec(),
14537 }),
14538 }
14539 } else {
14540 let ident = self.parse_identifier()?;
14542 Some(ExceptSelectItem {
14543 first_element: ident,
14544 additional_elements: vec![],
14545 })
14546 }
14547 } else {
14548 None
14549 };
14550
14551 Ok(opt_except)
14552 }
14553
14554 pub fn parse_optional_select_item_rename(
14556 &mut self,
14557 ) -> Result<Option<RenameSelectItem>, ParserError> {
14558 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
14559 if self.consume_token(&Token::LParen) {
14560 let idents =
14561 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
14562 self.expect_token(&Token::RParen)?;
14563 Some(RenameSelectItem::Multiple(idents))
14564 } else {
14565 let ident = self.parse_identifier_with_alias()?;
14566 Some(RenameSelectItem::Single(ident))
14567 }
14568 } else {
14569 None
14570 };
14571
14572 Ok(opt_rename)
14573 }
14574
14575 pub fn parse_optional_select_item_replace(
14577 &mut self,
14578 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
14579 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
14580 if self.consume_token(&Token::LParen) {
14581 let items = self.parse_comma_separated(|parser| {
14582 Ok(Box::new(parser.parse_replace_elements()?))
14583 })?;
14584 self.expect_token(&Token::RParen)?;
14585 Some(ReplaceSelectItem { items })
14586 } else {
14587 let tok = self.next_token();
14588 return self.expected("( after REPLACE but", tok);
14589 }
14590 } else {
14591 None
14592 };
14593
14594 Ok(opt_replace)
14595 }
14596 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
14597 let expr = self.parse_expr()?;
14598 let as_keyword = self.parse_keyword(Keyword::AS);
14599 let ident = self.parse_identifier()?;
14600 Ok(ReplaceSelectElement {
14601 expr,
14602 column_name: ident,
14603 as_keyword,
14604 })
14605 }
14606
14607 pub fn parse_asc_desc(&mut self) -> Option<bool> {
14610 if self.parse_keyword(Keyword::ASC) {
14611 Some(true)
14612 } else if self.parse_keyword(Keyword::DESC) {
14613 Some(false)
14614 } else {
14615 None
14616 }
14617 }
14618
14619 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
14621 self.parse_order_by_expr_inner(false)
14622 .map(|(order_by, _)| order_by)
14623 }
14624
14625 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
14627 self.parse_order_by_expr_inner(true)
14628 .map(|(column, operator_class)| IndexColumn {
14629 column,
14630 operator_class,
14631 })
14632 }
14633
    /// Shared implementation behind [`Self::parse_order_by_expr`] and
    /// [`Self::parse_create_index_expr`].
    ///
    /// When `with_operator_class` is true, an identifier directly after the
    /// expression may be an operator class (PostgreSQL `CREATE INDEX`
    /// syntax); it is returned as the second tuple element.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<Ident> = if with_operator_class {
            // A word following the expression could be either an operator
            // class or the start of the ordering options — only treat it as
            // an operator class when it is not one of the option keywords.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_identifier())?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // ClickHouse extension: ORDER BY ... WITH FILL [FROM ..] [TO ..] [STEP ..]
        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
14674
    /// Parse the optional `ASC`/`DESC` and `NULLS FIRST`/`NULLS LAST`
    /// modifiers of an `ORDER BY` expression.
    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
        let asc = self.parse_asc_desc();

        // NULLS FIRST => Some(true); NULLS LAST => Some(false); absent => None.
        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
            Some(false)
        } else {
            None
        };

        Ok(OrderByOptions { asc, nulls_first })
    }
14688
14689 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
14692 let from = if self.parse_keyword(Keyword::FROM) {
14693 Some(self.parse_expr()?)
14694 } else {
14695 None
14696 };
14697
14698 let to = if self.parse_keyword(Keyword::TO) {
14699 Some(self.parse_expr()?)
14700 } else {
14701 None
14702 };
14703
14704 let step = if self.parse_keyword(Keyword::STEP) {
14705 Some(self.parse_expr()?)
14706 } else {
14707 None
14708 };
14709
14710 Ok(WithFill { from, to, step })
14711 }
14712
    /// Parse a ClickHouse `INTERPOLATE` clause of `ORDER BY ... WITH FILL`.
    ///
    /// Returns `Ok(None)` when the keyword is absent. A bare `INTERPOLATE`
    /// with no parenthesized list yields `Interpolate { exprs: None }`.
    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
        if !self.parse_keyword(Keyword::INTERPOLATE) {
            return Ok(None);
        }

        if self.consume_token(&Token::LParen) {
            // INTERPOLATE (col [AS expr], ...) — the list may be empty.
            let interpolations =
                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Some(Interpolate {
                exprs: Some(interpolations),
            }));
        }

        // Bare INTERPOLATE with no column list.
        Ok(Some(Interpolate { exprs: None }))
    }
14733
14734 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
14736 let column = self.parse_identifier()?;
14737 let expr = if self.parse_keyword(Keyword::AS) {
14738 Some(self.parse_expr()?)
14739 } else {
14740 None
14741 };
14742 Ok(InterpolateExpr { column, expr })
14743 }
14744
    /// Parse a `TOP` clause (MSSQL-style): either `TOP (<expr>)` or
    /// `TOP <integer>`, with optional `PERCENT` and `WITH TIES` suffixes.
    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
        let quantity = if self.consume_token(&Token::LParen) {
            // Parenthesized form allows an arbitrary expression.
            let quantity = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(TopQuantity::Expr(quantity))
        } else {
            // Bare form requires an integer literal.
            let next_token = self.next_token();
            let quantity = match next_token.token {
                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
                _ => self.expected("literal int", next_token)?,
            };
            Some(TopQuantity::Constant(quantity))
        };

        let percent = self.parse_keyword(Keyword::PERCENT);

        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);

        Ok(Top {
            with_ties,
            percent,
            quantity,
        })
    }
14771
14772 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
14774 if self.parse_keyword(Keyword::ALL) {
14775 Ok(None)
14776 } else {
14777 Ok(Some(self.parse_expr()?))
14778 }
14779 }
14780
    /// Parse an `OFFSET` clause: an expression optionally followed by the
    /// `ROW` or `ROWS` noise keyword.
    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
        let value = self.parse_expr()?;
        // Record which keyword (if any) was used so the statement can be
        // round-tripped back to its original text.
        let rows = if self.parse_keyword(Keyword::ROW) {
            OffsetRows::Row
        } else if self.parse_keyword(Keyword::ROWS) {
            OffsetRows::Rows
        } else {
            OffsetRows::None
        };
        Ok(Offset { value, rows })
    }
14793
    /// Parse a `FETCH {FIRST | NEXT} [<quantity> [PERCENT]] {ROW | ROWS}
    /// {ONLY | WITH TIES}` clause (the `FETCH` keyword has already been
    /// consumed by the caller).
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?;
        // The quantity is optional: `FETCH FIRST ROWS ONLY` has none.
        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?;
            (Some(quantity), percent)
        };
        // Exactly one of ONLY / WITH TIES must terminate the clause.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
            true
        } else {
            return self.expected("one of ONLY or WITH TIES", self.peek_token());
        };
        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
14821
    /// Parse a row-locking clause: `{UPDATE | SHARE} [OF <table>]
    /// [NOWAIT | SKIP LOCKED]` (the leading `FOR` has already been
    /// consumed by the caller).
    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
            Keyword::UPDATE => LockType::Update,
            Keyword::SHARE => LockType::Share,
            // expect_one_of_keywords can only return a listed keyword.
            _ => unreachable!(),
        };
        let of = if self.parse_keyword(Keyword::OF) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
            Some(NonBlock::Nowait)
        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
            Some(NonBlock::SkipLocked)
        } else {
            None
        };
        Ok(LockClause {
            lock_type,
            of,
            nonblock,
        })
    }
14847
    /// Parse a `VALUES (...), (...)` list. `allow_empty` permits `()` rows.
    /// Rows may optionally be prefixed with the `ROW` keyword (MySQL);
    /// `explicit_row` records whether any row used it.
    pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Empty row: `()`.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values { explicit_row, rows })
    }
14868
14869 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
14870 self.expect_keyword_is(Keyword::TRANSACTION)?;
14871 Ok(Statement::StartTransaction {
14872 modes: self.parse_transaction_modes()?,
14873 begin: false,
14874 transaction: Some(BeginTransactionKind::Transaction),
14875 modifier: None,
14876 statements: vec![],
14877 exception_statements: None,
14878 has_end_keyword: false,
14879 })
14880 }
14881
    /// Parse the remainder of a `BEGIN` statement, including
    /// dialect-specific transaction modifiers (SQLite
    /// `DEFERRED`/`IMMEDIATE`/`EXCLUSIVE`, MSSQL `TRY`/`CATCH`) and the
    /// optional `TRANSACTION`/`WORK` keyword.
    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
        let modifier = if !self.dialect.supports_start_transaction_modifier() {
            None
        } else if self.parse_keyword(Keyword::DEFERRED) {
            Some(TransactionModifier::Deferred)
        } else if self.parse_keyword(Keyword::IMMEDIATE) {
            Some(TransactionModifier::Immediate)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Some(TransactionModifier::Exclusive)
        } else if self.parse_keyword(Keyword::TRY) {
            Some(TransactionModifier::Try)
        } else if self.parse_keyword(Keyword::CATCH) {
            Some(TransactionModifier::Catch)
        } else {
            None
        };
        // BEGIN may optionally be followed by TRANSACTION or WORK; record
        // which so the statement round-trips.
        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
            _ => None,
        };
        Ok(Statement::StartTransaction {
            modes: self.parse_transaction_modes()?,
            begin: true,
            transaction,
            modifier,
            statements: vec![],
            exception_statements: None,
            has_end_keyword: false,
        })
    }
14913
    /// Parse the remainder of an `END` statement, represented as
    /// `Statement::Commit` with `end: true`. Dialects that support end
    /// modifiers (e.g. MSSQL `END TRY` / `END CATCH`) may supply one.
    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
        let modifier = if !self.dialect.supports_end_transaction_modifier() {
            None
        } else if self.parse_keyword(Keyword::TRY) {
            Some(TransactionModifier::Try)
        } else if self.parse_keyword(Keyword::CATCH) {
            Some(TransactionModifier::Catch)
        } else {
            None
        };
        Ok(Statement::Commit {
            chain: self.parse_commit_rollback_chain()?,
            end: true,
            modifier,
        })
    }
14930
    /// Parse a possibly-empty list of transaction modes (`ISOLATION LEVEL
    /// ...`, `READ ONLY`, `READ WRITE`), separated by optional commas.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected("isolation level", self.peek_token())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                // A preceding comma promised another mode, but none followed.
                self.expected("transaction mode", self.peek_token())?
            } else {
                break;
            };
            modes.push(mode);
            // After a comma another mode is mandatory; without one the
            // list may simply end here.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
14968
14969 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
14970 Ok(Statement::Commit {
14971 chain: self.parse_commit_rollback_chain()?,
14972 end: false,
14973 modifier: None,
14974 })
14975 }
14976
14977 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
14978 let chain = self.parse_commit_rollback_chain()?;
14979 let savepoint = self.parse_rollback_savepoint()?;
14980
14981 Ok(Statement::Rollback { chain, savepoint })
14982 }
14983
14984 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
14985 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
14986 if self.parse_keyword(Keyword::AND) {
14987 let chain = !self.parse_keyword(Keyword::NO);
14988 self.expect_keyword_is(Keyword::CHAIN)?;
14989 Ok(chain)
14990 } else {
14991 Ok(false)
14992 }
14993 }
14994
14995 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
14996 if self.parse_keyword(Keyword::TO) {
14997 let _ = self.parse_keyword(Keyword::SAVEPOINT);
14998 let savepoint = self.parse_identifier()?;
14999
15000 Ok(Some(savepoint))
15001 } else {
15002 Ok(None)
15003 }
15004 }
15005
    /// Parse a SQL Server `RAISERROR (msg, severity, state [, args...])
    /// [WITH option, ...]` statement (the `RAISERROR` keyword has already
    /// been consumed by the caller).
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional substitution arguments for the message format string.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
15033
    /// Parse a single `RAISERROR ... WITH` option: `LOG`, `NOWAIT`, or
    /// `SETERROR`.
    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
            Keyword::LOG => Ok(RaisErrorOption::Log),
            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
            // Unreachable in practice: expect_one_of_keywords has already
            // rejected anything other than the three listed keywords.
            _ => self.expected(
                "LOG, NOWAIT OR SETERROR raiserror option",
                self.peek_token(),
            ),
        }
    }
15045
    /// Parse a `DEALLOCATE [PREPARE] <name>` statement (the `DEALLOCATE`
    /// keyword has already been consumed by the caller).
    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
        let prepare = self.parse_keyword(Keyword::PREPARE);
        let name = self.parse_identifier()?;
        Ok(Statement::Deallocate { name, prepare })
    }
15051
    /// Parse an `EXECUTE [IMMEDIATE] <name>` statement with optionally
    /// parenthesized parameters and optional `INTO` / `USING` clauses.
    /// When the dialect supports `EXECUTE IMMEDIATE`, the name is omitted
    /// and `immediate` is set on the resulting statement.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let name = if self.dialect.supports_execute_immediate()
            && self.parse_keyword(Keyword::IMMEDIATE)
        {
            None
        } else {
            let name = self.parse_object_name(false)?;
            Some(name)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Decide where the (possibly empty) parameter list stops: at `)`
        // when parenthesized, otherwise at end of input, a `USING` keyword,
        // or a statement terminator.
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if w.keyword == Keyword::USING => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        Ok(Statement::Execute {
            immediate: name.is_none(),
            name,
            parameters,
            has_parentheses,
            into,
            using,
        })
    }
15098
    /// Parse a `PREPARE <name> [(<data_type>, ...)] AS <statement>`
    /// statement (the `PREPARE` keyword has already been consumed).
    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;

        // Optional list of parameter data types.
        let mut data_types = vec![];
        if self.consume_token(&Token::LParen) {
            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
            self.expect_token(&Token::RParen)?;
        }

        self.expect_keyword_is(Keyword::AS)?;
        let statement = Box::new(self.parse_statement()?);
        Ok(Statement::Prepare {
            name,
            data_types,
            statement,
        })
    }
15116
    /// Parse a Redshift-style `UNLOAD (<query>) TO <location> [WITH (...)]`
    /// statement (the `UNLOAD` keyword has already been consumed).
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;

        let with_options = self.parse_options(Keyword::WITH)?;

        Ok(Statement::Unload {
            query,
            to,
            with: with_options,
        })
    }
15133
    /// Parse the `WHEN [NOT] MATCHED [BY SOURCE|TARGET] [AND <pred>] THEN
    /// <action>` clauses of a `MERGE` statement. Stops at the first token
    /// that is not `WHEN`; rejects actions that are invalid for the clause
    /// kind (e.g. `INSERT` in a `MATCHED` clause).
    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
        let mut clauses = vec![];
        loop {
            if !(self.parse_keyword(Keyword::WHEN)) {
                break;
            }

            let mut clause_kind = MergeClauseKind::Matched;
            if self.parse_keyword(Keyword::NOT) {
                clause_kind = MergeClauseKind::NotMatched;
            }
            self.expect_keyword_is(Keyword::MATCHED)?;

            // `NOT MATCHED BY SOURCE` / `BY TARGET` refine the clause kind.
            if matches!(clause_kind, MergeClauseKind::NotMatched)
                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
            {
                clause_kind = MergeClauseKind::NotMatchedBySource;
            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
            {
                clause_kind = MergeClauseKind::NotMatchedByTarget;
            }

            let predicate = if self.parse_keyword(Keyword::AND) {
                Some(self.parse_expr()?)
            } else {
                None
            };

            self.expect_keyword_is(Keyword::THEN)?;

            // UPDATE/DELETE are only valid for matched rows; INSERT only
            // for not-matched(-by-target) rows.
            let merge_clause = match self.parse_one_of_keywords(&[
                Keyword::UPDATE,
                Keyword::INSERT,
                Keyword::DELETE,
            ]) {
                Some(Keyword::UPDATE) => {
                    if matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "UPDATE is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    self.expect_keyword_is(Keyword::SET)?;
                    MergeAction::Update {
                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
                    }
                }
                Some(Keyword::DELETE) => {
                    if matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "DELETE is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    MergeAction::Delete
                }
                Some(Keyword::INSERT) => {
                    if !matches!(
                        clause_kind,
                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
                    ) {
                        return Err(ParserError::ParserError(format!(
                            "INSERT is not allowed in a {clause_kind} merge clause"
                        )));
                    }
                    let is_mysql = dialect_of!(self is MySqlDialect);

                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
                    // BigQuery allows `INSERT ROW` instead of a VALUES list.
                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.parse_keyword(Keyword::ROW)
                    {
                        MergeInsertKind::Row
                    } else {
                        self.expect_keyword_is(Keyword::VALUES)?;
                        let values = self.parse_values(is_mysql)?;
                        MergeInsertKind::Values(values)
                    };
                    MergeAction::Insert(MergeInsertExpr { columns, kind })
                }
                _ => {
                    return Err(ParserError::ParserError(
                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
                    ));
                }
            };
            clauses.push(MergeClause {
                clause_kind,
                predicate,
                action: merge_clause,
            });
        }
        Ok(clauses)
    }
15232
    /// Parse an `OUTPUT <projection> INTO <table>` clause (SQL Server
    /// `MERGE` syntax).
    fn parse_output(&mut self) -> Result<OutputClause, ParserError> {
        self.expect_keyword_is(Keyword::OUTPUT)?;
        let select_items = self.parse_projection()?;
        self.expect_keyword_is(Keyword::INTO)?;
        let into_table = self.parse_select_into()?;

        Ok(OutputClause {
            select_items,
            into_table,
        })
    }
15244
    /// Parse the target of an `INTO` clause:
    /// `[TEMP | TEMPORARY] [UNLOGGED] [TABLE] <name>`.
    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
        let table = self.parse_keyword(Keyword::TABLE);
        let name = self.parse_object_name(false)?;

        Ok(SelectInto {
            temporary,
            unlogged,
            table,
            name,
        })
    }
15260
    /// Parse the remainder of a `MERGE [INTO] <table> USING <source>
    /// ON <condition> WHEN ... [OUTPUT ...]` statement (the `MERGE`
    /// keyword has already been consumed by the caller).
    pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
        let into = self.parse_keyword(Keyword::INTO);

        let table = self.parse_table_factor()?;

        self.expect_keyword_is(Keyword::USING)?;
        let source = self.parse_table_factor()?;
        self.expect_keyword_is(Keyword::ON)?;
        let on = self.parse_expr()?;
        let clauses = self.parse_merge_clauses()?;
        // SQL Server allows an OUTPUT clause after the WHEN clauses.
        let output = if self.peek_keyword(Keyword::OUTPUT) {
            Some(self.parse_output()?)
        } else {
            None
        };

        Ok(Statement::Merge {
            into,
            table,
            source,
            on: Box::new(on),
            clauses,
            output,
        })
    }
15286
    /// Parse a value acceptable in a SQLite `PRAGMA`: a quoted string, a
    /// number, or a `?` placeholder. Any other value is rejected.
    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
        match self.parse_value()?.value {
            v @ Value::SingleQuotedString(_) => Ok(v),
            v @ Value::DoubleQuotedString(_) => Ok(v),
            v @ Value::Number(_, _) => Ok(v),
            v @ Value::Placeholder(_) => Ok(v),
            _ => {
                // Rewind so the error location points at the rejected value.
                self.prev_token();
                self.expected("number or string or ? placeholder", self.peek_token())
            }
        }
    }
15299
    /// Parse a SQLite `PRAGMA` statement in one of its three forms:
    /// `PRAGMA name`, `PRAGMA name = value`, or `PRAGMA name(value)`.
    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        if self.consume_token(&Token::LParen) {
            // Call form: PRAGMA name(value)
            let value = self.parse_pragma_value()?;
            self.expect_token(&Token::RParen)?;
            Ok(Statement::Pragma {
                name,
                value: Some(value),
                is_eq: false,
            })
        } else if self.consume_token(&Token::Eq) {
            // Assignment form: PRAGMA name = value
            Ok(Statement::Pragma {
                name,
                value: Some(self.parse_pragma_value()?),
                is_eq: true,
            })
        } else {
            // Query form: PRAGMA name
            Ok(Statement::Pragma {
                name,
                value: None,
                is_eq: false,
            })
        }
    }
15325
    /// Parse an `INSTALL <extension>` statement (the `INSTALL` keyword has
    /// already been consumed by the caller).
    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
        let extension_name = self.parse_identifier()?;

        Ok(Statement::Install { extension_name })
    }
15332
    /// Parse the remainder of a `LOAD` statement: either a
    /// `LOAD <extension>` (dialects with extension support) or a
    /// Hive-style `LOAD DATA [LOCAL] INPATH ... INTO TABLE ...`,
    /// depending on dialect capabilities.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token(),
            )
        }
    }
15363
    /// Parse a ClickHouse `OPTIMIZE TABLE <name> [ON CLUSTER ...]
    /// [PARTITION ...] [FINAL] [DEDUPLICATE [BY <expr>]]` statement (the
    /// `OPTIMIZE` keyword has already been consumed by the caller).
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let name = self.parse_object_name(false)?;
        let on_cluster = self.parse_optional_on_cluster()?;

        // PARTITION ID '<id>' selects by identifier; PARTITION <expr>
        // selects by partition expression.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            on_cluster,
            partition,
            include_final,
            deduplicate,
        })
    }
15402
    /// Parse the remainder of `CREATE [TEMPORARY] SEQUENCE [IF NOT EXISTS]
    /// <name> [AS <type>] [<options>] [OWNED BY ...]`.
    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let mut data_type: Option<DataType> = None;
        if self.parse_keywords(&[Keyword::AS]) {
            data_type = Some(self.parse_data_type()?)
        }
        let sequence_options = self.parse_create_sequence_options()?;
        // OWNED BY { table.column | NONE }; `NONE` is kept as a literal
        // object name rather than a dedicated AST variant.
        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
            if self.parse_keywords(&[Keyword::NONE]) {
                Some(ObjectName::from(vec![Ident::new("NONE")]))
            } else {
                Some(self.parse_object_name(false)?)
            }
        } else {
            None
        };
        Ok(Statement::CreateSequence {
            temporary,
            if_not_exists,
            name,
            data_type,
            sequence_options,
            owned_by,
        })
    }
15438
    /// Parse the option list of `CREATE SEQUENCE`: `INCREMENT [BY]`,
    /// `[NO] MINVALUE`, `[NO] MAXVALUE`, `START [WITH]`, `CACHE`,
    /// `[NO] CYCLE`. Boolean payloads record whether the optional keyword
    /// (`BY`, `WITH`, `NO`) appeared, so statements round-trip faithfully.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // NOTE: the Cycle payload records whether `NO` was present:
        // `NO CYCLE` => Cycle(true), `CYCLE` => Cycle(false).
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
15483
    /// The index of the first unprocessed token in the parser's token stream.
    pub fn index(&self) -> usize {
        self.index
    }
15488
    /// Parse one definition of a `WINDOW` clause: `<name> AS (<spec>)`, or
    /// `<name> AS <other_window>` in dialects that allow referencing a
    /// previously defined named window.
    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
        let ident = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;

        let window_expr = if self.consume_token(&Token::LParen) {
            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
        } else if self.dialect.supports_window_clause_named_window_reference() {
            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
        } else {
            return self.expected("(", self.peek_token());
        };

        Ok(NamedWindowDefinition(ident, window_expr))
    }
15503
    /// Parse the remainder of `CREATE [OR ALTER] PROCEDURE <name>
    /// [(<params>)] AS BEGIN <statements> END`.
    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        let params = self.parse_optional_procedure_parameters()?;
        self.expect_keyword_is(Keyword::AS)?;
        self.expect_keyword_is(Keyword::BEGIN)?;
        let statements = self.parse_statements()?;
        self.expect_keyword_is(Keyword::END)?;
        Ok(Statement::CreateProcedure {
            name,
            or_alter,
            params,
            body: statements,
        })
    }
15518
    /// Parse a window specification — the contents of the parentheses of
    /// an `OVER (...)` or named-window definition: an optional base window
    /// name, `PARTITION BY`, `ORDER BY`, and a window frame. Consumes the
    /// closing `)`.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword word is the name of a base window whose
        // specification this one extends.
        let window_name = match self.peek_token().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_indent()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything remaining before `)` must be a window-frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
15552
    /// Parse the remainder of a `CREATE TYPE <name> AS ...` statement:
    /// either `AS ENUM (...)` or a composite type with a parenthesized
    /// attribute list.
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_keyword_is(Keyword::AS)?;

        if self.parse_keyword(Keyword::ENUM) {
            return self.parse_create_type_enum(name);
        }

        let mut attributes = vec![];
        // No parentheses at all, or an immediate `()`: an empty composite.
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Statement::CreateType {
                name,
                representation: UserDefinedTypeRepresentation::Composite { attributes },
            });
        }

        loop {
            let attr_name = self.parse_identifier()?;
            let attr_data_type = self.parse_data_type()?;
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });
            // Accept `,` between attributes; checking `)` after the comma
            // also tolerates a trailing comma before the closing paren.
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected("',' or ')' after attribute definition", self.peek_token());
            }
        }

        Ok(Statement::CreateType {
            name,
            representation: UserDefinedTypeRepresentation::Composite { attributes },
        })
    }
15596
15597 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
15601 self.expect_token(&Token::LParen)?;
15602 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
15603 self.expect_token(&Token::RParen)?;
15604
15605 Ok(Statement::CreateType {
15606 name,
15607 representation: UserDefinedTypeRepresentation::Enum { labels },
15608 })
15609 }
15610
    /// Parse a parenthesized, comma-separated list of identifiers
    /// (e.g. a partition-name list).
    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        self.expect_token(&Token::LParen)?;
        let partitions = self.parse_comma_separated(|p| p.parse_identifier())?;
        self.expect_token(&Token::RParen)?;
        Ok(partitions)
    }
15617
15618 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
15619 if dialect_of!(self is MySqlDialect | GenericDialect) {
15620 if self.parse_keyword(Keyword::FIRST) {
15621 Ok(Some(MySQLColumnPosition::First))
15622 } else if self.parse_keyword(Keyword::AFTER) {
15623 let ident = self.parse_identifier()?;
15624 Ok(Some(MySQLColumnPosition::After(ident)))
15625 } else {
15626 Ok(None)
15627 }
15628 } else {
15629 Ok(None)
15630 }
15631 }
15632
    /// Parse the remainder of a T-SQL `PRINT <expr>` statement.
    fn parse_print(&mut self) -> Result<Statement, ParserError> {
        Ok(Statement::Print(PrintStatement {
            message: Box::new(self.parse_expr()?),
        }))
    }
15639
15640 fn parse_return(&mut self) -> Result<Statement, ParserError> {
15642 match self.maybe_parse(|p| p.parse_expr())? {
15643 Some(expr) => Ok(Statement::Return(ReturnStatement {
15644 value: Some(ReturnStatementValue::Expr(expr)),
15645 })),
15646 None => Ok(Statement::Return(ReturnStatement { value: None })),
15647 }
15648 }
15649
15650 pub fn into_tokens(self) -> Vec<TokenWithSpan> {
15652 self.tokens
15653 }
15654
15655 fn peek_sub_query(&mut self) -> bool {
15657 if self
15658 .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
15659 .is_some()
15660 {
15661 self.prev_token();
15662 return true;
15663 }
15664 false
15665 }
15666
15667 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
15668 let show_in;
15669 let mut filter_position = None;
15670 if self.dialect.supports_show_like_before_in() {
15671 if let Some(filter) = self.parse_show_statement_filter()? {
15672 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
15673 }
15674 show_in = self.maybe_parse_show_stmt_in()?;
15675 } else {
15676 show_in = self.maybe_parse_show_stmt_in()?;
15677 if let Some(filter) = self.parse_show_statement_filter()? {
15678 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
15679 }
15680 }
15681 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
15682 let limit = self.maybe_parse_show_stmt_limit()?;
15683 let from = self.maybe_parse_show_stmt_from()?;
15684 Ok(ShowStatementOptions {
15685 filter_position,
15686 show_in,
15687 starts_with,
15688 limit,
15689 limit_from: from,
15690 })
15691 }
15692
    /// Parses an optional `FROM`/`IN` scope clause of a SHOW statement,
    /// e.g. `IN DATABASE db` or `FROM tbl FROM db`. Returns `Ok(None)`
    /// when neither keyword follows.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            // No scope clause present at all.
            None => return Ok(None),
            // parse_one_of_keywords only returns keywords from the given
            // list, so this arm should be unreachable in practice.
            _ => return self.expected("FROM or IN", self.peek_token()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` immediately followed by STARTS WITH / LIMIT has
            // no parent name. NOTE: `|` (non-short-circuit) evaluates both
            // peeks, which is harmless since peeking consumes nothing.
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            // Same nameless form for `IN SCHEMA`.
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            // Explicit parent kind, optionally followed by its name.
            Some(parent_kw) => {
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    // Unreachable for the same reason as above: parent_kw
                    // comes from the fixed keyword list.
                    _ => {
                        return self.expected(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token(),
                        )
                    }
                }
            }
            // No parent kind keyword: the clause names the parent directly,
            // e.g. `SHOW COLUMNS FROM tbl FROM db` — a second FROM/IN adds
            // a leading qualifier to the object name.
            None => {
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
15762
15763 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
15764 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
15765 Ok(Some(self.parse_value()?.value))
15766 } else {
15767 Ok(None)
15768 }
15769 }
15770
15771 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
15772 if self.parse_keyword(Keyword::LIMIT) {
15773 Ok(self.parse_limit()?)
15774 } else {
15775 Ok(None)
15776 }
15777 }
15778
15779 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
15780 if self.parse_keyword(Keyword::FROM) {
15781 Ok(Some(self.parse_value()?.value))
15782 } else {
15783 Ok(None)
15784 }
15785 }
15786}
15787
15788fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
15789 if let Some(prefix) = prefix {
15790 Expr::Prefixed {
15791 prefix,
15792 value: Box::new(expr),
15793 }
15794 } else {
15795 expr
15796 }
15797}
15798
15799impl Word {
15800 #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
15801 pub fn to_ident(&self, span: Span) -> Ident {
15802 Ident {
15803 value: self.value.clone(),
15804 quote_style: self.quote_style,
15805 span,
15806 }
15807 }
15808
15809 pub fn into_ident(self, span: Span) -> Ident {
15811 Ident {
15812 value: self.value,
15813 quote_style: self.quote_style,
15814 span,
15815 }
15816 }
15817}
15818
15819#[cfg(test)]
15820mod tests {
15821 use crate::test_utils::{all_dialects, TestedDialects};
15822
15823 use super::*;
15824
    #[test]
    fn test_prev_index() {
        // Exercises the next_token/prev_token/peek_token cursor
        // bookkeeping, including backing up over (and past) EOF.
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // Once the input is exhausted, next_token keeps returning EOF.
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
15845
    #[test]
    fn test_peek_tokens() {
        // peek_tokens returns a fixed-size lookahead array without
        // consuming input; positions past the end are padded with EOF.
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Consume "SELECT foo AS bar" so the lookahead starts at FROM.
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
15890
15891 #[cfg(test)]
15892 mod test_parse_data_type {
15893 use crate::ast::{
15894 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
15895 };
15896 use crate::dialect::{AnsiDialect, GenericDialect};
15897 use crate::test_utils::TestedDialects;
15898
15899 macro_rules! test_parse_data_type {
15900 ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
15901 $dialect.run_parser_method(&*$input, |parser| {
15902 let data_type = parser.parse_data_type().unwrap();
15903 assert_eq!($expected_type, data_type);
15904 assert_eq!($input.to_string(), data_type.to_string());
15905 });
15906 }};
15907 }
15908
15909 #[test]
15910 fn test_ansii_character_string_types() {
15911 let dialect =
15913 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
15914
15915 test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
15916
15917 test_parse_data_type!(
15918 dialect,
15919 "CHARACTER(20)",
15920 DataType::Character(Some(CharacterLength::IntegerLength {
15921 length: 20,
15922 unit: None
15923 }))
15924 );
15925
15926 test_parse_data_type!(
15927 dialect,
15928 "CHARACTER(20 CHARACTERS)",
15929 DataType::Character(Some(CharacterLength::IntegerLength {
15930 length: 20,
15931 unit: Some(CharLengthUnits::Characters)
15932 }))
15933 );
15934
15935 test_parse_data_type!(
15936 dialect,
15937 "CHARACTER(20 OCTETS)",
15938 DataType::Character(Some(CharacterLength::IntegerLength {
15939 length: 20,
15940 unit: Some(CharLengthUnits::Octets)
15941 }))
15942 );
15943
15944 test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
15945
15946 test_parse_data_type!(
15947 dialect,
15948 "CHAR(20)",
15949 DataType::Char(Some(CharacterLength::IntegerLength {
15950 length: 20,
15951 unit: None
15952 }))
15953 );
15954
15955 test_parse_data_type!(
15956 dialect,
15957 "CHAR(20 CHARACTERS)",
15958 DataType::Char(Some(CharacterLength::IntegerLength {
15959 length: 20,
15960 unit: Some(CharLengthUnits::Characters)
15961 }))
15962 );
15963
15964 test_parse_data_type!(
15965 dialect,
15966 "CHAR(20 OCTETS)",
15967 DataType::Char(Some(CharacterLength::IntegerLength {
15968 length: 20,
15969 unit: Some(CharLengthUnits::Octets)
15970 }))
15971 );
15972
15973 test_parse_data_type!(
15974 dialect,
15975 "CHARACTER VARYING(20)",
15976 DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
15977 length: 20,
15978 unit: None
15979 }))
15980 );
15981
15982 test_parse_data_type!(
15983 dialect,
15984 "CHARACTER VARYING(20 CHARACTERS)",
15985 DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
15986 length: 20,
15987 unit: Some(CharLengthUnits::Characters)
15988 }))
15989 );
15990
15991 test_parse_data_type!(
15992 dialect,
15993 "CHARACTER VARYING(20 OCTETS)",
15994 DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
15995 length: 20,
15996 unit: Some(CharLengthUnits::Octets)
15997 }))
15998 );
15999
16000 test_parse_data_type!(
16001 dialect,
16002 "CHAR VARYING(20)",
16003 DataType::CharVarying(Some(CharacterLength::IntegerLength {
16004 length: 20,
16005 unit: None
16006 }))
16007 );
16008
16009 test_parse_data_type!(
16010 dialect,
16011 "CHAR VARYING(20 CHARACTERS)",
16012 DataType::CharVarying(Some(CharacterLength::IntegerLength {
16013 length: 20,
16014 unit: Some(CharLengthUnits::Characters)
16015 }))
16016 );
16017
16018 test_parse_data_type!(
16019 dialect,
16020 "CHAR VARYING(20 OCTETS)",
16021 DataType::CharVarying(Some(CharacterLength::IntegerLength {
16022 length: 20,
16023 unit: Some(CharLengthUnits::Octets)
16024 }))
16025 );
16026
16027 test_parse_data_type!(
16028 dialect,
16029 "VARCHAR(20)",
16030 DataType::Varchar(Some(CharacterLength::IntegerLength {
16031 length: 20,
16032 unit: None
16033 }))
16034 );
16035 }
16036
16037 #[test]
16038 fn test_ansii_character_large_object_types() {
16039 let dialect =
16041 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16042
16043 test_parse_data_type!(
16044 dialect,
16045 "CHARACTER LARGE OBJECT",
16046 DataType::CharacterLargeObject(None)
16047 );
16048 test_parse_data_type!(
16049 dialect,
16050 "CHARACTER LARGE OBJECT(20)",
16051 DataType::CharacterLargeObject(Some(20))
16052 );
16053
16054 test_parse_data_type!(
16055 dialect,
16056 "CHAR LARGE OBJECT",
16057 DataType::CharLargeObject(None)
16058 );
16059 test_parse_data_type!(
16060 dialect,
16061 "CHAR LARGE OBJECT(20)",
16062 DataType::CharLargeObject(Some(20))
16063 );
16064
16065 test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
16066 test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
16067 }
16068
16069 #[test]
16070 fn test_parse_custom_types() {
16071 let dialect =
16072 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16073
16074 test_parse_data_type!(
16075 dialect,
16076 "GEOMETRY",
16077 DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
16078 );
16079
16080 test_parse_data_type!(
16081 dialect,
16082 "GEOMETRY(POINT)",
16083 DataType::Custom(
16084 ObjectName::from(vec!["GEOMETRY".into()]),
16085 vec!["POINT".to_string()]
16086 )
16087 );
16088
16089 test_parse_data_type!(
16090 dialect,
16091 "GEOMETRY(POINT, 4326)",
16092 DataType::Custom(
16093 ObjectName::from(vec!["GEOMETRY".into()]),
16094 vec!["POINT".to_string(), "4326".to_string()]
16095 )
16096 );
16097 }
16098
16099 #[test]
16100 fn test_ansii_exact_numeric_types() {
16101 let dialect =
16103 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16104
16105 test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
16106
16107 test_parse_data_type!(
16108 dialect,
16109 "NUMERIC(2)",
16110 DataType::Numeric(ExactNumberInfo::Precision(2))
16111 );
16112
16113 test_parse_data_type!(
16114 dialect,
16115 "NUMERIC(2,10)",
16116 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
16117 );
16118
16119 test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
16120
16121 test_parse_data_type!(
16122 dialect,
16123 "DECIMAL(2)",
16124 DataType::Decimal(ExactNumberInfo::Precision(2))
16125 );
16126
16127 test_parse_data_type!(
16128 dialect,
16129 "DECIMAL(2,10)",
16130 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
16131 );
16132
16133 test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
16134
16135 test_parse_data_type!(
16136 dialect,
16137 "DEC(2)",
16138 DataType::Dec(ExactNumberInfo::Precision(2))
16139 );
16140
16141 test_parse_data_type!(
16142 dialect,
16143 "DEC(2,10)",
16144 DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
16145 );
16146 }
16147
16148 #[test]
16149 fn test_ansii_date_type() {
16150 let dialect =
16152 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16153
16154 test_parse_data_type!(dialect, "DATE", DataType::Date);
16155
16156 test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
16157
16158 test_parse_data_type!(
16159 dialect,
16160 "TIME(6)",
16161 DataType::Time(Some(6), TimezoneInfo::None)
16162 );
16163
16164 test_parse_data_type!(
16165 dialect,
16166 "TIME WITH TIME ZONE",
16167 DataType::Time(None, TimezoneInfo::WithTimeZone)
16168 );
16169
16170 test_parse_data_type!(
16171 dialect,
16172 "TIME(6) WITH TIME ZONE",
16173 DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
16174 );
16175
16176 test_parse_data_type!(
16177 dialect,
16178 "TIME WITHOUT TIME ZONE",
16179 DataType::Time(None, TimezoneInfo::WithoutTimeZone)
16180 );
16181
16182 test_parse_data_type!(
16183 dialect,
16184 "TIME(6) WITHOUT TIME ZONE",
16185 DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
16186 );
16187
16188 test_parse_data_type!(
16189 dialect,
16190 "TIMESTAMP",
16191 DataType::Timestamp(None, TimezoneInfo::None)
16192 );
16193
16194 test_parse_data_type!(
16195 dialect,
16196 "TIMESTAMP(22)",
16197 DataType::Timestamp(Some(22), TimezoneInfo::None)
16198 );
16199
16200 test_parse_data_type!(
16201 dialect,
16202 "TIMESTAMP(22) WITH TIME ZONE",
16203 DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
16204 );
16205
16206 test_parse_data_type!(
16207 dialect,
16208 "TIMESTAMP(33) WITHOUT TIME ZONE",
16209 DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
16210 );
16211 }
16212 }
16213
    #[test]
    fn test_parse_schema_name() {
        // Checks AST equality and Display round-tripping for each of the
        // three schema-name forms (plain, AUTHORIZATION-only, named +
        // AUTHORIZATION).
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }
16246
    #[test]
    fn mysql_parse_index_table_constraint() {
        // MySQL INDEX/KEY table constraints: covers the KEY synonym,
        // quoted/unquoted names, and the optional USING BTREE/HASH
        // index-type clause, each round-tripped through Display.
        macro_rules! test_parse_table_constraint {
            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                    assert_eq!(constraint, $expected);
                    assert_eq!(constraint.to_string(), $input.to_string());
                });
            }};
        }

        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

        test_parse_table_constraint!(
            dialect,
            "INDEX (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: None,
                columns: vec![Ident::new("c1")],
            }
        );

        // `KEY` is displayed as written, tracked by `display_as_key`.
        test_parse_table_constraint!(
            dialect,
            "KEY (c1)",
            TableConstraint::Index {
                display_as_key: true,
                name: None,
                index_type: None,
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX 'index' (c1, c2)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::with_quote('\'', "index")),
                index_type: None,
                columns: vec![Ident::new("c1"), Ident::new("c2")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING BTREE (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::BTree),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING HASH (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::Hash),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING BTREE (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::BTree),
                columns: vec![Ident::new("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING HASH (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::Hash),
                columns: vec![Ident::new("c1")],
            }
        );
    }
16341
16342 #[test]
16343 fn test_tokenizer_error_loc() {
16344 let sql = "foo '";
16345 let ast = Parser::parse_sql(&GenericDialect, sql);
16346 assert_eq!(
16347 ast,
16348 Err(ParserError::TokenizerError(
16349 "Unterminated string literal at Line: 1, Column: 5".to_string()
16350 ))
16351 );
16352 }
16353
16354 #[test]
16355 fn test_parser_error_loc() {
16356 let sql = "SELECT this is a syntax error";
16357 let ast = Parser::parse_sql(&GenericDialect, sql);
16358 assert_eq!(
16359 ast,
16360 Err(ParserError::ParserError(
16361 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
16362 .to_string()
16363 ))
16364 );
16365 }
16366
16367 #[test]
16368 fn test_nested_explain_error() {
16369 let sql = "EXPLAIN EXPLAIN SELECT 1";
16370 let ast = Parser::parse_sql(&GenericDialect, sql);
16371 assert_eq!(
16372 ast,
16373 Err(ParserError::ParserError(
16374 "Explain must be root of the plan".to_string()
16375 ))
16376 );
16377 }
16378
    #[test]
    fn test_parse_multipart_identifier_positive() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        // Quoted middle part containing a dot, parens, and an escaped
        // (doubled) quote — must parse as exactly three parts.
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "F(o)o. \"bar".to_string(),
                quote_style: Some('"'),
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });

        // Whitespace around the separating period is tolerated.
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method("CATALOG . table", |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });
    }
16424
    #[test]
    fn test_parse_multipart_identifier_negative() {
        // Each malformed input must fail with the exact error message.
        macro_rules! test_parse_multipart_identifier_error {
            ($input:expr, $expected_err:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
                    assert_eq!(actual_err.to_string(), $expected_err);
                });
            }};
        }

        test_parse_multipart_identifier_error!(
            "",
            "sql parser error: Empty input when parsing identifier",
        );

        test_parse_multipart_identifier_error!(
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        );

        test_parse_multipart_identifier_error!(
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        );
    }
16461
16462 #[test]
16463 fn test_mysql_partition_selection() {
16464 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
16465 let expected = vec!["p0", "p2"];
16466
16467 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
16468 assert_eq!(ast.len(), 1);
16469 if let Statement::Query(v) = &ast[0] {
16470 if let SetExpr::Select(select) = &*v.body {
16471 assert_eq!(select.from.len(), 1);
16472 let from: &TableWithJoins = &select.from[0];
16473 let table_factor = &from.relation;
16474 if let TableFactor::Table { partitions, .. } = table_factor {
16475 let actual: Vec<&str> = partitions
16476 .iter()
16477 .map(|ident| ident.value.as_str())
16478 .collect();
16479 assert_eq!(expected, actual);
16480 }
16481 }
16482 } else {
16483 panic!("fail to parse mysql partition selection");
16484 }
16485 }
16486
16487 #[test]
16488 fn test_replace_into_placeholders() {
16489 let sql = "REPLACE INTO t (a) VALUES (&a)";
16490
16491 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
16492 }
16493
16494 #[test]
16495 fn test_replace_into_set_placeholder() {
16496 let sql = "REPLACE INTO t SET ?";
16497
16498 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
16499 }
16500
16501 #[test]
16502 fn test_replace_incomplete() {
16503 let sql = r#"REPLACE"#;
16504
16505 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
16506 }
16507}