1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::{
36 key_value_options::{
37 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
38 },
39 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
40};
41use crate::ast::Statement::CreatePolicy;
42use crate::ast::*;
43use crate::dialect::*;
44use crate::keywords::{Keyword, ALL_KEYWORDS};
45use crate::tokenizer::*;
46use sqlparser::parser::ParserState::ColumnDefinition;
47
/// Errors produced while parsing SQL text into an AST.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// Error raised by the tokenizer before parsing proper began.
    TokenizerError(String),
    /// General parse error with a human-readable message.
    ParserError(String),
    /// The nesting-depth budget was exhausted (see `RecursionCounter`).
    RecursionLimitExceeded,
}
54
/// Builds an `Err(ParserError::ParserError(..))` whose message is `$MSG`
/// with the source location `$loc` appended.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
61
62mod alter;
63mod merge;
64
/// Depth-tracking support used to bail out of deeply nested SQL instead of
/// overflowing the stack (only available with the `std` feature).
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Shared budget of remaining nesting depth. Each successful
    /// `try_decrease` spends one unit and hands back a guard that
    /// refunds it on drop.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter allowing `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Spends one unit of depth. Returns `RecursionLimitExceeded`
        /// once the budget reaches zero; otherwise returns a guard that
        /// restores the unit when dropped.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let depth = self.remaining_depth.get();
            if depth == 0 {
                return Err(ParserError::RecursionLimitExceeded);
            }
            self.remaining_depth.set(depth - 1);
            Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
        }
    }

    /// Guard that gives one unit of depth back when it goes out of scope
    /// (see the `Drop` impl below).
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let restored = self.remaining_depth.get() + 1;
            self.remaining_depth.set(restored);
        }
    }
}
131
/// No-op stand-in for builds without `std`: depth is not tracked, so
/// `try_decrease` always succeeds.
#[cfg(not(feature = "std"))]
mod recursion {
    /// Counter that performs no tracking in `no_std` builds.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard; nothing to restore on drop.
    pub struct DepthGuard {}
}
152
/// Whether a syntactic element may be omitted (`Optional`) or must be
/// present (`Mandatory`). Variants are glob-imported at the top of the file.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
158
/// Whether the `LATERAL` keyword was present. Variants are glob-imported
/// at the top of the file.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
163
/// Outcome of parsing an expression position that also admits wildcards:
/// a regular expression, `qualifier.*`, or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
169
/// Tokenizer failures surface as `ParserError::TokenizerError`, carrying
/// only the rendered message.
impl From<TokenizerError> for ParserError {
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
175
176impl fmt::Display for ParserError {
177 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
178 write!(
179 f,
180 "sql parser error: {}",
181 match self {
182 ParserError::TokenizerError(s) => s,
183 ParserError::ParserError(s) => s,
184 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
185 }
186 )
187 }
188}
189
// `std::error::Error` needs no methods beyond `Debug` + `Display`, both
// implemented above; gated because the trait lives in `std`.
#[cfg(feature = "std")]
impl std::error::Error for ParserError {}
192
/// Default nesting-depth budget given to a freshly constructed [`Parser`].
const DEFAULT_REMAINING_DEPTH: usize = 50;
195
/// Sentinel returned when reading past the end of the token stream; its
/// span uses the dummy location line 0, column 0.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
204
/// Newtype flag recording whether a trailing closing bracket was matched.
/// NOTE(review): semantics inferred from the name — confirm at the use
/// sites, which are outside this chunk.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
224
/// Options that alter parser behavior independently of the dialect.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    // Accept trailing commas where the dialect would otherwise reject them
    // (default follows `Dialect::supports_trailing_commas`, see `Parser::new`).
    pub trailing_commas: bool,
    // Unescape string literals during tokenization (forwarded to the
    // tokenizer in `try_with_sql`).
    pub unescape: bool,
    // Require `;` between statements in `parse_statements`.
    pub require_semicolon_stmt_delimiter: bool,
}
236
/// Defaults: no trailing commas, unescape literals, require `;` delimiters.
impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}
246
impl ParserOptions {
    /// Creates options with the [`Default`] values.
    pub fn new() -> Self {
        Default::default()
    }

    /// Builder-style setter for `trailing_commas`.
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Builder-style setter for `unescape`.
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }
}
276
/// Modes the parser can be in, changing how some tokens are interpreted
/// while a particular construct is being parsed.
#[derive(Copy, Clone)]
enum ParserState {
    /// Default behavior.
    Normal,
    /// Inside a `CONNECT BY` clause — NOTE(review): presumably alters
    /// expression parsing (e.g. `PRIOR`); confirm at the use sites.
    ConnectBy,
    /// Inside a column definition.
    ColumnDefinition,
}
292
/// SQL parser: consumes a located token stream and builds AST
/// [`Statement`]s according to a [`Dialect`].
pub struct Parser<'a> {
    // The token stream being parsed.
    tokens: Vec<TokenWithSpan>,
    // Index of the next unconsumed token in `tokens`.
    index: usize,
    // Current parsing mode; see `ParserState`.
    state: ParserState,
    // Dialect driving keyword and precedence decisions.
    dialect: &'a dyn Dialect,
    // Behavior switches; see `ParserOptions`.
    options: ParserOptions,
    // Guards against stack overflow on deeply nested input.
    recursion_counter: RecursionCounter,
}
347
348impl<'a> Parser<'a> {
    /// Creates a parser for the given dialect with no tokens loaded,
    /// the default recursion limit, and trailing-comma support taken
    /// from the dialect.
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }
374
    /// Builder-style override of the recursion (nesting depth) limit.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
401
    /// Builder-style replacement of the parser options.
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
428
    /// Loads a located token stream and resets the read position to the start.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
435
436 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
438 let tokens_with_locations: Vec<TokenWithSpan> = tokens
440 .into_iter()
441 .map(|token| TokenWithSpan {
442 token,
443 span: Span::empty(),
444 })
445 .collect();
446 self.with_tokens_with_locations(tokens_with_locations)
447 }
448
    /// Tokenizes `sql` with this parser's dialect and unescape option,
    /// then loads the resulting tokens. Returns a `TokenizerError`
    /// (converted to `ParserError`) if tokenization fails.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
462
    /// Parses statements until EOF, requiring at least one `;` between
    /// consecutive statements unless
    /// `options.require_semicolon_stmt_delimiter` is false.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        // True when the previous statement has not yet been followed by a
        // delimiter, so another statement may not start.
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any number of semicolons (empty statements).
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // Stop (without consuming) on `END` where a delimiter was
                // expected — presumably so an enclosing block parser can
                // consume it; confirm against callers.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
513
    /// Convenience entry point: tokenizes and parses `sql` in one call
    /// using a fresh parser with default options for `dialect`.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
532
    /// Parses a single statement, dispatching on its leading keyword.
    ///
    /// The dialect gets the first chance to parse (custom statements);
    /// otherwise the leading token is consumed and matched below. Arms
    /// that call back into `parse_xxx` helpers expecting to re-read the
    /// keyword rewind first via `prev_token`.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Charge one unit of nesting depth; restored when `_guard` drops.
        let _guard = self.recursion_counter.try_decrease()?;

        // Dialect-specific statement syntax takes priority.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt()
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt()
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while()
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt()
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                // These receive the already-consumed keyword token so they
                // can attach its span to the AST node.
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token),
                // LISTEN/UNLISTEN/NOTIFY are only statements in dialects
                // that support them; elsewhere they fall through to the
                // generic "expected a statement" error.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset(),
                _ => self.expected("an SQL statement", next_token),
            },
            // A parenthesized query, e.g. `(SELECT ...)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
670
    /// Parses a `CASE` *statement* (control flow, distinct from the CASE
    /// expression): `CASE [operand] WHEN ... [ELSE ...] END [CASE]`.
    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // No operand before the first WHEN means the "searched" form.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept an optional trailing `CASE` (i.e. `END CASE`); when
        // present, that token is recorded as the end token instead.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(Statement::Case(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        }))
    }
707
    /// Parses an `IF` statement:
    /// `IF <cond> THEN ... [ELSEIF <cond> THEN ...]* [ELSE ...] END IF`.
    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The terminating `END IF`; the recorded end token is the `IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(Statement::If(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        }))
    }
747
    /// Parses a `WHILE <cond> ... END` statement body as one conditional block.
    fn parse_while(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(Statement::While(WhileStatement { while_block }))
    }
757
758 fn parse_conditional_statement_block(
766 &mut self,
767 terminal_keywords: &[Keyword],
768 ) -> Result<ConditionalStatementBlock, ParserError> {
769 let start_token = self.get_current_token().clone(); let mut then_token = None;
771
772 let condition = match &start_token.token {
773 Token::Word(w) if w.keyword == Keyword::ELSE => None,
774 Token::Word(w) if w.keyword == Keyword::WHILE => {
775 let expr = self.parse_expr()?;
776 Some(expr)
777 }
778 _ => {
779 let expr = self.parse_expr()?;
780 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
781 Some(expr)
782 }
783 };
784
785 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
786
787 Ok(ConditionalStatementBlock {
788 start_token: AttachedToken(start_token),
789 condition,
790 then_token,
791 conditional_statements,
792 })
793 }
794
795 pub(crate) fn parse_conditional_statements(
798 &mut self,
799 terminal_keywords: &[Keyword],
800 ) -> Result<ConditionalStatements, ParserError> {
801 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
802 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
803 let statements = self.parse_statement_list(terminal_keywords)?;
804 let end_token = self.expect_keyword(Keyword::END)?;
805
806 ConditionalStatements::BeginEnd(BeginEndStatements {
807 begin_token: AttachedToken(begin_token),
808 statements,
809 end_token: AttachedToken(end_token),
810 })
811 } else {
812 ConditionalStatements::Sequence {
813 statements: self.parse_statement_list(terminal_keywords)?,
814 }
815 };
816 Ok(conditional_statements)
817 }
818
    /// Parses `RAISE [USING MESSAGE = <expr> | <expr>]`; a bare `RAISE`
    /// (re-raise) yields `value: None`.
    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // Optional plain expression; rolls back cleanly if absent.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(Statement::Raise(RaiseStatement { value }))
    }
834
    /// Parses `COMMENT [IF EXISTS] ON <object-type> <name> IS <'text'|NULL>`
    /// (the `COMMENT` keyword has already been consumed).
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        // `IS NULL` clears the comment.
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }
879
    /// Parses MySQL's `FLUSH` statement (`FLUSH` already consumed).
    /// Rejected for dialects other than MySQL/Generic.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
        }

        // Optional location modifier.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // What to flush; multi-keyword forms are tried before their
        // single-keyword prefixes (e.g. `RELAY LOGS` before `LOGS`).
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // Optional `FOR CHANNEL <name>` qualifier.
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // `FLUSH TABLES [t1, t2, ...] [WITH READ LOCK] [FOR EXPORT]`.
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A non-keyword word starts the table list.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
966
    /// Parses `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`
    /// (the `MSCK` keyword has already been consumed).
    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The whole partition-action clause is optional; `maybe_parse`
        // rewinds if `PARTITIONS` does not follow.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        }
        .into())
    }
994
    /// Parses `TRUNCATE [TABLE] [ONLY] <name>, ... [PARTITION (..)]`
    /// plus Postgres-style `RESTART|CONTINUE IDENTITY` and
    /// `CASCADE|RESTRICT`, and an optional `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);

        // Each target may carry its own `ONLY` prefix.
        let table_names = self
            .parse_comma_separated(|p| {
                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
            })?
            .into_iter()
            .map(|(only, name)| TruncateTableTarget { name, only })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are Postgres-specific.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            identity,
            cascade,
            on_cluster,
        }
        .into())
    }
1040
1041 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1042 if self.parse_keyword(Keyword::CASCADE) {
1043 Some(CascadeOption::Cascade)
1044 } else if self.parse_keyword(Keyword::RESTRICT) {
1045 Some(CascadeOption::Restrict)
1046 } else {
1047 None
1048 }
1049 }
1050
    /// Parses DuckDB `ATTACH` options: an optional parenthesized,
    /// comma-separated list of `READ_ONLY [TRUE|FALSE]` and `TYPE <ident>`.
    /// Returns an empty vec when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean is optional; bare READ_ONLY is recorded as None.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            // `)` ends the list; `,` continues it.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }
1085
    /// Parses DuckDB
    /// `ATTACH [DATABASE] [IF NOT EXISTS] <path> [AS <alias>] [(options)]`
    /// (the `ATTACH` keyword has already been consumed).
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1105
    /// Parses DuckDB `DETACH [DATABASE] [IF EXISTS] <alias>`
    /// (the `DETACH` keyword has already been consumed).
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1116
    /// Parses `ATTACH [DATABASE] <expr> AS <schema>` (non-DuckDB form;
    /// the `ATTACH` keyword has already been consumed).
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1128
    /// Parses `ANALYZE [TABLE] <name>` followed by any number of clauses:
    /// `PARTITION (..)`, `FOR COLUMNS [c, ..]`, `CACHE METADATA`,
    /// `NOSCAN`, `COMPUTE STATISTICS` (in any order, until none match).
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // Column list is optional; `maybe_parse` rewinds when
                    // no identifier list follows.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        }
        .into())
    }
1186
    /// Parses an expression that may be a wildcard: bare `*`,
    /// `qualifier.[qualifier.]*`, or — failing those — rewinds and parses
    /// an ordinary expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember where we started so we can rewind if no wildcard is found.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: the outer pattern admits only the
                        // two variants handled above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    // Accumulate `a.b.c.` qualifiers until `*` or failure.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse as a normal expression.
        self.index = index;
        self.parse_expr()
    }
1236
    /// Parses a full expression starting at the lowest precedence level.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1241
    /// Parses an expression with an optional alias and optional ASC/DESC
    /// ordering (nulls-first is never set here).
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject ASC/DESC/GROUP as *implicit* aliases so they can be read
        // as the following clause; explicit `AS` aliases are always allowed.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1260
    /// Parses an expression by precedence climbing: a prefix expression,
    /// then infix operators whose precedence exceeds `precedence`.
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Charge one unit of nesting depth; restored when `_guard` drops.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // `.` is handled by `parse_compound_expr` above, not as an
            // infix operator; stop rather than mis-parsing it here.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1288
1289 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1290 let condition = self.parse_expr()?;
1291 let message = if self.parse_keyword(Keyword::AS) {
1292 Some(self.parse_expr()?)
1293 } else {
1294 None
1295 };
1296
1297 Ok(Statement::Assert { condition, message })
1298 }
1299
    /// Parses `SAVEPOINT <name>` (the keyword has already been consumed).
    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }
1304
    /// Parses `RELEASE [SAVEPOINT] <name>` (the `RELEASE` keyword has
    /// already been consumed; `SAVEPOINT` is optional).
    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier()?;

        Ok(Statement::ReleaseSavepoint { name })
    }
1311
    /// Parses `LISTEN <channel>` (the keyword has already been consumed).
    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        Ok(Statement::LISTEN { channel })
    }
1316
    /// Parses `UNLISTEN {<channel> | *}` (the keyword has already been
    /// consumed). A `*` channel is stored as a literal `*` identifier.
    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    // Rewind so the error points at the offending token.
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }
1331
    /// Parses `NOTIFY <channel> [, '<payload>']` (the keyword has
    /// already been consumed).
    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        let payload = if self.consume_token(&Token::Comma) {
            Some(self.parse_literal_string()?)
        } else {
            None
        };
        Ok(Statement::NOTIFY { channel, payload })
    }
1341
    /// Parses `RENAME TABLE <old> TO <new> [, <old> TO <new>]...`
    /// (the `RENAME` keyword has already been consumed; only the TABLE
    /// form is supported).
    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
        if self.peek_keyword(Keyword::TABLE) {
            self.expect_keyword(Keyword::TABLE)?;
            let rename_tables = self.parse_comma_separated(|parser| {
                let old_name = parser.parse_object_name(false)?;
                parser.expect_keyword(Keyword::TO)?;
                let new_name = parser.parse_object_name(false)?;

                Ok(RenameTable { old_name, new_name })
            })?;
            Ok(Statement::RenameTable(rename_tables))
        } else {
            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
        }
    }
1358
    /// Parse an expression prefix introduced by the reserved keyword `w`
    /// (already consumed; `w_span` is its span).
    ///
    /// Returns `Ok(Some(expr))` when the keyword starts an expression,
    /// `Ok(None)` when it does not (the caller then retries the word as an
    /// unreserved identifier), and `Err` for a malformed construct.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Literal keywords: rewind so parse_value sees the keyword token.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Session-information "functions" callable without parentheses
            // (Postgres/generic only).
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Datetime functions whose argument list is optional.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // In Databricks, only treat EXISTS as a subquery test when `(` is
            // followed by SELECT or WITH; otherwise `exists(...)` may be a
            // higher-order function and falls through.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
            }
            // parse_substring re-reads the keyword, so rewind first.
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[1, 2, 3]` — bracketed array literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(SELECT ...)` — array built from a subquery; excluded in
            // dialects where `array(...)` is an ordinary function call.
            Keyword::ARRAY
                if self.peek_token() == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR expr` is an operator only inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // DuckDB-style `MAP {'k': 'v'}` literal.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            // Geometric literals such as `POINT '(1,2)'` when supported.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1482
1483 fn parse_expr_prefix_by_unreserved_word(
1485 &mut self,
1486 w: &Word,
1487 w_span: Span,
1488 ) -> Result<Expr, ParserError> {
1489 match self.peek_token().token {
1490 Token::LParen if !self.peek_outer_join_operator() => {
1491 let id_parts = vec![w.clone().into_ident(w_span)];
1492 self.parse_function(ObjectName::from(id_parts))
1493 }
1494 Token::SingleQuotedString(_)
1496 | Token::DoubleQuotedString(_)
1497 | Token::HexStringLiteral(_)
1498 if w.value.starts_with('_') =>
1499 {
1500 Ok(Expr::Prefixed {
1501 prefix: w.clone().into_ident(w_span),
1502 value: self.parse_introduced_string_expr()?.into(),
1503 })
1504 }
1505 Token::SingleQuotedString(_)
1507 | Token::DoubleQuotedString(_)
1508 | Token::HexStringLiteral(_)
1509 if w.value.starts_with('_') =>
1510 {
1511 Ok(Expr::Prefixed {
1512 prefix: w.clone().into_ident(w_span),
1513 value: self.parse_introduced_string_expr()?.into(),
1514 })
1515 }
1516 Token::Arrow if self.dialect.supports_lambda_functions() => {
1517 self.expect_token(&Token::Arrow)?;
1518 Ok(Expr::Lambda(LambdaFunction {
1519 params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1520 body: Box::new(self.parse_expr()?),
1521 }))
1522 }
1523 _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1524 }
1525 }
1526
    /// Parse a prefix (non-infix) expression: literals, unary operators,
    /// parenthesized expressions/subqueries, and keyword-introduced forms.
    /// An optional trailing `COLLATE` clause is attached unless the parser
    /// is in column-definition state.
    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
        // Dialect-specific prefix parsing takes precedence over everything.
        if let Some(prefix) = self.dialect.parse_prefix(self) {
            return prefix;
        }

        let loc = self.peek_token_ref().span.start;
        // Speculatively parse `<data_type> '<literal>'` typed strings (e.g.
        // `DATE '2020-01-01'`). INTERVAL is re-parsed in full; custom types
        // are rejected (via the dummy error) so the word is re-parsed below.
        let opt_expr = self.maybe_parse(|parser| {
            match parser.parse_data_type()? {
                DataType::Interval { .. } => parser.parse_interval(),
                DataType::Custom(..) => parser_err!("dummy", loc),
                data_type => Ok(Expr::TypedString(TypedString {
                    data_type,
                    value: parser.parse_value()?,
                    uses_odbc_syntax: false,
                })),
            }
        })?;

        if let Some(expr) = opt_expr {
            return Ok(expr);
        }

        let dialect = self.dialect;

        self.advance_token();
        let next_token_index = self.get_current_index();
        let next_token = self.get_current_token();
        let span = next_token.span;
        let expr = match &next_token.token {
            Token::Word(w) => {
                let w = w.clone();
                // Try the word as a reserved keyword first; if that errors,
                // retry it as an unreserved identifier when the dialect does
                // not reserve the keyword, otherwise surface the original
                // error.
                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
                    Ok(Some(expr)) => Ok(expr),

                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),

                    Err(e) => {
                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
                            }) {
                                return Ok(expr);
                            }
                        }
                        return Err(e);
                    }
                }
            }
            // `[1, 2, 3]` — array literal without the ARRAY keyword.
            Token::LBracket => self.parse_array_expr(false),
            // Unary plus/minus bind at multiplicative precedence.
            tok @ Token::Minus | tok @ Token::Plus => {
                let op = if *tok == Token::Plus {
                    UnaryOperator::Plus
                } else {
                    UnaryOperator::Minus
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
                    ),
                })
            }
            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
                op: UnaryOperator::BangNot,
                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
            }),
            // Postgres-only prefix operators: `!!`, `|/`, `||/`, `@`.
            tok @ Token::DoubleExclamationMark
            | tok @ Token::PGSquareRoot
            | tok @ Token::PGCubeRoot
            | tok @ Token::AtSign
                if dialect_is!(dialect is PostgreSqlDialect) =>
            {
                let op = match tok {
                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
                    Token::AtSign => UnaryOperator::PGAbs,
                    _ => {
                        return Err(ParserError::ParserError(
                            "Internal parser error: unexpected unary operator token".to_string(),
                        ))
                    }
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            Token::Tilde => Ok(Expr::UnaryOp {
                op: UnaryOperator::BitwiseNot,
                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
            }),
            // Geometric prefix operators (`#`, `@-@`, `@@`, `?-`, `?|`).
            tok @ Token::Sharp
            | tok @ Token::AtDashAt
            | tok @ Token::AtAt
            | tok @ Token::QuestionMarkDash
            | tok @ Token::QuestionPipe
                if self.dialect.supports_geometric_types() =>
            {
                let op = match tok {
                    Token::Sharp => UnaryOperator::Hash,
                    Token::AtDashAt => UnaryOperator::AtDashAt,
                    Token::AtAt => UnaryOperator::DoubleAt,
                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
                    _ => {
                        return Err(ParserError::ParserError(format!(
                            "Unexpected token in unary operator parsing: {tok:?}"
                        )))
                    }
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            // String literal tokens: rewind so parse_value re-reads them.
            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
            {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::UnicodeStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::Number(_, _)
            | Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::TripleSingleQuotedString(_)
            | Token::TripleDoubleQuotedString(_)
            | Token::DollarQuotedString(_)
            | Token::SingleQuotedByteStringLiteral(_)
            | Token::DoubleQuotedByteStringLiteral(_)
            | Token::TripleSingleQuotedByteStringLiteral(_)
            | Token::TripleDoubleQuotedByteStringLiteral(_)
            | Token::SingleQuotedRawStringLiteral(_)
            | Token::DoubleQuotedRawStringLiteral(_)
            | Token::TripleSingleQuotedRawStringLiteral(_)
            | Token::TripleDoubleQuotedRawStringLiteral(_)
            | Token::NationalStringLiteral(_)
            | Token::HexStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // `(` — a subquery, a parenthesized lambda parameter list, a
            // nested expression, or a tuple, tried in that order.
            Token::LParen => {
                let expr =
                    if let Some(expr) = self.try_parse_expr_sub_query()? {
                        expr
                    } else if let Some(lambda) = self.try_parse_lambda()? {
                        return Ok(lambda);
                    } else {
                        let exprs = self.parse_comma_separated(Parser::parse_expr)?;
                        match exprs.len() {
                            0 => return Err(ParserError::ParserError(
                                "Internal parser error: parse_comma_separated returned empty list"
                                    .to_string(),
                            )),
                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
                            _ => Expr::Tuple(exprs),
                        }
                    };
                self.expect_token(&Token::RParen)?;
                Ok(expr)
            }
            // Placeholders (`?`, `:name`, `@name`): rewind for parse_value.
            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LBrace => {
                self.prev_token();
                self.parse_lbrace_expr()
            }
            _ => self.expected_at("an expression", next_token_index),
        }?;

        // Attach a trailing COLLATE clause, except in column-definition
        // state (where COLLATE belongs to the column, not the expression).
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            Ok(Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            })
        } else {
            Ok(expr)
        }
    }
1760
1761 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1762 Ok(Expr::TypedString(TypedString {
1763 data_type: DataType::GeometricType(kind),
1764 value: self.parse_value()?,
1765 uses_odbc_syntax: false,
1766 }))
1767 }
1768
    /// Parse accesses chained onto an already-parsed expression `root`:
    /// `.member`, `.*`, and `[subscript]`, extending any accesses already in
    /// `chain`. Also recognizes a trailing Oracle outer-join marker `(+)`.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `expr.*`: only Postgres treats this as a qualified
                        // wildcard here; other dialects rewind the `.` and
                        // leave it for the caller.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    // Quoted member access, e.g. `a.'b'`.
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    // Any other token: parse the member at Period precedence
                    // and flatten compound results into the access chain.
                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
                        Expr::CompoundFieldAccess { root, access_chain } => {
                            chain.push(AccessExpr::Dot(*root));
                            chain.extend(access_chain);
                        }
                        Expr::CompoundIdentifier(parts) => chain
                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
                        expr => {
                            chain.push(AccessExpr::Dot(expr));
                        }
                    },
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `expr[...]` subscripts (PartiQL handles brackets elsewhere).
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` requires every component to be a plain identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected("an identifier or a '*' after '.'", self.peek_token());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `col(+)` — the operand must be a (possibly compound) column.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
1860
    /// Combine `root` and its access chain into a single expression,
    /// normalizing special shapes: a pure identifier chain becomes
    /// `CompoundIdentifier`, `ident.ident.func(...)` becomes a `Function`
    /// with a compound name, and a trailing `(+)` becomes `OuterJoin`;
    /// anything else stays a `CompoundFieldAccess`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `ident.ident. ... .func(...)`: fold the leading identifiers into
        // the function's object name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // All entries except the trailing Function.
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root and the chain identifiers to the function name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // `ident.ident(+)`: merge the identifiers into one OuterJoin operand.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
1953
1954 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1955 match k {
1956 Keyword::LOCAL => Some(ContextModifier::Local),
1957 Keyword::GLOBAL => Some(ContextModifier::Global),
1958 Keyword::SESSION => Some(ContextModifier::Session),
1959 _ => None,
1960 }
1961 }
1962
1963 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1965 if !matches!(root, Expr::Identifier(_)) {
1966 return false;
1967 }
1968 fields
1969 .iter()
1970 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1971 }
1972
1973 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1975 let mut idents = vec![];
1976 if let Expr::Identifier(root) = root {
1977 idents.push(root);
1978 for x in fields {
1979 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1980 idents.push(ident);
1981 } else {
1982 return parser_err!(
1983 format!("Expected identifier, found: {}", x),
1984 x.span().start
1985 );
1986 }
1987 }
1988 Ok(idents)
1989 } else {
1990 parser_err!(
1991 format!("Expected identifier, found: {}", root),
1992 root.span().start
1993 )
1994 }
1995 }
1996
1997 fn peek_outer_join_operator(&mut self) -> bool {
1999 if !self.dialect.supports_outer_join_operator() {
2000 return false;
2001 }
2002
2003 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2004 Token::LParen == maybe_lparen.token
2005 && Token::Plus == maybe_plus.token
2006 && Token::RParen == maybe_rparen.token
2007 }
2008
2009 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2012 self.dialect.supports_outer_join_operator()
2013 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2014 }
2015
2016 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2017 self.expect_token(&Token::LParen)?;
2018 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2019 self.expect_token(&Token::RParen)?;
2020
2021 Ok(options)
2022 }
2023
2024 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2025 let name = self.parse_identifier()?;
2026
2027 let next_token = self.peek_token();
2028 if next_token == Token::Comma || next_token == Token::RParen {
2029 return Ok(UtilityOption { name, arg: None });
2030 }
2031 let arg = self.parse_expr()?;
2032
2033 Ok(UtilityOption {
2034 name,
2035 arg: Some(arg),
2036 })
2037 }
2038
2039 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2040 if !self.peek_sub_query() {
2041 return Ok(None);
2042 }
2043
2044 Ok(Some(Expr::Subquery(self.parse_query()?)))
2045 }
2046
2047 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2048 if !self.dialect.supports_lambda_functions() {
2049 return Ok(None);
2050 }
2051 self.maybe_parse(|p| {
2052 let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2053 p.expect_token(&Token::RParen)?;
2054 p.expect_token(&Token::Arrow)?;
2055 let expr = p.parse_expr()?;
2056 Ok(Expr::Lambda(LambdaFunction {
2057 params: OneOrManyWithParens::Many(params),
2058 body: Box::new(expr),
2059 }))
2060 })
2061 }
2062
2063 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2070 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2072 return Ok(Some(expr));
2073 }
2074 self.maybe_parse_odbc_body_datetime()
2076 }
2077
2078 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2089 self.maybe_parse(|p| {
2090 let token = p.next_token().clone();
2091 let word_string = token.token.to_string();
2092 let data_type = match word_string.as_str() {
2093 "t" => DataType::Time(None, TimezoneInfo::None),
2094 "d" => DataType::Date,
2095 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2096 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2097 };
2098 let value = p.parse_value()?;
2099 Ok(Expr::TypedString(TypedString {
2100 data_type,
2101 value,
2102 uses_odbc_syntax: true,
2103 }))
2104 })
2105 }
2106
2107 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2116 self.maybe_parse(|p| {
2117 p.expect_keyword(Keyword::FN)?;
2118 let fn_name = p.parse_object_name(false)?;
2119 let mut fn_call = p.parse_function_call(fn_name)?;
2120 fn_call.uses_odbc_syntax = true;
2121 Ok(Expr::Function(fn_call))
2122 })
2123 }
2124
2125 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2126 self.parse_function_call(name).map(Expr::Function)
2127 }
2128
    /// Parse the argument list and trailing clauses of a call to `name`; the
    /// name has been parsed, the opening `(` has not.
    ///
    /// Handles Snowflake subquery arguments, ClickHouse-style parameterized
    /// calls `f(params)(args)`, and the WITHIN GROUP / FILTER /
    /// RESPECT|IGNORE NULLS / OVER suffixes.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Snowflake permits a bare subquery as the sole "argument".
        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse/generic: when a second `(` follows, the first list was
        // actually the parameter list of `f(parameters)(arguments)`.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE <expr>)` — the `&&` chain consumes each piece only
        // if the previous one matched.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for RESPECT/IGNORE NULLS here if the argument list did
        // not already carry such a clause.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (<spec>)` or `OVER <named window>`.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2216
2217 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2219 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2220 Some(keyword) => {
2221 self.expect_keyword_is(Keyword::NULLS)?;
2222
2223 Ok(match keyword {
2224 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2225 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2226 _ => None,
2227 })
2228 }
2229 None => Ok(None),
2230 }
2231 }
2232
2233 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2234 let args = if self.consume_token(&Token::LParen) {
2235 FunctionArguments::List(self.parse_function_argument_list()?)
2236 } else {
2237 FunctionArguments::None
2238 };
2239 Ok(Expr::Function(Function {
2240 name,
2241 uses_odbc_syntax: false,
2242 parameters: FunctionArguments::None,
2243 args,
2244 filter: None,
2245 over: None,
2246 null_treatment: None,
2247 within_group: vec![],
2248 }))
2249 }
2250
2251 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2252 let next_token = self.next_token();
2253 match &next_token.token {
2254 Token::Word(w) => match w.keyword {
2255 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2256 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2257 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2258 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2259 },
2260 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2261 }
2262 }
2263
2264 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2265 let units = self.parse_window_frame_units()?;
2266 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2267 let start_bound = self.parse_window_frame_bound()?;
2268 self.expect_keyword_is(Keyword::AND)?;
2269 let end_bound = Some(self.parse_window_frame_bound()?);
2270 (start_bound, end_bound)
2271 } else {
2272 (self.parse_window_frame_bound()?, None)
2273 };
2274 Ok(WindowFrame {
2275 units,
2276 start_bound,
2277 end_bound,
2278 })
2279 }
2280
2281 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2283 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2284 Ok(WindowFrameBound::CurrentRow)
2285 } else {
2286 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2287 None
2288 } else {
2289 Some(Box::new(match self.peek_token().token {
2290 Token::SingleQuotedString(_) => self.parse_interval()?,
2291 _ => self.parse_expr()?,
2292 }))
2293 };
2294 if self.parse_keyword(Keyword::PRECEDING) {
2295 Ok(WindowFrameBound::Preceding(rows))
2296 } else if self.parse_keyword(Keyword::FOLLOWING) {
2297 Ok(WindowFrameBound::Following(rows))
2298 } else {
2299 self.expected("PRECEDING or FOLLOWING", self.peek_token())
2300 }
2301 }
2302 }
2303
2304 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2306 if self.dialect.supports_group_by_expr() {
2307 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2308 self.expect_token(&Token::LParen)?;
2309 let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2310 self.expect_token(&Token::RParen)?;
2311 Ok(Expr::GroupingSets(result))
2312 } else if self.parse_keyword(Keyword::CUBE) {
2313 self.expect_token(&Token::LParen)?;
2314 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2315 self.expect_token(&Token::RParen)?;
2316 Ok(Expr::Cube(result))
2317 } else if self.parse_keyword(Keyword::ROLLUP) {
2318 self.expect_token(&Token::LParen)?;
2319 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2320 self.expect_token(&Token::RParen)?;
2321 Ok(Expr::Rollup(result))
2322 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2323 Ok(Expr::Tuple(vec![]))
2327 } else {
2328 self.parse_expr()
2329 }
2330 } else {
2331 self.parse_expr()
2333 }
2334 }
2335
2336 fn parse_tuple(
2340 &mut self,
2341 lift_singleton: bool,
2342 allow_empty: bool,
2343 ) -> Result<Vec<Expr>, ParserError> {
2344 if lift_singleton {
2345 if self.consume_token(&Token::LParen) {
2346 let result = if allow_empty && self.consume_token(&Token::RParen) {
2347 vec![]
2348 } else {
2349 let result = self.parse_comma_separated(Parser::parse_expr)?;
2350 self.expect_token(&Token::RParen)?;
2351 result
2352 };
2353 Ok(result)
2354 } else {
2355 Ok(vec![self.parse_expr()?])
2356 }
2357 } else {
2358 self.expect_token(&Token::LParen)?;
2359 let result = if allow_empty && self.consume_token(&Token::RParen) {
2360 vec![]
2361 } else {
2362 let result = self.parse_comma_separated(Parser::parse_expr)?;
2363 self.expect_token(&Token::RParen)?;
2364 result
2365 };
2366 Ok(result)
2367 }
2368 }
2369
2370 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2371 let case_token = AttachedToken(self.get_current_token().clone());
2372 let mut operand = None;
2373 if !self.parse_keyword(Keyword::WHEN) {
2374 operand = Some(Box::new(self.parse_expr()?));
2375 self.expect_keyword_is(Keyword::WHEN)?;
2376 }
2377 let mut conditions = vec![];
2378 loop {
2379 let condition = self.parse_expr()?;
2380 self.expect_keyword_is(Keyword::THEN)?;
2381 let result = self.parse_expr()?;
2382 conditions.push(CaseWhen { condition, result });
2383 if !self.parse_keyword(Keyword::WHEN) {
2384 break;
2385 }
2386 }
2387 let else_result = if self.parse_keyword(Keyword::ELSE) {
2388 Some(Box::new(self.parse_expr()?))
2389 } else {
2390 None
2391 };
2392 let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2393 Ok(Expr::Case {
2394 case_token,
2395 end_token,
2396 operand,
2397 conditions,
2398 else_result,
2399 })
2400 }
2401
2402 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2403 if self.parse_keyword(Keyword::FORMAT) {
2404 let value = self.parse_value()?.value;
2405 match self.parse_optional_time_zone()? {
2406 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2407 None => Ok(Some(CastFormat::Value(value))),
2408 }
2409 } else {
2410 Ok(None)
2411 }
2412 }
2413
2414 pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2415 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2416 self.parse_value().map(|v| Some(v.value))
2417 } else {
2418 Ok(None)
2419 }
2420 }
2421
2422 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2424 self.expect_token(&Token::LParen)?;
2425 let data_type = self.parse_data_type()?;
2426 self.expect_token(&Token::Comma)?;
2427 let expr = self.parse_expr()?;
2428 let styles = if self.consume_token(&Token::Comma) {
2429 self.parse_comma_separated(Parser::parse_expr)?
2430 } else {
2431 Default::default()
2432 };
2433 self.expect_token(&Token::RParen)?;
2434 Ok(Expr::Convert {
2435 is_try,
2436 expr: Box::new(expr),
2437 data_type: Some(data_type),
2438 charset: None,
2439 target_before_value: true,
2440 styles,
2441 })
2442 }
2443
    /// Parse the tail of CONVERT/TRY_CONVERT. Dialects where the target type
    /// precedes the value delegate to `parse_mssql_convert`; otherwise this
    /// handles `CONVERT(<expr> USING <charset>)` and
    /// `CONVERT(<expr>, <data_type> [CHARACTER SET <charset>])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // MySQL-style `CONVERT(expr USING charset)`.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        // Optional `CHARACTER SET <charset>` after the data type.
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2483
2484 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2486 self.expect_token(&Token::LParen)?;
2487 let expr = self.parse_expr()?;
2488 self.expect_keyword_is(Keyword::AS)?;
2489 let data_type = self.parse_data_type()?;
2490 let format = self.parse_optional_cast_format()?;
2491 self.expect_token(&Token::RParen)?;
2492 Ok(Expr::Cast {
2493 kind,
2494 expr: Box::new(expr),
2495 data_type,
2496 format,
2497 })
2498 }
2499
2500 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2502 self.expect_token(&Token::LParen)?;
2503 let exists_node = Expr::Exists {
2504 negated,
2505 subquery: self.parse_query()?,
2506 };
2507 self.expect_token(&Token::RParen)?;
2508 Ok(exists_node)
2509 }
2510
    /// Parses the body of `EXTRACT(<field> FROM <expr>)` — or, for Snowflake
    /// and the generic dialect, the comma form `EXTRACT(<field>, <expr>)` —
    /// after the EXTRACT keyword has been consumed.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | GenericDialect)
        {
            // NOTE(review): `consume_token` runs before the dialect check, so
            // in other dialects a comma here is consumed and then reported via
            // the error branch below — confirm this diagnostic is intended.
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2535
2536 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2537 self.expect_token(&Token::LParen)?;
2538 let expr = self.parse_expr()?;
2539 let field = if self.parse_keyword(Keyword::TO) {
2541 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2543 } else if self.consume_token(&Token::Comma) {
2544 match self.parse_value()?.value {
2546 Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2547 _ => {
2548 return Err(ParserError::ParserError(
2549 "Scale field can only be of number type".to_string(),
2550 ))
2551 }
2552 }
2553 } else {
2554 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2555 };
2556 self.expect_token(&Token::RParen)?;
2557 if is_ceil {
2558 Ok(Expr::Ceil {
2559 expr: Box::new(expr),
2560 field,
2561 })
2562 } else {
2563 Ok(Expr::Floor {
2564 expr: Box::new(expr),
2565 field,
2566 })
2567 }
2568 }
2569
2570 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2571 let between_prec = self.dialect.prec_value(Precedence::Between);
2572 let position_expr = self.maybe_parse(|p| {
2573 p.expect_token(&Token::LParen)?;
2575
2576 let expr = p.parse_subexpr(between_prec)?;
2578 p.expect_keyword_is(Keyword::IN)?;
2579 let from = p.parse_expr()?;
2580 p.expect_token(&Token::RParen)?;
2581 Ok(Expr::Position {
2582 expr: Box::new(expr),
2583 r#in: Box::new(from),
2584 })
2585 })?;
2586 match position_expr {
2587 Some(expr) => Ok(expr),
2588 None => self.parse_function(ObjectName::from(vec![ident])),
2591 }
2592 }
2593
    /// Parses `SUBSTRING(<expr> [FROM <start>] [FOR <len>])` and the
    /// comma-separated variant `SUBSTR/SUBSTRING(<expr>, <start>, <len>)`.
    /// The leading SUBSTR/SUBSTRING keyword has not yet been consumed.
    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
        // `shorthand` records whether the SUBSTR spelling was used so the AST
        // round-trips the original keyword.
        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
            Keyword::SUBSTR => true,
            Keyword::SUBSTRING => false,
            _ => {
                self.prev_token();
                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
            }
        };
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        let mut from_expr = None;
        // `special` marks the non-standard comma-separated argument form.
        let special = self.consume_token(&Token::Comma);
        if special || self.parse_keyword(Keyword::FROM) {
            from_expr = Some(self.parse_expr()?);
        }

        let mut to_expr = None;
        // A comma is accepted for the length argument even when the start
        // argument used the FROM spelling.
        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
            to_expr = Some(self.parse_expr()?);
        }
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Substring {
            expr: Box::new(expr),
            substring_from: from_expr.map(Box::new),
            substring_for: to_expr.map(Box::new),
            special,
            shorthand,
        })
    }
2626
2627 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2628 self.expect_token(&Token::LParen)?;
2630 let expr = self.parse_expr()?;
2631 self.expect_keyword_is(Keyword::PLACING)?;
2632 let what_expr = self.parse_expr()?;
2633 self.expect_keyword_is(Keyword::FROM)?;
2634 let from_expr = self.parse_expr()?;
2635 let mut for_expr = None;
2636 if self.parse_keyword(Keyword::FOR) {
2637 for_expr = Some(self.parse_expr()?);
2638 }
2639 self.expect_token(&Token::RParen)?;
2640
2641 Ok(Expr::Overlay {
2642 expr: Box::new(expr),
2643 overlay_what: Box::new(what_expr),
2644 overlay_from: Box::new(from_expr),
2645 overlay_for: for_expr.map(Box::new),
2646 })
2647 }
2648
    /// Parses the parenthesized body of `TRIM`, supporting
    /// `TRIM([BOTH|LEADING|TRAILING] [<what>] <expr>)`,
    /// `TRIM(<what> FROM <expr>)`, and for some dialects the comma form
    /// `TRIM(<expr>, <chars>, ...)`.
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let mut trim_where = None;
        if let Token::Word(word) = self.peek_token().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // `TRIM(<what> FROM <expr>)`: the first expression is the
            // characters to trim, the second is the string operand.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
        {
            // NOTE(review): the comma is consumed before the dialect check, so
            // in other dialects a comma here falls into the final branch and
            // surfaces as an "expected )" error — confirm this is intended.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
2694
2695 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2696 let next_token = self.next_token();
2697 match &next_token.token {
2698 Token::Word(w) => match w.keyword {
2699 Keyword::BOTH => Ok(TrimWhereField::Both),
2700 Keyword::LEADING => Ok(TrimWhereField::Leading),
2701 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2702 _ => self.expected("trim_where field", next_token)?,
2703 },
2704 _ => self.expected("trim_where field", next_token),
2705 }
2706 }
2707
2708 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2711 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2712 self.expect_token(&Token::RBracket)?;
2713 Ok(Expr::Array(Array { elem: exprs, named }))
2714 }
2715
    /// Parses the optional `ON OVERFLOW ...` clause of `LISTAGG`: either
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler literal>] WITH|WITHOUT COUNT`.
    /// Returns `None` when no `ON OVERFLOW` clause is present.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler literal is optional: WITH/WITHOUT directly after
                // TRUNCATE means it was omitted; a string-literal token of any
                // supported flavor supplies one.
                let filler = match self.peek_token().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token(),
                    )?,
                };
                // Exactly one of WITH COUNT / WITHOUT COUNT must follow.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
2749
    /// Parses a date/time field name (YEAR, MONTH, SECOND, EPOCH, ...) as
    /// used in EXTRACT, CEIL/FLOOR ... TO, and INTERVAL qualifiers. Dialects
    /// may additionally allow arbitrary identifiers or single-quoted custom
    /// fields; BigQuery/generic allow `WEEK(<weekday>)`.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery-style `WEEK(MONDAY)`: an optional parenthesized
                    // weekday argument.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the common misspelling and the correct spelling are
                // kept as distinct AST variants for round-tripping.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                // Dialect hook: accept any identifier as a custom field.
                _ if self.dialect.allow_extract_custom() => {
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            // Dialect hook: accept a single-quoted string as a custom field.
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
2826
2827 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2828 match self.peek_token().token {
2829 Token::Word(w) => match w.keyword {
2830 Keyword::EXISTS => {
2831 let negated = true;
2832 let _ = self.parse_keyword(Keyword::EXISTS);
2833 self.parse_exists_expr(negated)
2834 }
2835 _ => Ok(Expr::UnaryOp {
2836 op: UnaryOperator::Not,
2837 expr: Box::new(
2838 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2839 ),
2840 }),
2841 },
2842 _ => Ok(Expr::UnaryOp {
2843 op: UnaryOperator::Not,
2844 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2845 }),
2846 }
2847 }
2848
2849 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2859 let token = self.expect_token(&Token::LBrace)?;
2860
2861 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2862 self.expect_token(&Token::RBrace)?;
2863 return Ok(fn_expr);
2864 }
2865
2866 if self.dialect.supports_dictionary_syntax() {
2867 self.prev_token(); return self.parse_dictionary();
2869 }
2870
2871 self.expected("an expression", token)
2872 }
2873
2874 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2880 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2881
2882 self.expect_keyword_is(Keyword::AGAINST)?;
2883
2884 self.expect_token(&Token::LParen)?;
2885
2886 let match_value = self.parse_value()?.value;
2888
2889 let in_natural_language_mode_keywords = &[
2890 Keyword::IN,
2891 Keyword::NATURAL,
2892 Keyword::LANGUAGE,
2893 Keyword::MODE,
2894 ];
2895
2896 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2897
2898 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2899
2900 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2901 if self.parse_keywords(with_query_expansion_keywords) {
2902 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2903 } else {
2904 Some(SearchModifier::InNaturalLanguageMode)
2905 }
2906 } else if self.parse_keywords(in_boolean_mode_keywords) {
2907 Some(SearchModifier::InBooleanMode)
2908 } else if self.parse_keywords(with_query_expansion_keywords) {
2909 Some(SearchModifier::WithQueryExpansion)
2910 } else {
2911 None
2912 };
2913
2914 self.expect_token(&Token::RParen)?;
2915
2916 Ok(Expr::MatchAgainst {
2917 columns,
2918 match_value,
2919 opt_search_modifier,
2920 })
2921 }
2922
    /// Parses the body of an `INTERVAL` expression after the keyword: a value
    /// followed by an optional unit qualifier, e.g. `INTERVAL '1' DAY`,
    /// `INTERVAL '1-2' YEAR TO MONTH`, or — in dialects that allow it — an
    /// unqualified `INTERVAL '1 day'`.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // When a qualifier is mandatory the value can be a full expression;
        // otherwise only a prefix expression is accepted so that a following
        // word is read as the unit rather than swallowed by the value.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token().span.start
            );
        } else {
            None
        };

        // SECOND admits `SECOND(<leading>, <fractional>)`; other fields take
        // a single optional precision and may be followed by `TO <field>`
        // (with its own fractional precision when that field is SECOND).
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3005
3006 pub fn next_token_is_temporal_unit(&mut self) -> bool {
3009 if let Token::Word(word) = self.peek_token().token {
3010 matches!(
3011 word.keyword,
3012 Keyword::YEAR
3013 | Keyword::YEARS
3014 | Keyword::MONTH
3015 | Keyword::MONTHS
3016 | Keyword::WEEK
3017 | Keyword::WEEKS
3018 | Keyword::DAY
3019 | Keyword::DAYS
3020 | Keyword::HOUR
3021 | Keyword::HOURS
3022 | Keyword::MINUTE
3023 | Keyword::MINUTES
3024 | Keyword::SECOND
3025 | Keyword::SECONDS
3026 | Keyword::CENTURY
3027 | Keyword::DECADE
3028 | Keyword::DOW
3029 | Keyword::DOY
3030 | Keyword::EPOCH
3031 | Keyword::ISODOW
3032 | Keyword::ISOYEAR
3033 | Keyword::JULIAN
3034 | Keyword::MICROSECOND
3035 | Keyword::MICROSECONDS
3036 | Keyword::MILLENIUM
3037 | Keyword::MILLENNIUM
3038 | Keyword::MILLISECOND
3039 | Keyword::MILLISECONDS
3040 | Keyword::NANOSECOND
3041 | Keyword::NANOSECONDS
3042 | Keyword::QUARTER
3043 | Keyword::TIMEZONE
3044 | Keyword::TIMEZONE_HOUR
3045 | Keyword::TIMEZONE_MINUTE
3046 )
3047 } else {
3048 false
3049 }
3050 }
3051
    /// Parses a struct literal such as `STRUCT<a INT, b STRING>(1, 'x')` or
    /// the untyped form `STRUCT(1 AS a)`. The caller has already consumed
    /// the STRUCT token.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // Step back so parse_struct_type_def can see the STRUCT keyword.
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        if trailing_bracket.0 {
            // A leftover `>` from a `>>` token at the top level means one `>`
            // had no matching `<`.
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token().span.start
            );
        }

        self.expect_token(&Token::LParen)?;
        // `AS name` on values is allowed only when the type list was omitted.
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3079
    /// Parses one value of a struct literal, optionally aliased with
    /// `AS <name>`. `typed_syntax` is true when the literal carried an
    /// explicit type list, in which case `AS` is rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // Rewind so the error span points at the AS keyword itself.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3111
    /// Parses an optional `STRUCT<field, ...>` type list, using `elem_parser`
    /// for each field. Returns the fields plus a flag reporting whether the
    /// closing bracket arrived as part of a `>>` token, one half of which
    /// belongs to an enclosing angle-bracket level.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // No `<` means an untyped STRUCT: no declared fields.
        if Token::Lt != self.peek_token() {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // A trailing `>>` reported by a nested type also ends this list.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3154
3155 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3157 self.expect_keyword_is(Keyword::STRUCT)?;
3158 self.expect_token(&Token::LParen)?;
3159 let struct_body = self.parse_comma_separated(|parser| {
3160 let field_name = parser.parse_identifier()?;
3161 let field_type = parser.parse_data_type()?;
3162
3163 Ok(StructField {
3164 field_name: Some(field_name),
3165 field_type,
3166 options: None,
3167 })
3168 });
3169 self.expect_token(&Token::RParen)?;
3170 struct_body
3171 }
3172
    /// Parses a single field inside `STRUCT<...>`: an optional name followed
    /// by a data type and an optional `OPTIONS(...)` list. Also reports
    /// whether the type consumed a trailing `>>`.
    fn parse_struct_field_def(
        &mut self,
    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
        // Two consecutive words means `name type`; anything else is an
        // anonymous field whose first token starts the type.
        let is_anonymous_field = !matches!(
            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
            (Token::Word(_), Token::Word(_))
        );

        let field_name = if is_anonymous_field {
            None
        } else {
            Some(self.parse_identifier()?)
        };

        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;

        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
        Ok((
            StructField {
                field_name,
                field_type,
                options,
            },
            trailing_bracket,
        ))
    }
3210
3211 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3221 self.expect_keyword_is(Keyword::UNION)?;
3222
3223 self.expect_token(&Token::LParen)?;
3224
3225 let fields = self.parse_comma_separated(|p| {
3226 Ok(UnionField {
3227 field_name: p.parse_identifier()?,
3228 field_type: p.parse_data_type()?,
3229 })
3230 })?;
3231
3232 self.expect_token(&Token::RParen)?;
3233
3234 Ok(fields)
3235 }
3236
3237 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3248 self.expect_token(&Token::LBrace)?;
3249
3250 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3251
3252 self.expect_token(&Token::RBrace)?;
3253
3254 Ok(Expr::Dictionary(fields))
3255 }
3256
3257 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3268 let key = self.parse_identifier()?;
3269
3270 self.expect_token(&Token::Colon)?;
3271
3272 let expr = self.parse_expr()?;
3273
3274 Ok(DictionaryField {
3275 key,
3276 value: Box::new(expr),
3277 })
3278 }
3279
3280 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3290 self.expect_token(&Token::LBrace)?;
3291 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3292 self.expect_token(&Token::RBrace)?;
3293 Ok(Expr::Map(Map { entries: fields }))
3294 }
3295
3296 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3306 let key = self.parse_expr()?;
3307
3308 self.expect_token(&Token::Colon)?;
3309
3310 let value = self.parse_expr()?;
3311
3312 Ok(MapEntry {
3313 key: Box::new(key),
3314 value: Box::new(value),
3315 })
3316 }
3317
3318 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3328 self.expect_keyword_is(Keyword::MAP)?;
3329 self.expect_token(&Token::LParen)?;
3330 let key_data_type = self.parse_data_type()?;
3331 self.expect_token(&Token::Comma)?;
3332 let value_data_type = self.parse_data_type()?;
3333 self.expect_token(&Token::RParen)?;
3334
3335 Ok((key_data_type, value_data_type))
3336 }
3337
3338 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3348 self.expect_keyword_is(Keyword::TUPLE)?;
3349 self.expect_token(&Token::LParen)?;
3350 let mut field_defs = vec![];
3351 loop {
3352 let (def, _) = self.parse_struct_field_def()?;
3353 field_defs.push(def);
3354 if !self.consume_token(&Token::Comma) {
3355 break;
3356 }
3357 }
3358 self.expect_token(&Token::RParen)?;
3359
3360 Ok(field_defs)
3361 }
3362
3363 fn expect_closing_angle_bracket(
3368 &mut self,
3369 trailing_bracket: MatchedTrailingBracket,
3370 ) -> Result<MatchedTrailingBracket, ParserError> {
3371 let trailing_bracket = if !trailing_bracket.0 {
3372 match self.peek_token().token {
3373 Token::Gt => {
3374 self.next_token();
3375 false.into()
3376 }
3377 Token::ShiftRight => {
3378 self.next_token();
3379 true.into()
3380 }
3381 _ => return self.expected(">", self.peek_token()),
3382 }
3383 } else {
3384 false.into()
3385 };
3386
3387 Ok(trailing_bracket)
3388 }
3389
3390 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3392 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3394 return infix;
3395 }
3396
3397 let dialect = self.dialect;
3398
3399 self.advance_token();
3400 let tok = self.get_current_token();
3401 debug!("infix: {tok:?}");
3402 let tok_index = self.get_current_index();
3403 let span = tok.span;
3404 let regular_binary_operator = match &tok.token {
3405 Token::Spaceship => Some(BinaryOperator::Spaceship),
3406 Token::DoubleEq => Some(BinaryOperator::Eq),
3407 Token::Assignment => Some(BinaryOperator::Assignment),
3408 Token::Eq => Some(BinaryOperator::Eq),
3409 Token::Neq => Some(BinaryOperator::NotEq),
3410 Token::Gt => Some(BinaryOperator::Gt),
3411 Token::GtEq => Some(BinaryOperator::GtEq),
3412 Token::Lt => Some(BinaryOperator::Lt),
3413 Token::LtEq => Some(BinaryOperator::LtEq),
3414 Token::Plus => Some(BinaryOperator::Plus),
3415 Token::Minus => Some(BinaryOperator::Minus),
3416 Token::Mul => Some(BinaryOperator::Multiply),
3417 Token::Mod => Some(BinaryOperator::Modulo),
3418 Token::StringConcat => Some(BinaryOperator::StringConcat),
3419 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3420 Token::Caret => {
3421 if dialect_is!(dialect is PostgreSqlDialect) {
3424 Some(BinaryOperator::PGExp)
3425 } else {
3426 Some(BinaryOperator::BitwiseXor)
3427 }
3428 }
3429 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3430 Token::Div => Some(BinaryOperator::Divide),
3431 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3432 Some(BinaryOperator::DuckIntegerDivide)
3433 }
3434 Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3435 Some(BinaryOperator::PGBitwiseShiftLeft)
3436 }
3437 Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3438 Some(BinaryOperator::PGBitwiseShiftRight)
3439 }
3440 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3441 Some(BinaryOperator::PGBitwiseXor)
3442 }
3443 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3444 Some(BinaryOperator::PGOverlap)
3445 }
3446 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3447 Some(BinaryOperator::PGOverlap)
3448 }
3449 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3450 Some(BinaryOperator::PGStartsWith)
3451 }
3452 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3453 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3454 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3455 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3456 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3457 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3458 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3459 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3460 Token::Arrow => Some(BinaryOperator::Arrow),
3461 Token::LongArrow => Some(BinaryOperator::LongArrow),
3462 Token::HashArrow => Some(BinaryOperator::HashArrow),
3463 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3464 Token::AtArrow => Some(BinaryOperator::AtArrow),
3465 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3466 Token::HashMinus => Some(BinaryOperator::HashMinus),
3467 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3468 Token::AtAt => Some(BinaryOperator::AtAt),
3469 Token::Question => Some(BinaryOperator::Question),
3470 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3471 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3472 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3473 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3474 Some(BinaryOperator::DoubleHash)
3475 }
3476
3477 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3478 Some(BinaryOperator::AndLt)
3479 }
3480 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3481 Some(BinaryOperator::AndGt)
3482 }
3483 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3484 Some(BinaryOperator::QuestionDash)
3485 }
3486 Token::AmpersandLeftAngleBracketVerticalBar
3487 if self.dialect.supports_geometric_types() =>
3488 {
3489 Some(BinaryOperator::AndLtPipe)
3490 }
3491 Token::VerticalBarAmpersandRightAngleBracket
3492 if self.dialect.supports_geometric_types() =>
3493 {
3494 Some(BinaryOperator::PipeAndGt)
3495 }
3496 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3497 Some(BinaryOperator::LtDashGt)
3498 }
3499 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3500 Some(BinaryOperator::LtCaret)
3501 }
3502 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3503 Some(BinaryOperator::GtCaret)
3504 }
3505 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3506 Some(BinaryOperator::QuestionHash)
3507 }
3508 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3509 Some(BinaryOperator::QuestionDoublePipe)
3510 }
3511 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3512 Some(BinaryOperator::QuestionDashPipe)
3513 }
3514 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3515 Some(BinaryOperator::TildeEq)
3516 }
3517 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3518 Some(BinaryOperator::LtLtPipe)
3519 }
3520 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3521 Some(BinaryOperator::PipeGtGt)
3522 }
3523 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3524
3525 Token::Word(w) => match w.keyword {
3526 Keyword::AND => Some(BinaryOperator::And),
3527 Keyword::OR => Some(BinaryOperator::Or),
3528 Keyword::XOR => Some(BinaryOperator::Xor),
3529 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3530 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3531 self.expect_token(&Token::LParen)?;
3532 let mut idents = vec![];
3537 loop {
3538 self.advance_token();
3539 idents.push(self.get_current_token().to_string());
3540 if !self.consume_token(&Token::Period) {
3541 break;
3542 }
3543 }
3544 self.expect_token(&Token::RParen)?;
3545 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3546 }
3547 _ => None,
3548 },
3549 _ => None,
3550 };
3551
3552 let tok = self.token_at(tok_index);
3553 if let Some(op) = regular_binary_operator {
3554 if let Some(keyword) =
3555 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3556 {
3557 self.expect_token(&Token::LParen)?;
3558 let right = if self.peek_sub_query() {
3559 self.prev_token(); self.parse_subexpr(precedence)?
3563 } else {
3564 let right = self.parse_subexpr(precedence)?;
3566 self.expect_token(&Token::RParen)?;
3567 right
3568 };
3569
3570 if !matches!(
3571 op,
3572 BinaryOperator::Gt
3573 | BinaryOperator::Lt
3574 | BinaryOperator::GtEq
3575 | BinaryOperator::LtEq
3576 | BinaryOperator::Eq
3577 | BinaryOperator::NotEq
3578 | BinaryOperator::PGRegexMatch
3579 | BinaryOperator::PGRegexIMatch
3580 | BinaryOperator::PGRegexNotMatch
3581 | BinaryOperator::PGRegexNotIMatch
3582 | BinaryOperator::PGLikeMatch
3583 | BinaryOperator::PGILikeMatch
3584 | BinaryOperator::PGNotLikeMatch
3585 | BinaryOperator::PGNotILikeMatch
3586 ) {
3587 return parser_err!(
3588 format!(
3589 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3590 ),
3591 span.start
3592 );
3593 };
3594
3595 Ok(match keyword {
3596 Keyword::ALL => Expr::AllOp {
3597 left: Box::new(expr),
3598 compare_op: op,
3599 right: Box::new(right),
3600 },
3601 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3602 left: Box::new(expr),
3603 compare_op: op,
3604 right: Box::new(right),
3605 is_some: keyword == Keyword::SOME,
3606 },
3607 unexpected_keyword => return Err(ParserError::ParserError(
3608 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3609 )),
3610 })
3611 } else {
3612 Ok(Expr::BinaryOp {
3613 left: Box::new(expr),
3614 op,
3615 right: Box::new(self.parse_subexpr(precedence)?),
3616 })
3617 }
3618 } else if let Token::Word(w) = &tok.token {
3619 match w.keyword {
3620 Keyword::IS => {
3621 if self.parse_keyword(Keyword::NULL) {
3622 Ok(Expr::IsNull(Box::new(expr)))
3623 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3624 Ok(Expr::IsNotNull(Box::new(expr)))
3625 } else if self.parse_keywords(&[Keyword::TRUE]) {
3626 Ok(Expr::IsTrue(Box::new(expr)))
3627 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3628 Ok(Expr::IsNotTrue(Box::new(expr)))
3629 } else if self.parse_keywords(&[Keyword::FALSE]) {
3630 Ok(Expr::IsFalse(Box::new(expr)))
3631 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3632 Ok(Expr::IsNotFalse(Box::new(expr)))
3633 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3634 Ok(Expr::IsUnknown(Box::new(expr)))
3635 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3636 Ok(Expr::IsNotUnknown(Box::new(expr)))
3637 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3638 let expr2 = self.parse_expr()?;
3639 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3640 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3641 {
3642 let expr2 = self.parse_expr()?;
3643 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3644 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3645 Ok(is_normalized)
3646 } else {
3647 self.expected(
3648 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3649 self.peek_token(),
3650 )
3651 }
3652 }
3653 Keyword::AT => {
3654 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3655 Ok(Expr::AtTimeZone {
3656 timestamp: Box::new(expr),
3657 time_zone: Box::new(self.parse_subexpr(precedence)?),
3658 })
3659 }
3660 Keyword::NOT
3661 | Keyword::IN
3662 | Keyword::BETWEEN
3663 | Keyword::LIKE
3664 | Keyword::ILIKE
3665 | Keyword::SIMILAR
3666 | Keyword::REGEXP
3667 | Keyword::RLIKE => {
3668 self.prev_token();
3669 let negated = self.parse_keyword(Keyword::NOT);
3670 let regexp = self.parse_keyword(Keyword::REGEXP);
3671 let rlike = self.parse_keyword(Keyword::RLIKE);
3672 let null = if !self.in_column_definition_state() {
3673 self.parse_keyword(Keyword::NULL)
3674 } else {
3675 false
3676 };
3677 if regexp || rlike {
3678 Ok(Expr::RLike {
3679 negated,
3680 expr: Box::new(expr),
3681 pattern: Box::new(
3682 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3683 ),
3684 regexp,
3685 })
3686 } else if negated && null {
3687 Ok(Expr::IsNotNull(Box::new(expr)))
3688 } else if self.parse_keyword(Keyword::IN) {
3689 self.parse_in(expr, negated)
3690 } else if self.parse_keyword(Keyword::BETWEEN) {
3691 self.parse_between(expr, negated)
3692 } else if self.parse_keyword(Keyword::LIKE) {
3693 Ok(Expr::Like {
3694 negated,
3695 any: self.parse_keyword(Keyword::ANY),
3696 expr: Box::new(expr),
3697 pattern: Box::new(
3698 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3699 ),
3700 escape_char: self.parse_escape_char()?,
3701 })
3702 } else if self.parse_keyword(Keyword::ILIKE) {
3703 Ok(Expr::ILike {
3704 negated,
3705 any: self.parse_keyword(Keyword::ANY),
3706 expr: Box::new(expr),
3707 pattern: Box::new(
3708 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3709 ),
3710 escape_char: self.parse_escape_char()?,
3711 })
3712 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3713 Ok(Expr::SimilarTo {
3714 negated,
3715 expr: Box::new(expr),
3716 pattern: Box::new(
3717 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3718 ),
3719 escape_char: self.parse_escape_char()?,
3720 })
3721 } else {
3722 self.expected("IN or BETWEEN after NOT", self.peek_token())
3723 }
3724 }
3725 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3726 Ok(Expr::IsNotNull(Box::new(expr)))
3727 }
3728 Keyword::MEMBER => {
3729 if self.parse_keyword(Keyword::OF) {
3730 self.expect_token(&Token::LParen)?;
3731 let array = self.parse_expr()?;
3732 self.expect_token(&Token::RParen)?;
3733 Ok(Expr::MemberOf(MemberOf {
3734 value: Box::new(expr),
3735 array: Box::new(array),
3736 }))
3737 } else {
3738 self.expected("OF after MEMBER", self.peek_token())
3739 }
3740 }
3741 _ => parser_err!(
3743 format!("No infix parser for token {:?}", tok.token),
3744 tok.span.start
3745 ),
3746 }
3747 } else if Token::DoubleColon == *tok {
3748 Ok(Expr::Cast {
3749 kind: CastKind::DoubleColon,
3750 expr: Box::new(expr),
3751 data_type: self.parse_data_type()?,
3752 format: None,
3753 })
3754 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3755 Ok(Expr::UnaryOp {
3756 op: UnaryOperator::PGPostfixFactorial,
3757 expr: Box::new(expr),
3758 })
3759 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3760 || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3761 {
3762 self.prev_token();
3763 self.parse_json_access(expr)
3764 } else {
3765 parser_err!(
3767 format!("No infix parser for token {:?}", tok.token),
3768 tok.span.start
3769 )
3770 }
3771 }
3772
3773 pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3775 if self.parse_keyword(Keyword::ESCAPE) {
3776 Ok(Some(self.parse_value()?.into()))
3777 } else {
3778 Ok(None)
3779 }
3780 }
3781
    /// Parses the interior of a `[ ... ]` subscript after the opening `[` has
    /// been consumed: either a plain index (`[expr]`) or a slice with
    /// optional `lower : upper : stride` components.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // An immediate `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // `]` right after the first component: a plain index if an expression
        // was parsed, otherwise a fully open slice (`[:]`).
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If a lower-bound expression was parsed, the separating `:` has not
        // been consumed yet; require it before the upper bound.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `]` here means both upper bound and stride were omitted.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_expr()?)
        };

        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // A second `:` introduces the optional stride component.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride expression was parsed, the closing `]` is still pending.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
3854
3855 pub fn parse_multi_dim_subscript(
3857 &mut self,
3858 chain: &mut Vec<AccessExpr>,
3859 ) -> Result<(), ParserError> {
3860 while self.consume_token(&Token::LBracket) {
3861 self.parse_subscript(chain)?;
3862 }
3863 Ok(())
3864 }
3865
3866 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3870 let subscript = self.parse_subscript_inner()?;
3871 chain.push(AccessExpr::Subscript(subscript));
3872 Ok(())
3873 }
3874
    /// Parses a single dot-path component of a JSON path: a bare word, a
    /// `"`-quoted identifier, or a double-quoted string literal.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            // Bare and `"`-quoted words become dot keys; words with any other
            // quote style fall through to the error arm.
            Token::Word(Word {
                value,
                quote_style: quote_style @ (Some('"') | None),
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
3898
3899 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3900 let path = self.parse_json_path()?;
3901 Ok(Expr::JsonAccess {
3902 value: Box::new(expr),
3903 path,
3904 })
3905 }
3906
    /// Parses a JSON access path such as `:a.b[0]`: a leading `:` component
    /// (Snowflake-style) followed by `.key` and `[index]` accesses.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:` may only introduce the first component.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.` separates subsequent components.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; push the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers invoke this only after seeing a path introducer, so at
        // least one component must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
3933
    /// Parses the tail of `expr [NOT] IN ...` — either `IN UNNEST(array)` or
    /// a parenthesized subquery/expression list. `IN` (and any `NOT`) has
    /// already been consumed.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array_expression)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure the parser position is restored
        // and we fall back to a plain expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects allow an empty list: `x IN ()`.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
3968
3969 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3971 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3974 self.expect_keyword_is(Keyword::AND)?;
3975 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3976 Ok(Expr::Between {
3977 expr: Box::new(expr),
3978 negated,
3979 low: Box::new(low),
3980 high: Box::new(high),
3981 })
3982 }
3983
3984 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3986 Ok(Expr::Cast {
3987 kind: CastKind::DoubleColon,
3988 expr: Box::new(expr),
3989 data_type: self.parse_data_type()?,
3990 format: None,
3991 })
3992 }
3993
    /// Returns the precedence of the next (lookahead) token, delegating to
    /// the dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
3998
    /// Returns the token at absolute position `index`, or the shared EOF
    /// token when `index` is out of range.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4004
    /// Returns a clone of the next non-whitespace token without consuming it.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4012
    /// Returns a reference to the next non-whitespace token without
    /// consuming it (clone-free variant of [`Self::peek_token`]).
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4018
    /// Returns the next `N` non-whitespace tokens (spans dropped) without
    /// consuming them; missing positions are filled with EOF.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
4045
4046 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4051 let mut index = self.index;
4052 core::array::from_fn(|_| loop {
4053 let token = self.tokens.get(index);
4054 index += 1;
4055 if let Some(TokenWithSpan {
4056 token: Token::Whitespace(_),
4057 span: _,
4058 }) = token
4059 {
4060 continue;
4061 }
4062 break token.cloned().unwrap_or(TokenWithSpan {
4063 token: Token::EOF,
4064 span: Span::empty(),
4065 });
4066 })
4067 }
4068
4069 pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4074 let mut index = self.index;
4075 core::array::from_fn(|_| loop {
4076 let token = self.tokens.get(index);
4077 index += 1;
4078 if let Some(TokenWithSpan {
4079 token: Token::Whitespace(_),
4080 span: _,
4081 }) = token
4082 {
4083 continue;
4084 }
4085 break token.unwrap_or(&EOF_TOKEN);
4086 })
4087 }
4088
    /// Returns a clone of the nth (0-based) upcoming non-whitespace token.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4093
    /// Returns a reference to the nth (0-based) upcoming non-whitespace
    /// token without advancing; EOF is returned once the input is exhausted.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts toward `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        // `None` here means we ran past the end of input.
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4113
    /// Returns a clone of the next raw token — whitespace included —
    /// without consuming it.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4119
4120 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4122 self.tokens
4123 .get(self.index + n)
4124 .cloned()
4125 .unwrap_or(TokenWithSpan {
4126 token: Token::EOF,
4127 span: Span::empty(),
4128 })
4129 }
4130
4131 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4135 let index = self.index;
4136 let matched = self.parse_keywords(expected);
4137 self.index = index;
4138 matched
4139 }
4140
    /// Consumes and returns a clone of the next non-whitespace token.
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4149
    /// Returns the index of the current (most recently consumed) token;
    /// saturates at 0 before any token has been consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4157
    /// Advances by exactly one raw token — whitespace included — returning
    /// it, or `None` past the end of input.
    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
        self.index += 1;
        self.tokens.get(self.index - 1)
    }
4163
4164 pub fn advance_token(&mut self) {
4168 loop {
4169 self.index += 1;
4170 match self.tokens.get(self.index - 1) {
4171 Some(TokenWithSpan {
4172 token: Token::Whitespace(_),
4173 span: _,
4174 }) => continue,
4175 _ => break,
4176 }
4177 }
4178 }
4179
    /// Returns the current (most recently consumed) token, or EOF before
    /// any token has been consumed.
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4186
    /// Returns the raw token one position before the current token
    /// (whitespace is not skipped here).
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4193
    /// Returns the raw token immediately after the current one
    /// (whitespace is not skipped here).
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4200
4201 pub fn prev_token(&mut self) {
4208 loop {
4209 assert!(self.index > 0);
4210 self.index -= 1;
4211 if let Some(TokenWithSpan {
4212 token: Token::Whitespace(_),
4213 span: _,
4214 }) = self.tokens.get(self.index)
4215 {
4216 continue;
4217 }
4218 return;
4219 }
4220 }
4221
    /// Builds an `Expected: ..., found: ...` parse error located at `found`.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4229
    /// Borrowing variant of [`Self::expected`]: builds the same error
    /// without taking ownership of the offending token.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4237
    /// Like [`Self::expected`], but reports the token at absolute position
    /// `index` (EOF when out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4246
4247 #[must_use]
4250 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4251 if self.peek_keyword(expected) {
4252 self.advance_token();
4253 true
4254 } else {
4255 false
4256 }
4257 }
4258
    /// Returns `true` if the next non-whitespace token is a word whose
    /// keyword is `expected`, without consuming anything.
    #[must_use]
    pub fn peek_keyword(&self, expected: Keyword) -> bool {
        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
    }
4263
    /// Consumes `expected` followed immediately by the exact sequence
    /// `tokens` when the whole thing matches; otherwise consumes nothing.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4274
    /// Non-consuming variant of [`Self::parse_keyword_with_tokens`]: checks
    /// whether `expected` followed by `tokens` comes next.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4282
4283 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4284 match &self.peek_token_ref().token {
4285 Token::Word(w) if expected == w.keyword => {
4286 for (idx, token) in tokens.iter().enumerate() {
4287 if self.peek_nth_token_ref(idx + 1).token != *token {
4288 return false;
4289 }
4290 }
4291
4292 if consume {
4293 for _ in 0..(tokens.len() + 1) {
4294 self.advance_token();
4295 }
4296 }
4297
4298 true
4299 }
4300 _ => false,
4301 }
4302 }
4303
4304 #[must_use]
4308 pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4309 let index = self.index;
4310 for &keyword in keywords {
4311 if !self.parse_keyword(keyword) {
4312 self.index = index;
4315 return false;
4316 }
4317 }
4318 true
4319 }
4320
4321 #[must_use]
4324 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4325 for keyword in keywords {
4326 if self.peek_keyword(*keyword) {
4327 return Some(*keyword);
4328 }
4329 }
4330 None
4331 }
4332
4333 #[must_use]
4337 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4338 match &self.peek_token_ref().token {
4339 Token::Word(w) => {
4340 keywords
4341 .iter()
4342 .find(|keyword| **keyword == w.keyword)
4343 .map(|keyword| {
4344 self.advance_token();
4345 *keyword
4346 })
4347 }
4348 _ => None,
4349 }
4350 }
4351
4352 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4355 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4356 Ok(keyword)
4357 } else {
4358 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4359 self.expected_ref(
4360 &format!("one of {}", keywords.join(" or ")),
4361 self.peek_token_ref(),
4362 )
4363 }
4364 }
4365
4366 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4371 if self.parse_keyword(expected) {
4372 Ok(self.get_current_token().clone())
4373 } else {
4374 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4375 }
4376 }
4377
4378 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4384 if self.parse_keyword(expected) {
4385 Ok(())
4386 } else {
4387 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4388 }
4389 }
4390
4391 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4394 for &kw in expected {
4395 self.expect_keyword_is(kw)?;
4396 }
4397 Ok(())
4398 }
4399
4400 #[must_use]
4404 pub fn consume_token(&mut self, expected: &Token) -> bool {
4405 if self.peek_token_ref() == expected {
4406 self.advance_token();
4407 true
4408 } else {
4409 false
4410 }
4411 }
4412
4413 #[must_use]
4417 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4418 let index = self.index;
4419 for token in tokens {
4420 if !self.consume_token(token) {
4421 self.index = index;
4422 return false;
4423 }
4424 }
4425 true
4426 }
4427
4428 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4430 if self.peek_token_ref() == expected {
4431 Ok(self.next_token())
4432 } else {
4433 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4434 }
4435 }
4436
4437 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4438 where
4439 <T as FromStr>::Err: Display,
4440 {
4441 s.parse::<T>().map_err(|e| {
4442 ParserError::ParserError(format!(
4443 "Could not parse '{s}' as {}: {e}{loc}",
4444 core::any::type_name::<T>()
4445 ))
4446 })
4447 }
4448
4449 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4451 let trailing_commas =
4457 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4458
4459 self.parse_comma_separated_with_trailing_commas(
4460 |p| p.parse_select_item(),
4461 trailing_commas,
4462 Self::is_reserved_for_column_alias,
4463 )
4464 }
4465
4466 pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4467 let mut values = vec![];
4468 loop {
4469 values.push(self.parse_grant_permission()?);
4470 if !self.consume_token(&Token::Comma) {
4471 break;
4472 } else if self.options.trailing_commas {
4473 match self.peek_token().token {
4474 Token::Word(kw) if kw.keyword == Keyword::ON => {
4475 break;
4476 }
4477 Token::RParen
4478 | Token::SemiColon
4479 | Token::EOF
4480 | Token::RBracket
4481 | Token::RBrace => break,
4482 _ => continue,
4483 }
4484 }
4485 }
4486 Ok(values)
4487 }
4488
    /// Parses the FROM list: one or more table-with-joins entries separated
    /// by commas, with dialect-dependent trailing-comma support.
    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
        let trailing_commas = self.dialect.supports_from_trailing_commas();

        self.parse_comma_separated_with_trailing_commas(
            Parser::parse_table_and_joins,
            trailing_commas,
            // A keyword terminates the list when the dialect does not treat
            // it as the start of another table factor.
            |kw, parser| !self.dialect.is_table_factor(kw, parser),
        )
    }
4499
    /// Returns `true` when a comma-separated list being parsed is complete.
    ///
    /// Consumes a separating comma when the list continues. With
    /// `trailing_commas` enabled, a comma followed by a reserved keyword or
    /// a closing token (`)`, `;`, `]`, `}`, EOF) also ends the list; the
    /// lookahead token is pushed back in that case.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            // Put the lookahead token back regardless of the outcome.
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4532
    /// Returns `true` when a comma-separated list is complete, using the
    /// parser's configured trailing-comma option and the default
    /// column-alias reserved-keyword check.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4541
    /// Parses one or more items produced by `f`, separated by commas,
    /// honoring the parser's trailing-comma option.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4553
4554 fn parse_comma_separated_with_trailing_commas<T, F, R>(
4559 &mut self,
4560 mut f: F,
4561 trailing_commas: bool,
4562 is_reserved_keyword: R,
4563 ) -> Result<Vec<T>, ParserError>
4564 where
4565 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4566 R: Fn(&Keyword, &mut Parser) -> bool,
4567 {
4568 let mut values = vec![];
4569 loop {
4570 values.push(f(self)?);
4571 if self.is_parse_comma_separated_end_with_trailing_commas(
4572 trailing_commas,
4573 &is_reserved_keyword,
4574 ) {
4575 break;
4576 }
4577 }
4578 Ok(values)
4579 }
4580
4581 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4583 where
4584 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4585 {
4586 let mut values = vec![];
4587 loop {
4588 values.push(f(self)?);
4589 if !self.consume_token(&Token::Period) {
4590 break;
4591 }
4592 }
4593 Ok(values)
4594 }
4595
4596 pub fn parse_keyword_separated<T, F>(
4598 &mut self,
4599 keyword: Keyword,
4600 mut f: F,
4601 ) -> Result<Vec<T>, ParserError>
4602 where
4603 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4604 {
4605 let mut values = vec![];
4606 loop {
4607 values.push(f(self)?);
4608 if !self.parse_keyword(keyword) {
4609 break;
4610 }
4611 }
4612 Ok(values)
4613 }
4614
4615 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4616 where
4617 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4618 {
4619 self.expect_token(&Token::LParen)?;
4620 let res = f(self)?;
4621 self.expect_token(&Token::RParen)?;
4622 Ok(res)
4623 }
4624
    /// Like [`Self::parse_comma_separated`], but permits an empty list
    /// (seeing `end_token` immediately) and, when trailing commas are
    /// enabled, a lone stray comma right before `end_token`.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        if self.peek_token().token == end_token {
            return Ok(vec![]);
        }

        // `(, )` — a single trailing comma in an otherwise empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
4646
    /// Parses a sequence of semicolon-terminated statements, stopping at EOF
    /// or at any unquoted word listed in `terminal_keywords`.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // Quoted words are identifiers, never terminators.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
4671
    /// Default list terminator: a keyword ends a comma-separated list when
    /// the dialect does not allow it to serve as a column alias.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
4678
4679 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4683 where
4684 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4685 {
4686 match self.try_parse(f) {
4687 Ok(t) => Ok(Some(t)),
4688 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4689 _ => Ok(None),
4690 }
4691 }
4692
4693 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4695 where
4696 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4697 {
4698 let index = self.index;
4699 match f(self) {
4700 Ok(t) => Ok(t),
4701 Err(e) => {
4702 self.index = index;
4704 Err(e)
4705 }
4706 }
4707 }
4708
    /// Parses an optional `ALL` / `DISTINCT [ON (...)]` set quantifier.
    ///
    /// Returns `None` when neither (or only `ALL`) is present; specifying
    /// both `ALL` and `DISTINCT` is an error.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Capture the location up front for a useful error message.
        let loc = self.peek_token().span.start;
        let all = self.parse_keyword(Keyword::ALL);
        let distinct = self.parse_keyword(Keyword::DISTINCT);
        if !distinct {
            return Ok(None);
        }
        if all {
            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
        }
        let on = self.parse_keyword(Keyword::ON);
        if !on {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // `DISTINCT ON ()`: peek for an immediately closing paren, then
        // rewind so the final expect_token below consumes it.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
4736
    /// Parses a `CREATE ...` statement, dispatching on the object type
    /// (`TABLE`, `VIEW`, `FUNCTION`, `INDEX`, ...). The `CREATE` keyword
    /// itself has already been consumed.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        // Leading modifiers that may precede the object-type keyword.
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // GLOBAL/LOCAL collapse into a single tri-state flag.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // `PERSISTENT` secrets are DuckDB-only syntax.
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy()
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain()
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            self.parse_create_user(or_replace)
        } else if or_replace {
            // `OR REPLACE` only combines with the object types handled above.
            self.expected(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension()
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role()
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector()
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // `OPERATOR` may be followed by FAMILY / CLASS sub-objects.
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class()
            } else {
                self.parse_create_operator()
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else {
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
4825
    /// Parses the tail of `CREATE USER`: name, space-delimited key/value
    /// options, and an optional `[WITH] TAG (...)` clause.
    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        // Options run until WITH or TAG begins the tag clause.
        let options = self
            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
            .options;
        let with_tags = self.parse_keyword(Keyword::WITH);
        let tags = if self.parse_keyword(Keyword::TAG) {
            self.parse_key_value_options(true, &[])?.options
        } else {
            vec![]
        };
        Ok(Statement::CreateUser(CreateUser {
            or_replace,
            if_not_exists,
            name,
            options: KeyValueOptions {
                options,
                delimiter: KeyValueOptionsDelimiter::Space,
            },
            with_tags,
            tags: KeyValueOptions {
                options: tags,
                delimiter: KeyValueOptionsDelimiter::Comma,
            },
        }))
    }
4853
    /// Parses the tail of DuckDB `CREATE SECRET`: an optional name and/or
    /// `IN <storage>`, then a parenthesized `TYPE <t>` plus comma-separated
    /// key/value options.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token() != Token::LParen {
            // Either `IN storage` or a secret name may come first.
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN storage`.
            if storage_specifier.is_none()
                && self.peek_token() != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive; callers pass the
        // already-parsed flags, collapsed here into a tri-state.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
4912
4913 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4915 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4916 if self.parse_keyword(Keyword::TABLE) {
4917 let table_name = self.parse_object_name(false)?;
4918 if self.peek_token().token != Token::EOF {
4919 if let Token::Word(word) = self.peek_token().token {
4920 if word.keyword == Keyword::OPTIONS {
4921 options = self.parse_options(Keyword::OPTIONS)?
4922 }
4923 };
4924
4925 if self.peek_token().token != Token::EOF {
4926 let (a, q) = self.parse_as_query()?;
4927 has_as = a;
4928 query = Some(q);
4929 }
4930
4931 Ok(Statement::Cache {
4932 table_flag,
4933 table_name,
4934 has_as,
4935 options,
4936 query,
4937 })
4938 } else {
4939 Ok(Statement::Cache {
4940 table_flag,
4941 table_name,
4942 has_as,
4943 options,
4944 query,
4945 })
4946 }
4947 } else {
4948 table_flag = Some(self.parse_object_name(false)?);
4949 if self.parse_keyword(Keyword::TABLE) {
4950 let table_name = self.parse_object_name(false)?;
4951 if self.peek_token() != Token::EOF {
4952 if let Token::Word(word) = self.peek_token().token {
4953 if word.keyword == Keyword::OPTIONS {
4954 options = self.parse_options(Keyword::OPTIONS)?
4955 }
4956 };
4957
4958 if self.peek_token() != Token::EOF {
4959 let (a, q) = self.parse_as_query()?;
4960 has_as = a;
4961 query = Some(q);
4962 }
4963
4964 Ok(Statement::Cache {
4965 table_flag,
4966 table_name,
4967 has_as,
4968 options,
4969 query,
4970 })
4971 } else {
4972 Ok(Statement::Cache {
4973 table_flag,
4974 table_name,
4975 has_as,
4976 options,
4977 query,
4978 })
4979 }
4980 } else {
4981 if self.peek_token() == Token::EOF {
4982 self.prev_token();
4983 }
4984 self.expected("a `TABLE` keyword", self.peek_token())
4985 }
4986 }
4987 }
4988
4989 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4991 match self.peek_token().token {
4992 Token::Word(word) => match word.keyword {
4993 Keyword::AS => {
4994 self.next_token();
4995 Ok((true, self.parse_query()?))
4996 }
4997 _ => Ok((false, self.parse_query()?)),
4998 },
4999 _ => self.expected("a QUERY statement", self.peek_token()),
5000 }
5001 }
5002
    /// Parses `UNCACHE TABLE [IF EXISTS] <name>`; the `UNCACHE` keyword has
    /// already been consumed.
    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let table_name = self.parse_object_name(false)?;
        Ok(Statement::UNCache {
            table_name,
            if_exists,
        })
    }
5013
    /// Parses `CREATE VIRTUAL TABLE ... USING module(args)`; the `VIRTUAL`
    /// keyword has already been consumed.
    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(false)?;
        self.expect_keyword_is(Keyword::USING)?;
        let module_name = self.parse_identifier()?;
        // Module arguments are parsed as an optional parenthesized column
        // list here.
        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
        Ok(Statement::CreateVirtualTable {
            name: table_name,
            if_not_exists,
            module_name,
            module_args,
        })
    }
5033
5034 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5035 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5036
5037 let schema_name = self.parse_schema_name()?;
5038
5039 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5040 Some(self.parse_expr()?)
5041 } else {
5042 None
5043 };
5044
5045 let with = if self.peek_keyword(Keyword::WITH) {
5046 Some(self.parse_options(Keyword::WITH)?)
5047 } else {
5048 None
5049 };
5050
5051 let options = if self.peek_keyword(Keyword::OPTIONS) {
5052 Some(self.parse_options(Keyword::OPTIONS)?)
5053 } else {
5054 None
5055 };
5056
5057 let clone = if self.parse_keyword(Keyword::CLONE) {
5058 Some(self.parse_object_name(false)?)
5059 } else {
5060 None
5061 };
5062
5063 Ok(Statement::CreateSchema {
5064 schema_name,
5065 if_not_exists,
5066 with,
5067 options,
5068 default_collate_spec,
5069 clone,
5070 })
5071 }
5072
5073 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5074 if self.parse_keyword(Keyword::AUTHORIZATION) {
5075 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5076 } else {
5077 let name = self.parse_object_name(false)?;
5078
5079 if self.parse_keyword(Keyword::AUTHORIZATION) {
5080 Ok(SchemaName::NamedAuthorization(
5081 name,
5082 self.parse_identifier()?,
5083 ))
5084 } else {
5085 Ok(SchemaName::Simple(name))
5086 }
5087 }
5088 }
5089
5090 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5091 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5092 let db_name = self.parse_object_name(false)?;
5093 let mut location = None;
5094 let mut managed_location = None;
5095 loop {
5096 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5097 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5098 Some(Keyword::MANAGEDLOCATION) => {
5099 managed_location = Some(self.parse_literal_string()?)
5100 }
5101 _ => break,
5102 }
5103 }
5104 let clone = if self.parse_keyword(Keyword::CLONE) {
5105 Some(self.parse_object_name(false)?)
5106 } else {
5107 None
5108 };
5109
5110 Ok(Statement::CreateDatabase {
5111 db_name,
5112 if_not_exists: ine,
5113 location,
5114 managed_location,
5115 or_replace: false,
5116 transient: false,
5117 clone,
5118 data_retention_time_in_days: None,
5119 max_data_extension_time_in_days: None,
5120 external_volume: None,
5121 catalog: None,
5122 replace_invalid_characters: None,
5123 default_ddl_collation: None,
5124 storage_serialization_policy: None,
5125 comment: None,
5126 catalog_sync: None,
5127 catalog_sync_namespace_mode: None,
5128 catalog_sync_namespace_flatten_delimiter: None,
5129 with_tags: None,
5130 with_contacts: None,
5131 })
5132 }
5133
5134 pub fn parse_optional_create_function_using(
5135 &mut self,
5136 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5137 if !self.parse_keyword(Keyword::USING) {
5138 return Ok(None);
5139 };
5140 let keyword =
5141 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5142
5143 let uri = self.parse_literal_string()?;
5144
5145 match keyword {
5146 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5147 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5148 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5149 _ => self.expected(
5150 "JAR, FILE or ARCHIVE, got {:?}",
5151 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5152 ),
5153 }
5154 }
5155
    /// Parses a `CREATE ... FUNCTION` statement by dispatching to the
    /// dialect-specific implementation.
    ///
    /// The `or_alter`/`or_replace`/`temporary` flags reflect modifiers already
    /// consumed by the caller. Dialects without a `CREATE FUNCTION`
    /// implementation fall through to a parse error.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB `CREATE FUNCTION` is handled by the macro parser.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
        } else {
            // Rewind one token so the error points at the keyword that
            // followed CREATE.
            self.prev_token();
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
5177
    /// Parses a PostgreSQL-style `CREATE FUNCTION` after the leading keywords:
    /// name, parenthesized argument list, optional `RETURNS` clause, then a
    /// loop over attribute clauses (`AS`, `LANGUAGE`, volatility, null-input
    /// handling, `PARALLEL`, `SECURITY`, `SET`, `RETURN`) that may appear in
    /// any order. All clauses except `SET` may appear at most once.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // An immediately-closing paren means an empty parameter list.
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Accumulates the single-use attribute clauses as they are seen.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a duplicate occurrence of a single-use clause.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                // IMMUTABLE / STABLE / VOLATILE are mutually exclusive.
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                // The three null-input forms share one slot.
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected("DEFINER or INVOKER", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // `SET <name> { FROM CURRENT | = <values> | TO <values> }`;
                // SET may be repeated, so no ensure_not_set here.
                let name = self.parse_identifier()?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected("= or TO", self.peek_token());
                    }
                    let values = self.parse_comma_separated(Parser::parse_expr)?;
                    FunctionSetValue::Values(values)
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // `RETURN <expr>` is an alternative body form; it shares the
                // function_body slot with `AS`.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No recognized clause: the attribute list is done.
                break;
            }
        }

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        }))
    }
5327
5328 fn parse_hive_create_function(
5332 &mut self,
5333 or_replace: bool,
5334 temporary: bool,
5335 ) -> Result<Statement, ParserError> {
5336 let name = self.parse_object_name(false)?;
5337 self.expect_keyword_is(Keyword::AS)?;
5338
5339 let body = self.parse_create_function_body_string()?;
5340 let using = self.parse_optional_create_function_using()?;
5341
5342 Ok(Statement::CreateFunction(CreateFunction {
5343 or_alter: false,
5344 or_replace,
5345 temporary,
5346 name,
5347 function_body: Some(body),
5348 using,
5349 if_not_exists: false,
5350 args: None,
5351 return_type: None,
5352 behavior: None,
5353 called_on_null: None,
5354 parallel: None,
5355 security: None,
5356 set_params: vec![],
5357 language: None,
5358 determinism_specifier: None,
5359 options: None,
5360 remote_connection: None,
5361 }))
5362 }
5363
    /// Parses a BigQuery-style `CREATE FUNCTION` after the leading keywords:
    /// `[IF NOT EXISTS] <name>(<params>) [RETURNS <type>]
    ///  [[NOT] DETERMINISTIC] [LANGUAGE <lang>]
    ///  [REMOTE WITH CONNECTION <conn>] [OPTIONS(...)] [AS <expr> [OPTIONS(...)]]`
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // `OPTIONS(...)` may appear either before or after the `AS` body;
        // try the "before AS" position first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // When a remote connection is given there is no `AS` body. Otherwise
        // the chosen variant records on which side of OPTIONS the `AS` clause
        // appeared, so the statement can round-trip.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS seen yet, so any OPTIONS must follow the body.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        }))
    }
5443
    /// Parses a T-SQL style `CREATE FUNCTION` after the leading keywords:
    /// parameter list, mandatory `RETURNS` clause (a plain data type or a
    /// named `TABLE(...)` definition), then a body that is either
    /// `BEGIN ... END` or `RETURN { (<expr>) | SELECT ... }`.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // First try the table-valued form `RETURNS <ident> TABLE (<columns>)`;
        // maybe_parse rewinds entirely if this form does not match.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            p.expect_keyword_is(Keyword::TABLE)?;
            // Rewind so parse_data_type sees the TABLE keyword itself.
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        // Fall back to a plain return data type.
        let return_type = if return_table.is_some() {
            return_table
        } else {
            Some(self.parse_data_type()?)
        };

        // `AS` before the body is optional; ignore whether it was present.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            // Multi-statement body: BEGIN <statements> END. The delimiting
            // tokens are retained for exact round-tripping.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token() == Token::LParen {
                // `RETURN (<expr>)` — parsed as a parenthesized expression.
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                // `RETURN SELECT ...` — a bare select statement.
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token().span.start)?
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        }))
    }
5534
5535 fn parse_create_function_name_and_params(
5536 &mut self,
5537 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5538 let name = self.parse_object_name(false)?;
5539 let parse_function_param =
5540 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5541 let name = parser.parse_identifier()?;
5542 let data_type = parser.parse_data_type()?;
5543 let default_expr = if parser.consume_token(&Token::Eq) {
5544 Some(parser.parse_expr()?)
5545 } else {
5546 None
5547 };
5548
5549 Ok(OperateFunctionArg {
5550 mode: None,
5551 name: Some(name),
5552 data_type,
5553 default_expr,
5554 })
5555 };
5556 self.expect_token(&Token::LParen)?;
5557 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5558 self.expect_token(&Token::RParen)?;
5559 Ok((name, args))
5560 }
5561
    /// Parses a single function argument declaration:
    /// `[IN | OUT | INOUT] [<name>] <data_type> [{DEFAULT | =} <expr>]`.
    ///
    /// The name is optional; disambiguation works by parsing a data type
    /// first and, if a *second* data type follows, reinterpreting the first
    /// parse as the argument name.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        // Optional argument mode.
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else {
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Index used below to recover the name token when the first
        // parse_data_type call actually consumed the argument name.
        // NOTE(review): assumes get_current_index refers to the most recently
        // consumed token — confirm against its definition.
        let data_type_idx = self.get_current_index();

        // Guard: without this, a trailing DEFAULT keyword could be parsed as
        // a custom type name instead of introducing the default value.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // To be reinterpretable as a name, the first "data type" must
            // have been a bare word.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Optional default value: `DEFAULT <expr>` or `= <expr>`.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
5620
5621 pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5627 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5628 {
5629 self.prev_token();
5630 return self.expected("an object type after DROP", self.peek_token());
5631 }
5632 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5633 let trigger_name = self.parse_object_name(false)?;
5634 let table_name = if self.parse_keyword(Keyword::ON) {
5635 Some(self.parse_object_name(false)?)
5636 } else {
5637 None
5638 };
5639 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5640 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5641 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5642 Some(unexpected_keyword) => return Err(ParserError::ParserError(
5643 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
5644 )),
5645 None => None,
5646 };
5647 Ok(Statement::DropTrigger(DropTrigger {
5648 if_exists,
5649 trigger_name,
5650 table_name,
5651 option,
5652 }))
5653 }
5654
    /// Parses a `CREATE TRIGGER` statement for the supported dialects.
    ///
    /// The modifier flags (`temporary`, `or_alter`, `or_replace`,
    /// `is_constraint`) were consumed by the caller before dispatching here.
    pub fn parse_create_trigger(
        &mut self,
        temporary: bool,
        or_alter: bool,
        or_replace: bool,
        is_constraint: bool,
    ) -> Result<Statement, ParserError> {
        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
        {
            self.prev_token();
            return self.expected("an object type after CREATE", self.peek_token());
        }

        let name = self.parse_object_name(false)?;
        // Optional period (BEFORE/AFTER/INSTEAD OF/FOR); backtracks if absent.
        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;

        // One or more events separated by OR, e.g. `INSERT OR UPDATE`.
        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // `FROM <referenced table>` — note a parse failure here is swallowed
        // by `.ok()` and treated as "no referenced table".
        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
            self.parse_object_name(true).ok()
        } else {
            None
        };

        let characteristics = self.parse_constraint_characteristics()?;

        // Zero or more `REFERENCING {OLD|NEW} TABLE [AS] <name>` clauses.
        let mut referencing = vec![];
        if self.parse_keyword(Keyword::REFERENCING) {
            while let Some(refer) = self.parse_trigger_referencing()? {
                referencing.push(refer);
            }
        }

        // `FOR [EACH] { ROW | STATEMENT }`.
        let trigger_object = if self.parse_keyword(Keyword::FOR) {
            let include_each = self.parse_keyword(Keyword::EACH);
            let trigger_object =
                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
                    Keyword::ROW => TriggerObject::Row,
                    Keyword::STATEMENT => TriggerObject::Statement,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
                    )),
                };

            Some(if include_each {
                TriggerObjectKind::ForEach(trigger_object)
            } else {
                TriggerObjectKind::For(trigger_object)
            })
        } else {
            // NOTE(review): this re-parse of FOR is a no-op — the same
            // keyword just failed to match in the condition above.
            let _ = self.parse_keyword(Keyword::FOR);

            None
        };

        // Optional `WHEN <condition>`.
        let condition = self
            .parse_keyword(Keyword::WHEN)
            .then(|| self.parse_expr())
            .transpose()?;

        // Body: either `EXECUTE FUNCTION|PROCEDURE ...` or an inline
        // statement block terminated by END.
        let mut exec_body = None;
        let mut statements = None;
        if self.parse_keyword(Keyword::EXECUTE) {
            exec_body = Some(self.parse_trigger_exec_body()?);
        } else {
            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
        }

        Ok(CreateTrigger {
            or_alter,
            temporary,
            or_replace,
            is_constraint,
            name,
            period,
            period_before_table: true,
            events,
            table_name,
            referenced_table_name,
            referencing,
            trigger_object,
            condition,
            exec_body,
            statements_as: false,
            statements,
            characteristics,
        }
        .into())
    }
5746
5747 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5748 Ok(
5749 match self.expect_one_of_keywords(&[
5750 Keyword::FOR,
5751 Keyword::BEFORE,
5752 Keyword::AFTER,
5753 Keyword::INSTEAD,
5754 ])? {
5755 Keyword::FOR => TriggerPeriod::For,
5756 Keyword::BEFORE => TriggerPeriod::Before,
5757 Keyword::AFTER => TriggerPeriod::After,
5758 Keyword::INSTEAD => self
5759 .expect_keyword_is(Keyword::OF)
5760 .map(|_| TriggerPeriod::InsteadOf)?,
5761 unexpected_keyword => return Err(ParserError::ParserError(
5762 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
5763 )),
5764 },
5765 )
5766 }
5767
5768 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5769 Ok(
5770 match self.expect_one_of_keywords(&[
5771 Keyword::INSERT,
5772 Keyword::UPDATE,
5773 Keyword::DELETE,
5774 Keyword::TRUNCATE,
5775 ])? {
5776 Keyword::INSERT => TriggerEvent::Insert,
5777 Keyword::UPDATE => {
5778 if self.parse_keyword(Keyword::OF) {
5779 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5780 TriggerEvent::Update(cols)
5781 } else {
5782 TriggerEvent::Update(vec![])
5783 }
5784 }
5785 Keyword::DELETE => TriggerEvent::Delete,
5786 Keyword::TRUNCATE => TriggerEvent::Truncate,
5787 unexpected_keyword => return Err(ParserError::ParserError(
5788 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
5789 )),
5790 },
5791 )
5792 }
5793
5794 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5795 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5796 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5797 TriggerReferencingType::OldTable
5798 }
5799 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5800 TriggerReferencingType::NewTable
5801 }
5802 _ => {
5803 return Ok(None);
5804 }
5805 };
5806
5807 let is_as = self.parse_keyword(Keyword::AS);
5808 let transition_relation_name = self.parse_object_name(false)?;
5809 Ok(Some(TriggerReferencing {
5810 refer_type,
5811 is_as,
5812 transition_relation_name,
5813 }))
5814 }
5815
5816 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5817 Ok(TriggerExecBody {
5818 exec_type: match self
5819 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5820 {
5821 Keyword::FUNCTION => TriggerExecBodyType::Function,
5822 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5823 unexpected_keyword => return Err(ParserError::ParserError(
5824 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
5825 )),
5826 },
5827 func_desc: self.parse_function_desc()?,
5828 })
5829 }
5830
5831 pub fn parse_create_macro(
5832 &mut self,
5833 or_replace: bool,
5834 temporary: bool,
5835 ) -> Result<Statement, ParserError> {
5836 if dialect_of!(self is DuckDbDialect | GenericDialect) {
5837 let name = self.parse_object_name(false)?;
5838 self.expect_token(&Token::LParen)?;
5839 let args = if self.consume_token(&Token::RParen) {
5840 self.prev_token();
5841 None
5842 } else {
5843 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5844 };
5845
5846 self.expect_token(&Token::RParen)?;
5847 self.expect_keyword_is(Keyword::AS)?;
5848
5849 Ok(Statement::CreateMacro {
5850 or_replace,
5851 temporary,
5852 name,
5853 args,
5854 definition: if self.parse_keyword(Keyword::TABLE) {
5855 MacroDefinition::Table(self.parse_query()?)
5856 } else {
5857 MacroDefinition::Expr(self.parse_expr()?)
5858 },
5859 })
5860 } else {
5861 self.prev_token();
5862 self.expected("an object type after CREATE", self.peek_token())
5863 }
5864 }
5865
5866 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5867 let name = self.parse_identifier()?;
5868
5869 let default_expr =
5870 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5871 Some(self.parse_expr()?)
5872 } else {
5873 None
5874 };
5875 Ok(MacroArg { name, default_expr })
5876 }
5877
5878 pub fn parse_create_external_table(
5879 &mut self,
5880 or_replace: bool,
5881 ) -> Result<Statement, ParserError> {
5882 self.expect_keyword_is(Keyword::TABLE)?;
5883 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5884 let table_name = self.parse_object_name(false)?;
5885 let (columns, constraints) = self.parse_columns()?;
5886
5887 let hive_distribution = self.parse_hive_distribution()?;
5888 let hive_formats = self.parse_hive_formats()?;
5889
5890 let file_format = if let Some(ref hf) = hive_formats {
5891 if let Some(ref ff) = hf.storage {
5892 match ff {
5893 HiveIOFormat::FileFormat { format } => Some(*format),
5894 _ => None,
5895 }
5896 } else {
5897 None
5898 }
5899 } else {
5900 None
5901 };
5902 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
5903 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5904 let table_options = if !table_properties.is_empty() {
5905 CreateTableOptions::TableProperties(table_properties)
5906 } else {
5907 CreateTableOptions::None
5908 };
5909 Ok(CreateTableBuilder::new(table_name)
5910 .columns(columns)
5911 .constraints(constraints)
5912 .hive_distribution(hive_distribution)
5913 .hive_formats(hive_formats)
5914 .table_options(table_options)
5915 .or_replace(or_replace)
5916 .if_not_exists(if_not_exists)
5917 .external(true)
5918 .file_format(file_format)
5919 .location(location)
5920 .build())
5921 }
5922
5923 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5924 let next_token = self.next_token();
5925 match &next_token.token {
5926 Token::Word(w) => match w.keyword {
5927 Keyword::AVRO => Ok(FileFormat::AVRO),
5928 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5929 Keyword::ORC => Ok(FileFormat::ORC),
5930 Keyword::PARQUET => Ok(FileFormat::PARQUET),
5931 Keyword::RCFILE => Ok(FileFormat::RCFILE),
5932 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5933 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5934 _ => self.expected("fileformat", next_token),
5935 },
5936 _ => self.expected("fileformat", next_token),
5937 }
5938 }
5939
5940 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
5941 if self.consume_token(&Token::Eq) {
5942 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
5943 } else {
5944 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
5945 }
5946 }
5947
5948 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5949 let next_token = self.next_token();
5950 match &next_token.token {
5951 Token::Word(w) => match w.keyword {
5952 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5953 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5954 Keyword::JSON => Ok(AnalyzeFormat::JSON),
5955 _ => self.expected("fileformat", next_token),
5956 },
5957 _ => self.expected("fileformat", next_token),
5958 }
5959 }
5960
    /// Parses a `CREATE [SECURE] [MATERIALIZED] VIEW` statement after the
    /// leading modifiers; several clauses are gated on the active dialect.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<Statement, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` may appear before or after the view name; both
        // positions are accepted and the one used is recorded so the AST can
        // round-trip.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let columns = self.parse_view_columns()?;
        // `WITH (...)` and (below) `OPTIONS (...)` share the single `options`
        // slot; a non-empty OPTIONS list overwrites a WITH list.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // `TO <table>` target for the view's output.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '<text>'`.
        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Trailing `WITH NO SCHEMA BINDING` (after the defining query).
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            to,
            params: create_view_params,
            name_before_not_exists,
        }
        .into())
    }
6053
    /// Parses the optional view parameters that may precede `VIEW`:
    /// `ALGORITHM = {UNDEFINED | MERGE | TEMPTABLE}`, `DEFINER = <grantee>`,
    /// and `SQL SECURITY {DEFINER | INVOKER}`, in that order.
    ///
    /// Returns `Ok(None)` when none of the three clauses is present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Defensive arm: step back and re-consume so the error
                    // reports the offending token.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Same defensive rewind-and-report pattern as above.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only materialize the params struct if at least one clause was seen.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6111
    /// Parses `CREATE ROLE [IF NOT EXISTS] name [, ...] [WITH] [option ...]`.
    ///
    /// The set of accepted options is dialect-dependent: MSSQL only accepts
    /// `AUTHORIZATION`, PostgreSQL accepts its full role-option list, and all
    /// other dialects accept no options. Each option may appear at most once;
    /// a repeated option is reported as a parse error at its location.
    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // The WITH keyword before the option list is optional noise.
        let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Collected option values; `None`/empty means "not specified".
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the option keyword just consumed, for error messages.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                // Paired positive/negative keywords store which of the two
                // appeared (e.g. LOGIN => Some(true), NOLOGIN => Some(false)).
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                // `PASSWORD NULL` is distinct from `PASSWORD 'literal'`.
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                // `IN` introduces either `IN ROLE ...` or `IN GROUP ...`.
                Keyword::IN => {
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected("ROLE or GROUP after IN", self.peek_token())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // A keyword from `optional_keywords` without an arm above:
                // stop scanning options.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        }
        .into())
    }
6336
6337 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6338 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6339 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6340 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6341 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6342 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6343 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6344 )),
6345 None => {
6346 match self.parse_identifier() {
6347 Ok(ident) => Owner::Ident(ident),
6348 Err(e) => {
6349 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6350 }
6351 }
6352 }
6353 };
6354 Ok(owner)
6355 }
6356
6357 fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6359 let name = self.parse_object_name(false)?;
6360 self.expect_keyword_is(Keyword::AS)?;
6361 let data_type = self.parse_data_type()?;
6362 let collation = if self.parse_keyword(Keyword::COLLATE) {
6363 Some(self.parse_identifier()?)
6364 } else {
6365 None
6366 };
6367 let default = if self.parse_keyword(Keyword::DEFAULT) {
6368 Some(self.parse_expr()?)
6369 } else {
6370 None
6371 };
6372 let mut constraints = Vec::new();
6373 while let Some(constraint) = self.parse_optional_table_constraint()? {
6374 constraints.push(constraint);
6375 }
6376
6377 Ok(Statement::CreateDomain(CreateDomain {
6378 name,
6379 data_type,
6380 collation,
6381 default,
6382 constraints,
6383 }))
6384 }
6385
    /// Parses `CREATE POLICY name ON table [AS {PERMISSIVE | RESTRICTIVE}]
    /// [FOR {ALL | SELECT | INSERT | UPDATE | DELETE}] [TO role [, ...]]
    /// [USING (expr)] [WITH CHECK (expr)]` (PostgreSQL row-level security).
    ///
    /// The leading `CREATE POLICY` keywords have already been consumed.
    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                // Defensive: `expect_one_of_keywords` only returns the two
                // keywords listed above.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
                )),
            })
        } else {
            None
        };

        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                // Defensive: see the policy-type match above.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
                )),
            })
        } else {
            None
        };

        // `TO` accepts the same grantees as `OWNER TO` (roles or special
        // keywords like CURRENT_USER).
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // `CreatePolicy` is `Statement::CreatePolicy`, imported at file top.
        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
6470
6471 pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6481 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6482 let name = self.parse_identifier()?;
6483
6484 let connector_type = if self.parse_keyword(Keyword::TYPE) {
6485 Some(self.parse_literal_string()?)
6486 } else {
6487 None
6488 };
6489
6490 let url = if self.parse_keyword(Keyword::URL) {
6491 Some(self.parse_literal_string()?)
6492 } else {
6493 None
6494 };
6495
6496 let comment = self.parse_optional_inline_comment()?;
6497
6498 let with_dcproperties =
6499 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6500 properties if !properties.is_empty() => Some(properties),
6501 _ => None,
6502 };
6503
6504 Ok(Statement::CreateConnector(CreateConnector {
6505 name,
6506 if_not_exists,
6507 connector_type,
6508 url,
6509 comment,
6510 with_dcproperties,
6511 }))
6512 }
6513
6514 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6520 let mut parts = vec![];
6521 loop {
6522 parts.push(ObjectNamePart::Identifier(Ident::new(
6523 self.next_token().to_string(),
6524 )));
6525 if !self.consume_token(&Token::Period) {
6526 break;
6527 }
6528 }
6529 Ok(ObjectName(parts))
6530 }
6531
    /// Parses `CREATE OPERATOR name ( FUNCTION = ... [, option = ...] )`
    /// (PostgreSQL). The leading `CREATE OPERATOR` keywords have already been
    /// consumed.
    ///
    /// Each parameter may appear at most once: the match guards below only
    /// accept a keyword while its slot is still unset, so a repeated keyword
    /// falls through to the catch-all "Duplicate or unexpected" error arm.
    /// `FUNCTION` (or its synonym `PROCEDURE`) is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Bare flags: no `= value` part.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    // Remember which spelling was used for round-tripping.
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or
                // the `OPERATOR(name)` wrapper syntax.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a guard above rejected the keyword, i.e. the
                // parameter was already given.
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(Statement::CreateOperator(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        }))
    }
6653
6654 pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
6658 let name = self.parse_object_name(false)?;
6659 self.expect_keyword(Keyword::USING)?;
6660 let using = self.parse_identifier()?;
6661
6662 Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
6663 name,
6664 using,
6665 }))
6666 }
6667
6668 pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
6672 let name = self.parse_object_name(false)?;
6673 let default = self.parse_keyword(Keyword::DEFAULT);
6674 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6675 let for_type = self.parse_data_type()?;
6676 self.expect_keyword(Keyword::USING)?;
6677 let using = self.parse_identifier()?;
6678
6679 let family = if self.parse_keyword(Keyword::FAMILY) {
6680 Some(self.parse_object_name(false)?)
6681 } else {
6682 None
6683 };
6684
6685 self.expect_keyword(Keyword::AS)?;
6686
6687 let mut items = vec![];
6688 loop {
6689 if self.parse_keyword(Keyword::OPERATOR) {
6690 let strategy_number = self.parse_literal_uint()? as u32;
6691 let operator_name = self.parse_operator_name()?;
6692
6693 let op_types = if self.consume_token(&Token::LParen) {
6695 let left = self.parse_data_type()?;
6696 self.expect_token(&Token::Comma)?;
6697 let right = self.parse_data_type()?;
6698 self.expect_token(&Token::RParen)?;
6699 Some(OperatorArgTypes { left, right })
6700 } else {
6701 None
6702 };
6703
6704 let purpose = if self.parse_keyword(Keyword::FOR) {
6706 if self.parse_keyword(Keyword::SEARCH) {
6707 Some(OperatorPurpose::ForSearch)
6708 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6709 let sort_family = self.parse_object_name(false)?;
6710 Some(OperatorPurpose::ForOrderBy { sort_family })
6711 } else {
6712 return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
6713 }
6714 } else {
6715 None
6716 };
6717
6718 items.push(OperatorClassItem::Operator {
6719 strategy_number,
6720 operator_name,
6721 op_types,
6722 purpose,
6723 });
6724 } else if self.parse_keyword(Keyword::FUNCTION) {
6725 let support_number = self.parse_literal_uint()? as u32;
6726
6727 let op_types =
6729 if self.consume_token(&Token::LParen) && self.peek_token() != Token::RParen {
6730 let mut types = vec![];
6731 loop {
6732 types.push(self.parse_data_type()?);
6733 if !self.consume_token(&Token::Comma) {
6734 break;
6735 }
6736 }
6737 self.expect_token(&Token::RParen)?;
6738 Some(types)
6739 } else if self.consume_token(&Token::LParen) {
6740 self.expect_token(&Token::RParen)?;
6741 Some(vec![])
6742 } else {
6743 None
6744 };
6745
6746 let function_name = self.parse_object_name(false)?;
6747
6748 let argument_types = if self.consume_token(&Token::LParen) {
6750 let mut types = vec![];
6751 loop {
6752 if self.peek_token() == Token::RParen {
6753 break;
6754 }
6755 types.push(self.parse_data_type()?);
6756 if !self.consume_token(&Token::Comma) {
6757 break;
6758 }
6759 }
6760 self.expect_token(&Token::RParen)?;
6761 types
6762 } else {
6763 vec![]
6764 };
6765
6766 items.push(OperatorClassItem::Function {
6767 support_number,
6768 op_types,
6769 function_name,
6770 argument_types,
6771 });
6772 } else if self.parse_keyword(Keyword::STORAGE) {
6773 let storage_type = self.parse_data_type()?;
6774 items.push(OperatorClassItem::Storage { storage_type });
6775 } else {
6776 break;
6777 }
6778
6779 if !self.consume_token(&Token::Comma) {
6781 break;
6782 }
6783 }
6784
6785 Ok(Statement::CreateOperatorClass(CreateOperatorClass {
6786 name,
6787 default,
6788 for_type,
6789 using,
6790 family,
6791 items,
6792 }))
6793 }
6794
    /// Parses a `DROP` statement. Simple object kinds (TABLE, VIEW, INDEX,
    /// ROLE, ...) share the generic `Statement::Drop` tail below; kinds with
    /// their own grammar (FUNCTION, POLICY, SECRET, ...) delegate to a
    /// dedicated `parse_drop_*` helper and return early.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // TEMPORARY / PERSISTENT modifiers are only consumed for the dialects
        // that support them; they are forwarded to `parse_drop_secret`.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function();
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy();
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain();
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger();
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR has three sub-forms: FAMILY, CLASS, or a bare operator.
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected(
                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token(),
            );
        };
        // Generic tail: `[IF EXISTS] name [, ...] [CASCADE|RESTRICT] [PURGE]`.
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        let loc = self.peek_token().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        // CASCADE and RESTRICT are mutually exclusive; DROP ROLE accepts
        // neither of them nor PURGE.
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // Optional trailing `ON <object>` clause; presumably for forms like
        // MySQL's `DROP INDEX ... ON table` — confirm against callers.
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
6891
6892 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6893 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6894 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6895 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6896 _ => None,
6897 }
6898 }
6899
6900 fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6905 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6906 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6907 let drop_behavior = self.parse_optional_drop_behavior();
6908 Ok(Statement::DropFunction(DropFunction {
6909 if_exists,
6910 func_desc,
6911 drop_behavior,
6912 }))
6913 }
6914
6915 fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6921 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6922 let name = self.parse_identifier()?;
6923 self.expect_keyword_is(Keyword::ON)?;
6924 let table_name = self.parse_object_name(false)?;
6925 let drop_behavior = self.parse_optional_drop_behavior();
6926 Ok(Statement::DropPolicy {
6927 if_exists,
6928 name,
6929 table_name,
6930 drop_behavior,
6931 })
6932 }
6933 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6939 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6940 let name = self.parse_identifier()?;
6941 Ok(Statement::DropConnector { if_exists, name })
6942 }
6943
6944 fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6948 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6949 let name = self.parse_object_name(false)?;
6950 let drop_behavior = self.parse_optional_drop_behavior();
6951 Ok(Statement::DropDomain(DropDomain {
6952 if_exists,
6953 name,
6954 drop_behavior,
6955 }))
6956 }
6957
6958 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6963 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6964 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6965 let drop_behavior = self.parse_optional_drop_behavior();
6966 Ok(Statement::DropProcedure {
6967 if_exists,
6968 proc_desc,
6969 drop_behavior,
6970 })
6971 }
6972
6973 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6974 let name = self.parse_object_name(false)?;
6975
6976 let args = if self.consume_token(&Token::LParen) {
6977 if self.consume_token(&Token::RParen) {
6978 Some(vec![])
6979 } else {
6980 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6981 self.expect_token(&Token::RParen)?;
6982 Some(args)
6983 }
6984 } else {
6985 None
6986 };
6987
6988 Ok(FunctionDesc { name, args })
6989 }
6990
6991 fn parse_drop_secret(
6993 &mut self,
6994 temporary: bool,
6995 persistent: bool,
6996 ) -> Result<Statement, ParserError> {
6997 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6998 let name = self.parse_identifier()?;
6999 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7000 self.parse_identifier().ok()
7001 } else {
7002 None
7003 };
7004 let temp = match (temporary, persistent) {
7005 (true, false) => Some(true),
7006 (false, true) => Some(false),
7007 (false, false) => None,
7008 _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
7009 };
7010
7011 Ok(Statement::DropSecret {
7012 if_exists,
7013 temporary: temp,
7014 name,
7015 storage_specifier,
7016 })
7017 }
7018
    /// Parses a `DECLARE` statement. BigQuery, Snowflake and MSSQL have their
    /// own grammars and are dispatched to dedicated helpers; all other
    /// dialects get the generic cursor form
    /// `DECLARE name [BINARY] [INSENSITIVE|ASENSITIVE] [[NO] SCROLL]
    /// CURSOR [WITH HOLD | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // BINARY is recorded as `Some(bool)` (present/absent) rather than
        // `None`, matching the existing AST shape for this form.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // `WITH HOLD` / `WITHOUT HOLD` — both spellings require the HOLD
        // keyword to follow.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // Defensive: only WITH/WITHOUT can be returned above.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7093
7094 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7102 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7103
7104 let data_type = match self.peek_token().token {
7105 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7106 _ => Some(self.parse_data_type()?),
7107 };
7108
7109 let expr = if data_type.is_some() {
7110 if self.parse_keyword(Keyword::DEFAULT) {
7111 Some(self.parse_expr()?)
7112 } else {
7113 None
7114 }
7115 } else {
7116 self.expect_keyword_is(Keyword::DEFAULT)?;
7119 Some(self.parse_expr()?)
7120 };
7121
7122 Ok(Statement::Declare {
7123 stmts: vec![Declare {
7124 names,
7125 data_type,
7126 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7127 declare_type: None,
7128 binary: None,
7129 sensitive: None,
7130 scroll: None,
7131 hold: None,
7132 for_query: None,
7133 }],
7134 })
7135 }
7136
    /// Parses a Snowflake `DECLARE` block: one or more declarations, each a
    /// cursor (`name CURSOR FOR ...`), a `RESULTSET`, an `EXCEPTION`, or a
    /// plain variable with optional type and/or assignment.
    ///
    /// Declarations are separated by semicolons. After a semicolon, the next
    /// declaration is only assumed to follow when the next token is a
    /// non-keyword word (a fresh variable name); otherwise the semicolon is
    /// put back and the block ends.
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` stores a query; any other
                    // expression is stored as a `For` assignment.
                    match self.peek_token().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // A RESULTSET may optionally carry an assignment unless
                    // the declaration ends immediately.
                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // An EXCEPTION may be followed by a parenthesized
                    // (code, message) expression.
                    let assigned_expr = if self.peek_token().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: either `name := expr` / `name DEFAULT
                    // expr`, or `name type [assignment]`, or just `name`.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = self.peek_token().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match self.peek_token().token {
                    // A non-keyword word after `;` starts another declaration.
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        continue;
                    }
                    // Otherwise the semicolon belongs to the enclosing
                    // statement: rewind so the caller sees it.
                    _ => {
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
7252
7253 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7265 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7266
7267 Ok(Statement::Declare { stmts })
7268 }
7269
    /// Parses one MSSQL declaration:
    /// `@name [AS] data_type [= expr]` or `name CURSOR [FOR query]`.
    ///
    /// The `@` prefix is required for variable declarations; only `CURSOR`
    /// declarations may use a bare name.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            if !ident.value.starts_with('@')
                && !matches!(
                    self.peek_token().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): this wraps a parse problem as
                // `TokenizerError`; arguably it should be `ParserError`, but
                // callers may match on the current variant — confirm before
                // changing.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // CURSOR makes this a cursor declaration; an optional `AS` may
        // precede the data type of a variable declaration.
        let (declare_type, data_type) = match self.peek_token().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // `FOR query` binds a cursor; otherwise an optional `= expr`
        // assignment may follow.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
7333
7334 pub fn parse_snowflake_variable_declaration_expression(
7342 &mut self,
7343 ) -> Result<Option<DeclareAssignment>, ParserError> {
7344 Ok(match self.peek_token().token {
7345 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7346 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7348 }
7349 Token::Assignment => {
7350 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
7352 self.parse_expr()?,
7353 )))
7354 }
7355 _ => None,
7356 })
7357 }
7358
7359 pub fn parse_mssql_variable_declaration_expression(
7366 &mut self,
7367 ) -> Result<Option<DeclareAssignment>, ParserError> {
7368 Ok(match self.peek_token().token {
7369 Token::Eq => {
7370 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
7372 self.parse_expr()?,
7373 )))
7374 }
7375 _ => None,
7376 })
7377 }
7378
7379 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7381 let direction = if self.parse_keyword(Keyword::NEXT) {
7382 FetchDirection::Next
7383 } else if self.parse_keyword(Keyword::PRIOR) {
7384 FetchDirection::Prior
7385 } else if self.parse_keyword(Keyword::FIRST) {
7386 FetchDirection::First
7387 } else if self.parse_keyword(Keyword::LAST) {
7388 FetchDirection::Last
7389 } else if self.parse_keyword(Keyword::ABSOLUTE) {
7390 FetchDirection::Absolute {
7391 limit: self.parse_number_value()?.value,
7392 }
7393 } else if self.parse_keyword(Keyword::RELATIVE) {
7394 FetchDirection::Relative {
7395 limit: self.parse_number_value()?.value,
7396 }
7397 } else if self.parse_keyword(Keyword::FORWARD) {
7398 if self.parse_keyword(Keyword::ALL) {
7399 FetchDirection::ForwardAll
7400 } else {
7401 FetchDirection::Forward {
7402 limit: Some(self.parse_number_value()?.value),
7404 }
7405 }
7406 } else if self.parse_keyword(Keyword::BACKWARD) {
7407 if self.parse_keyword(Keyword::ALL) {
7408 FetchDirection::BackwardAll
7409 } else {
7410 FetchDirection::Backward {
7411 limit: Some(self.parse_number_value()?.value),
7413 }
7414 }
7415 } else if self.parse_keyword(Keyword::ALL) {
7416 FetchDirection::All
7417 } else {
7418 FetchDirection::Count {
7419 limit: self.parse_number_value()?.value,
7420 }
7421 };
7422
7423 let position = if self.peek_keyword(Keyword::FROM) {
7424 self.expect_keyword(Keyword::FROM)?;
7425 FetchPosition::From
7426 } else if self.peek_keyword(Keyword::IN) {
7427 self.expect_keyword(Keyword::IN)?;
7428 FetchPosition::In
7429 } else {
7430 return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7431 };
7432
7433 let name = self.parse_identifier()?;
7434
7435 let into = if self.parse_keyword(Keyword::INTO) {
7436 Some(self.parse_object_name(false)?)
7437 } else {
7438 None
7439 };
7440
7441 Ok(Statement::Fetch {
7442 name,
7443 direction,
7444 position,
7445 into,
7446 })
7447 }
7448
7449 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7450 let object_type = if self.parse_keyword(Keyword::ALL) {
7451 DiscardObject::ALL
7452 } else if self.parse_keyword(Keyword::PLANS) {
7453 DiscardObject::PLANS
7454 } else if self.parse_keyword(Keyword::SEQUENCES) {
7455 DiscardObject::SEQUENCES
7456 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7457 DiscardObject::TEMP
7458 } else {
7459 return self.expected(
7460 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7461 self.peek_token(),
7462 );
7463 };
7464 Ok(Statement::Discard { object_type })
7465 }
7466
    /// Parses a `CREATE [UNIQUE] INDEX` statement, starting after the
    /// `INDEX` keyword (the caller has already decided `unique`).
    ///
    /// Handles the optional index name, `USING`, the indexed column list,
    /// `INCLUDE`, `NULLS [NOT] DISTINCT`, a `WITH (...)` parameter list,
    /// a `WHERE` predicate, and trailing MySQL-style index/alter options.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional: `CREATE INDEX ON t (...)` is legal.
        // With `IF NOT EXISTS` a name is required, so we do not probe for a
        // bare `ON` in that case.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // `USING <method>` may appear before `ON` in some dialects.
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // `USING` may also follow the table name; a later occurrence takes
        // precedence over one parsed before `ON`.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Postgres `INCLUDE (col, ...)`: non-key columns stored in the index.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS DISTINCT` => Some(true); `NULLS NOT DISTINCT` => Some(false).
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // `WITH (param, ...)` is dialect-gated to avoid consuming a `WITH`
        // that belongs to a different construct.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Partial-index predicate: `WHERE <expr>`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // MySQL allows trailing `ALGORITHM = ...` / `LOCK = ...` clauses,
        // parsed with the shared ALTER TABLE operation parser.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(Statement::CreateIndex(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        }))
    }
7557
7558 pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7559 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7560 let name = self.parse_identifier()?;
7561
7562 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7563 let schema = if self.parse_keyword(Keyword::SCHEMA) {
7564 Some(self.parse_identifier()?)
7565 } else {
7566 None
7567 };
7568
7569 let version = if self.parse_keyword(Keyword::VERSION) {
7570 Some(self.parse_identifier()?)
7571 } else {
7572 None
7573 };
7574
7575 let cascade = self.parse_keyword(Keyword::CASCADE);
7576
7577 (schema, version, cascade)
7578 } else {
7579 (None, None, false)
7580 };
7581
7582 Ok(CreateExtension {
7583 name,
7584 if_not_exists,
7585 schema,
7586 version,
7587 cascade,
7588 }
7589 .into())
7590 }
7591
7592 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7594 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7595 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7596 let cascade_or_restrict =
7597 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7598 Ok(Statement::DropExtension(DropExtension {
7599 names,
7600 if_exists,
7601 cascade_or_restrict: cascade_or_restrict
7602 .map(|k| match k {
7603 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7604 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7605 _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7606 })
7607 .transpose()?,
7608 }))
7609 }
7610
7611 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7614 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7615 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7616 let drop_behavior = self.parse_optional_drop_behavior();
7617 Ok(Statement::DropOperator(DropOperator {
7618 if_exists,
7619 operators,
7620 drop_behavior,
7621 }))
7622 }
7623
7624 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
7627 let name = self.parse_operator_name()?;
7628 self.expect_token(&Token::LParen)?;
7629
7630 let left_type = if self.parse_keyword(Keyword::NONE) {
7632 None
7633 } else {
7634 Some(self.parse_data_type()?)
7635 };
7636
7637 self.expect_token(&Token::Comma)?;
7638
7639 let right_type = self.parse_data_type()?;
7641
7642 self.expect_token(&Token::RParen)?;
7643
7644 Ok(DropOperatorSignature {
7645 name,
7646 left_type,
7647 right_type,
7648 })
7649 }
7650
7651 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7655 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7656 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7657 self.expect_keyword(Keyword::USING)?;
7658 let using = self.parse_identifier()?;
7659 let drop_behavior = self.parse_optional_drop_behavior();
7660 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7661 if_exists,
7662 names,
7663 using,
7664 drop_behavior,
7665 }))
7666 }
7667
7668 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7672 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7673 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7674 self.expect_keyword(Keyword::USING)?;
7675 let using = self.parse_identifier()?;
7676 let drop_behavior = self.parse_optional_drop_behavior();
7677 Ok(Statement::DropOperatorClass(DropOperatorClass {
7678 if_exists,
7679 names,
7680 using,
7681 drop_behavior,
7682 }))
7683 }
7684
7685 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7687 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7688 self.expect_token(&Token::LParen)?;
7689 let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7690 self.expect_token(&Token::RParen)?;
7691 Ok(HiveDistributionStyle::PARTITIONED { columns })
7692 } else {
7693 Ok(HiveDistributionStyle::NONE)
7694 }
7695 }
7696
7697 pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
7698 let mut hive_format: Option<HiveFormat> = None;
7699 loop {
7700 match self.parse_one_of_keywords(&[
7701 Keyword::ROW,
7702 Keyword::STORED,
7703 Keyword::LOCATION,
7704 Keyword::WITH,
7705 ]) {
7706 Some(Keyword::ROW) => {
7707 hive_format
7708 .get_or_insert_with(HiveFormat::default)
7709 .row_format = Some(self.parse_row_format()?);
7710 }
7711 Some(Keyword::STORED) => {
7712 self.expect_keyword_is(Keyword::AS)?;
7713 if self.parse_keyword(Keyword::INPUTFORMAT) {
7714 let input_format = self.parse_expr()?;
7715 self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7716 let output_format = self.parse_expr()?;
7717 hive_format.get_or_insert_with(HiveFormat::default).storage =
7718 Some(HiveIOFormat::IOF {
7719 input_format,
7720 output_format,
7721 });
7722 } else {
7723 let format = self.parse_file_format()?;
7724 hive_format.get_or_insert_with(HiveFormat::default).storage =
7725 Some(HiveIOFormat::FileFormat { format });
7726 }
7727 }
7728 Some(Keyword::LOCATION) => {
7729 hive_format.get_or_insert_with(HiveFormat::default).location =
7730 Some(self.parse_literal_string()?);
7731 }
7732 Some(Keyword::WITH) => {
7733 self.prev_token();
7734 let properties = self
7735 .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7736 if !properties.is_empty() {
7737 hive_format
7738 .get_or_insert_with(HiveFormat::default)
7739 .serde_properties = Some(properties);
7740 } else {
7741 break;
7742 }
7743 }
7744 None => break,
7745 _ => break,
7746 }
7747 }
7748
7749 Ok(hive_format)
7750 }
7751
    /// Parses a Hive `ROW FORMAT` clause, starting after the `ROW` keyword.
    ///
    /// Two forms are supported:
    /// * `ROW FORMAT SERDE '<class>'`
    /// * `ROW FORMAT DELIMITED [<delimiter clauses>...]`
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED form: collect any number of delimiter clauses,
                // stopping at the first keyword that does not complete a
                // recognized clause.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            // `FIELDS TERMINATED BY c [ESCAPED BY c]`
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            // `COLLECTION ITEMS TERMINATED BY c`
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            // `MAP KEYS TERMINATED BY c`
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            // `LINES TERMINATED BY c`
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            // `NULL DEFINED AS c`
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
7847
7848 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7849 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7850 Ok(Some(self.parse_identifier()?))
7851 } else {
7852 Ok(None)
7853 }
7854 }
7855
    /// Parses a `CREATE TABLE` statement, starting after the `TABLE` keyword.
    ///
    /// The `or_replace`/`temporary`/`global`/`transient` modifiers have
    /// already been consumed by the caller. Clauses are consumed in a fixed
    /// order, many of them gated on the active dialect; the result is
    /// assembled through `CreateTableBuilder`.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<Statement, ParserError> {
        // BigQuery allows unquoted hyphens in (project-qualified) names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // Postgres: `CREATE TABLE child PARTITION OF parent ...`.
        let partition_of = if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::OF])
        {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // Snowflake `CLONE source`; `.ok()` tolerates a missing source name.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        let (columns, constraints) = self.parse_columns()?;
        // Hive allows `COMMENT '...'` directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // `FOR VALUES ...` / `DEFAULT` is required iff this is a
        // `PARTITION OF` table.
        let for_values = if partition_of.is_some() {
            Some(self.parse_partition_for_values()?)
        } else {
            None
        };

        // SQLite `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse standalone `PRIMARY KEY <expr>` clause.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // ClickHouse `ORDER BY`: either a single expression or a
        // (possibly empty) parenthesized list.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token() != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite `STRICT` table mode.
        let strict = self.parse_keyword(Keyword::STRICT);

        // CTAS: `AS <query>`, or (in dialects that allow it) a bare SELECT;
        // in the latter case rewind so `parse_query` sees the SELECT keyword.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .build())
    }
7991
    /// Parses an optional `LIKE` clause of `CREATE TABLE`.
    ///
    /// Two surface forms are handled:
    /// * parenthesized (dialect-gated): `(LIKE name [INCLUDING | EXCLUDING
    ///   DEFAULTS])`
    /// * plain: `LIKE name` or `ILIKE name`
    ///
    /// Returns `None` (with no tokens consumed) when neither form matches.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The `(` we consumed belongs to something else (e.g. the
                // column list) — push it back and report "no LIKE clause".
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8029
8030 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8031 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8032 Ok(OnCommit::DeleteRows)
8033 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8034 Ok(OnCommit::PreserveRows)
8035 } else if self.parse_keywords(&[Keyword::DROP]) {
8036 Ok(OnCommit::Drop)
8037 } else {
8038 parser_err!(
8039 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8040 self.peek_token()
8041 )
8042 }
8043 }
8044
    /// Parses the partition-bound specification of a Postgres
    /// `CREATE TABLE ... PARTITION OF` statement.
    ///
    /// Accepts `DEFAULT`, or `FOR VALUES` followed by one of:
    /// * `IN (expr, ...)` — list partitioning
    /// * `FROM (bound, ...) TO (bound, ...)` — range partitioning
    /// * `WITH (MODULUS n, REMAINDER r)` — hash partitioning
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        // `DEFAULT` replaces the whole `FOR VALUES ...` clause.
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            self.expect_token(&Token::LParen)?;
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            // Range bounds may be expressions or MINVALUE/MAXVALUE markers.
            self.expect_token(&Token::LParen)?;
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected("IN, FROM, or WITH after FOR VALUES", self.peek_token())
        }
    }
8087
8088 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8090 if self.parse_keyword(Keyword::MINVALUE) {
8091 Ok(PartitionBoundValue::MinValue)
8092 } else if self.parse_keyword(Keyword::MAXVALUE) {
8093 Ok(PartitionBoundValue::MaxValue)
8094 } else {
8095 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8096 }
8097 }
8098
    /// Parses the optional configuration clauses of `CREATE TABLE`:
    /// `INHERITS`, `WITH (...)`, `TBLPROPERTIES (...)`, `PARTITION BY`,
    /// `CLUSTER BY`, `OPTIONS (...)`, and dialect-specific plain options.
    ///
    /// NOTE(review): `table_options` is overwritten, not merged, by each
    /// successive options clause (WITH, then TBLPROPERTIES, then OPTIONS) —
    /// the last one parsed wins.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // Postgres `INHERITS (parent, ...)`.
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // `WITH (...)` storage parameters (Postgres and others).
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive `TBLPROPERTIES (...)`; replaces any WITH options above.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // BigQuery `CLUSTER BY` and `OPTIONS (...)`.
        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            if let Token::Word(word) = self.peek_token().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // MySQL-style bare options (ENGINE=..., COMMENT '...', etc.), only
        // when no other options clause matched.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
8163
    /// Parses one MySQL-style plain table option (e.g. `ENGINE=InnoDB`,
    /// `COMMENT 'x'`, `DEFAULT CHARSET=utf8mb4`, `TABLESPACE ts`).
    ///
    /// Returns `Ok(None)` when the next tokens do not start a recognized
    /// option, consuming nothing in that case. Options with special value
    /// shapes are handled first; everything else falls through to a generic
    /// `key [=] value` parse.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // `START TRANSACTION` appears as a bare option in some dumps.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // `COMMENT [=] '<string>'` — the presence/absence of `=` is
        // preserved so the statement round-trips exactly.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // `ENGINE [=] name [(param, ...)]`.
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token() == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // `TABLESPACE [=] name [STORAGE [=] {DISK | MEMORY}]`.
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                // The tablespace name may be a bare word or quoted string.
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // `UNION [=] (tbl, ...)` for MERGE tables.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Generic `key [=] value` options. Multi-word spellings must be
        // probed before their single-word prefixes (e.g. `DEFAULT CHARSET`
        // before `CHARSET`).
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option; nothing was consumed.
            return Ok(None);
        };

        // The `=` between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // Prefer a literal value; fall back to a bare identifier
        // (e.g. `ROW_FORMAT=DYNAMIC`).
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
8356
8357 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8358 let mut options = Vec::new();
8359
8360 while let Some(option) = self.parse_plain_option()? {
8361 options.push(option);
8362 let _ = self.consume_token(&Token::Comma);
8365 }
8366
8367 Ok(options)
8368 }
8369
8370 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8371 let comment = if self.parse_keyword(Keyword::COMMENT) {
8372 let has_eq = self.consume_token(&Token::Eq);
8373 let comment = self.parse_comment_value()?;
8374 Some(if has_eq {
8375 CommentDef::WithEq(comment)
8376 } else {
8377 CommentDef::WithoutEq(comment)
8378 })
8379 } else {
8380 None
8381 };
8382 Ok(comment)
8383 }
8384
8385 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8386 let next_token = self.next_token();
8387 let value = match next_token.token {
8388 Token::SingleQuotedString(str) => str,
8389 Token::DollarQuotedString(str) => str.value,
8390 _ => self.expected("string literal", next_token)?,
8391 };
8392 Ok(value)
8393 }
8394
    /// Parses an optional parenthesized procedure parameter list.
    ///
    /// Returns `Some(vec![])` both when there is no `(` at all and for an
    /// empty `()` list; otherwise the parsed parameters.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        // No list, or an immediately-closed empty list.
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            if let Token::Word(_) = self.peek_token().token {
                params.push(self.parse_procedure_param()?)
            }
            // A `)` ends the list whether or not a comma preceded it (i.e.
            // trailing commas are tolerated); otherwise a comma is required.
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected("',' or ')' after parameter definition", self.peek_token());
            }
        }
        Ok(Some(params))
    }
8416
    /// Parses the parenthesized column/constraint list of `CREATE TABLE`.
    ///
    /// Returns empty vectors when there is no `(` (column-less CREATE TABLE
    /// forms) or for an empty `()` list. Table constraints and column
    /// definitions may be freely interleaved.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Try a table-level constraint first; otherwise any word starts
            // a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = self.peek_token().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected("column name or constraint definition", self.peek_token());
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token().token == Token::RParen;

            if !comma && !rparen {
                return self.expected("',' or ')' after column definition", self.peek_token());
            };

            // Close the list on `)`. A trailing comma before `)` is only
            // accepted when the dialect or parser options allow it.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
8452
8453 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8454 let mode = if self.parse_keyword(Keyword::IN) {
8455 Some(ArgMode::In)
8456 } else if self.parse_keyword(Keyword::OUT) {
8457 Some(ArgMode::Out)
8458 } else if self.parse_keyword(Keyword::INOUT) {
8459 Some(ArgMode::InOut)
8460 } else {
8461 None
8462 };
8463 let name = self.parse_identifier()?;
8464 let data_type = self.parse_data_type()?;
8465 let default = if self.consume_token(&Token::Eq) {
8466 Some(self.parse_expr()?)
8467 } else {
8468 None
8469 };
8470
8471 Ok(ProcedureParam {
8472 name,
8473 data_type,
8474 mode,
8475 default,
8476 })
8477 }
8478
8479 pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8480 let col_name = self.parse_identifier()?;
8481 let data_type = if self.is_column_type_sqlite_unspecified() {
8482 DataType::Unspecified
8483 } else {
8484 self.parse_data_type()?
8485 };
8486 let mut options = vec![];
8487 loop {
8488 if self.parse_keyword(Keyword::CONSTRAINT) {
8489 let name = Some(self.parse_identifier()?);
8490 if let Some(option) = self.parse_optional_column_option()? {
8491 options.push(ColumnOptionDef { name, option });
8492 } else {
8493 return self.expected(
8494 "constraint details after CONSTRAINT <name>",
8495 self.peek_token(),
8496 );
8497 }
8498 } else if let Some(option) = self.parse_optional_column_option()? {
8499 options.push(ColumnOptionDef { name: None, option });
8500 } else {
8501 break;
8502 };
8503 }
8504 Ok(ColumnDef {
8505 name: col_name,
8506 data_type,
8507 options,
8508 })
8509 }
8510
8511 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8512 if dialect_of!(self is SQLiteDialect) {
8513 match self.peek_token().token {
8514 Token::Word(word) => matches!(
8515 word.keyword,
8516 Keyword::CONSTRAINT
8517 | Keyword::PRIMARY
8518 | Keyword::NOT
8519 | Keyword::UNIQUE
8520 | Keyword::CHECK
8521 | Keyword::DEFAULT
8522 | Keyword::COLLATE
8523 | Keyword::REFERENCES
8524 | Keyword::GENERATED
8525 | Keyword::AS
8526 ),
8527 _ => true, }
8529 } else {
8530 false
8531 }
8532 }
8533
8534 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8535 if let Some(option) = self.dialect.parse_column_option(self)? {
8536 return option;
8537 }
8538
8539 self.with_state(
8540 ColumnDefinition,
8541 |parser| -> Result<Option<ColumnOption>, ParserError> {
8542 parser.parse_optional_column_option_inner()
8543 },
8544 )
8545 }
8546
    /// Parses one column option from the common (dialect-independent)
    /// grammar, e.g. `NOT NULL`, `DEFAULT <expr>`, `PRIMARY KEY`,
    /// `REFERENCES ...`, `CHECK (...)`, `GENERATED ...`.
    ///
    /// Returns `Ok(None)` when the upcoming tokens do not start any option
    /// recognized here.
    ///
    /// NOTE(review): several branches evaluate `parse_keyword(..)` *before*
    /// the dialect check (e.g. AUTO_INCREMENT, AUTOINCREMENT, ASC/DESC,
    /// ON UPDATE, AS, SRID, IDENTITY); if the keyword matches but the dialect
    /// check fails, the keyword stays consumed while parsing falls through to
    /// the next branch — confirm this is intended.
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(
                self.parse_column_option_expr()?,
            )))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            // ClickHouse MATERIALIZED / ALIAS / EPHEMERAL column options.
            Ok(Some(ColumnOption::Materialized(
                self.parse_column_option_expr()?,
            )))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // EPHEMERAL's default expression is optional: a `,` or `)`
            // directly after the keyword means there is none.
            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(
                    self.parse_column_option_expr()?,
                ))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            // Inline PRIMARY KEY: all fields except characteristics stay
            // empty at column level.
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display: KeyOrIndexDisplay::None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            // Inline foreign key: `REFERENCES tbl [(cols)]` followed by any
            // of MATCH / ON DELETE / ON UPDATE, each at most once, in any
            // order.
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None,
                    index_name: None,
                    columns: vec![],
                    foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The check expression is parsed in the Normal parser state.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Ok(Some(
                CheckConstraint {
                    name: None,
                    expr: Box::new(expr),
                    enforced: None,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            let expr = self.parse_column_option_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // Push OPTIONS back so `parse_options` can re-consume it.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(
                self.parse_column_option_expr()?,
            ))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // MSSQL `IDENTITY [(seed, increment)]`.
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            // SQLite `ON CONFLICT <resolution>` conflict clause.
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
8760
8761 fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8778 if self.peek_token_ref().token == Token::LParen {
8779 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8780 Ok(expr)
8781 } else {
8782 Ok(self.parse_expr()?)
8783 }
8784 }
8785
8786 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8787 let name = self.parse_object_name(false)?;
8788 self.expect_token(&Token::Eq)?;
8789 let value = self.parse_literal_string()?;
8790
8791 Ok(Tag::new(name, value))
8792 }
8793
    /// Parses the tail of a `GENERATED ...` column option (the `GENERATED`
    /// keyword has already been consumed):
    /// * `GENERATED ALWAYS AS IDENTITY [( <sequence options> )]`
    /// * `GENERATED BY DEFAULT AS IDENTITY [( <sequence options> )]`
    /// * `GENERATED ALWAYS AS ( <expr> ) [STORED | VIRTUAL]`
    ///
    /// NOTE(review): the optional parentheses are detected with
    /// `expect_token(&Token::LParen).is_ok()` — this assumes `expect_token`
    /// consumes only on success; confirm against its definition.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            let mut sequence_options = vec![];
            // The sequence-options parenthesis is optional.
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // The generation expression is parsed in the Normal state.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // PostgreSQL requires STORED here.
                    self.expected("STORED", self.peek_token())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // `GENERATED ALWAYS AS` without `(`: not a form we recognize.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
8860
8861 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8862 self.expect_token(&Token::LParen)?;
8864 let expr = self.parse_expr()?;
8865 self.expect_token(&Token::RParen)?;
8866
8867 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8868 (
8869 GeneratedAs::ExpStored,
8870 Some(GeneratedExpressionMode::Stored),
8871 )
8872 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8873 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8874 } else {
8875 (GeneratedAs::Always, None)
8876 };
8877
8878 Ok(Some(ColumnOption::Generated {
8879 generated_as: gen_as,
8880 sequence_options: None,
8881 generation_expr: Some(expr),
8882 generation_expr_mode: expr_mode,
8883 generated_keyword: false,
8884 }))
8885 }
8886
8887 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8888 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8889 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8890 {
8891 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8892
8893 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8894 self.expect_token(&Token::LParen)?;
8895 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8896 self.expect_token(&Token::RParen)?;
8897 Some(sorted_by_columns)
8898 } else {
8899 None
8900 };
8901
8902 self.expect_keyword_is(Keyword::INTO)?;
8903 let num_buckets = self.parse_number_value()?.value;
8904 self.expect_keyword_is(Keyword::BUCKETS)?;
8905 Some(ClusteredBy {
8906 columns,
8907 sorted_by,
8908 num_buckets,
8909 })
8910 } else {
8911 None
8912 };
8913 Ok(clustered_by)
8914 }
8915
8916 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8917 if self.parse_keyword(Keyword::RESTRICT) {
8918 Ok(ReferentialAction::Restrict)
8919 } else if self.parse_keyword(Keyword::CASCADE) {
8920 Ok(ReferentialAction::Cascade)
8921 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8922 Ok(ReferentialAction::SetNull)
8923 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8924 Ok(ReferentialAction::NoAction)
8925 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8926 Ok(ReferentialAction::SetDefault)
8927 } else {
8928 self.expected(
8929 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8930 self.peek_token(),
8931 )
8932 }
8933 }
8934
8935 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
8936 if self.parse_keyword(Keyword::FULL) {
8937 Ok(ConstraintReferenceMatchKind::Full)
8938 } else if self.parse_keyword(Keyword::PARTIAL) {
8939 Ok(ConstraintReferenceMatchKind::Partial)
8940 } else if self.parse_keyword(Keyword::SIMPLE) {
8941 Ok(ConstraintReferenceMatchKind::Simple)
8942 } else {
8943 self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
8944 }
8945 }
8946
    /// Parses any combination of constraint characteristics:
    /// `[NOT] DEFERRABLE`, `INITIALLY DEFERRED|IMMEDIATE`, `[NOT] ENFORCED`.
    ///
    /// Each characteristic may appear at most once, in any order. Returns
    /// `Ok(None)` when none were present.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // `NOT DEFERRABLE` must be probed before bare `DEFERRABLE`.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                // `INITIALLY` must be followed by DEFERRED or IMMEDIATE.
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        // Only report characteristics when at least one was parsed.
        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
8983
    /// Parses an optional table-level constraint:
    /// `[CONSTRAINT <name>] { UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK
    /// | INDEX/KEY | FULLTEXT/SPATIAL | EXCLUDE } ...`.
    ///
    /// Returns `Ok(None)` (consuming nothing) when the upcoming tokens do
    /// not start a constraint. A `CONSTRAINT <name>` prefix without a
    /// recognized constraint body after it is an error.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // MySQL allows `UNIQUE KEY|INDEX`; other dialects must not
                // have a KEY/INDEX display token here.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self
                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
                }

                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                // Optional index name and `USING <type>` before the columns.
                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order,
                // each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // Optional `[NOT] ENFORCED` suffix.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                // Table-level `INDEX|KEY [name] [USING ...] (cols)` — only
                // without a `CONSTRAINT <name>` prefix.
                let display_as_key = w.keyword == Keyword::KEY;

                // A following USING keyword is the index type, not a name.
                let name = match self.peek_token().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // FULLTEXT/SPATIAL cannot carry a `CONSTRAINT <name>` prefix.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // `EXCLUDE [USING method] (elem WITH op, ...)
                // [INCLUDE (...)] [WHERE (...)]`.
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements =
                    self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // `CONSTRAINT <name>` with no constraint body.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint: back up over the token we consumed.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
9228
9229 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
9230 let expr = self.parse_expr()?;
9231 self.expect_keyword_is(Keyword::WITH)?;
9232 let operator_token = self.next_token();
9233 let operator = operator_token.token.to_string();
9234 Ok(ExclusionElement { expr, operator })
9235 }
9236
9237 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9238 Ok(if self.parse_keyword(Keyword::NULLS) {
9239 let not = self.parse_keyword(Keyword::NOT);
9240 self.expect_keyword_is(Keyword::DISTINCT)?;
9241 if not {
9242 NullsDistinctOption::NotDistinct
9243 } else {
9244 NullsDistinctOption::Distinct
9245 }
9246 } else {
9247 NullsDistinctOption::None
9248 })
9249 }
9250
9251 pub fn maybe_parse_options(
9252 &mut self,
9253 keyword: Keyword,
9254 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9255 if let Token::Word(word) = self.peek_token().token {
9256 if word.keyword == keyword {
9257 return Ok(Some(self.parse_options(keyword)?));
9258 }
9259 };
9260 Ok(None)
9261 }
9262
9263 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9264 if self.parse_keyword(keyword) {
9265 self.expect_token(&Token::LParen)?;
9266 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9267 self.expect_token(&Token::RParen)?;
9268 Ok(options)
9269 } else {
9270 Ok(vec![])
9271 }
9272 }
9273
9274 pub fn parse_options_with_keywords(
9275 &mut self,
9276 keywords: &[Keyword],
9277 ) -> Result<Vec<SqlOption>, ParserError> {
9278 if self.parse_keywords(keywords) {
9279 self.expect_token(&Token::LParen)?;
9280 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9281 self.expect_token(&Token::RParen)?;
9282 Ok(options)
9283 } else {
9284 Ok(vec![])
9285 }
9286 }
9287
9288 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9289 Ok(if self.parse_keyword(Keyword::BTREE) {
9290 IndexType::BTree
9291 } else if self.parse_keyword(Keyword::HASH) {
9292 IndexType::Hash
9293 } else if self.parse_keyword(Keyword::GIN) {
9294 IndexType::GIN
9295 } else if self.parse_keyword(Keyword::GIST) {
9296 IndexType::GiST
9297 } else if self.parse_keyword(Keyword::SPGIST) {
9298 IndexType::SPGiST
9299 } else if self.parse_keyword(Keyword::BRIN) {
9300 IndexType::BRIN
9301 } else if self.parse_keyword(Keyword::BLOOM) {
9302 IndexType::Bloom
9303 } else {
9304 IndexType::Custom(self.parse_identifier()?)
9305 })
9306 }
9307
9308 pub fn parse_optional_using_then_index_type(
9314 &mut self,
9315 ) -> Result<Option<IndexType>, ParserError> {
9316 if self.parse_keyword(Keyword::USING) {
9317 Ok(Some(self.parse_index_type()?))
9318 } else {
9319 Ok(None)
9320 }
9321 }
9322
    /// Parses an identifier if one follows; on failure nothing is consumed
    /// (backtracking is handled by `maybe_parse`).
    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
        self.maybe_parse(|parser| parser.parse_identifier())
    }
9328
9329 #[must_use]
9330 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9331 if self.parse_keyword(Keyword::KEY) {
9332 KeyOrIndexDisplay::Key
9333 } else if self.parse_keyword(Keyword::INDEX) {
9334 KeyOrIndexDisplay::Index
9335 } else {
9336 KeyOrIndexDisplay::None
9337 }
9338 }
9339
9340 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9341 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9342 Ok(Some(IndexOption::Using(index_type)))
9343 } else if self.parse_keyword(Keyword::COMMENT) {
9344 let s = self.parse_literal_string()?;
9345 Ok(Some(IndexOption::Comment(s)))
9346 } else {
9347 Ok(None)
9348 }
9349 }
9350
9351 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9352 let mut options = Vec::new();
9353
9354 loop {
9355 match self.parse_optional_index_option()? {
9356 Some(index_option) => options.push(index_option),
9357 None => return Ok(options),
9358 }
9359 }
9360 }
9361
9362 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9363 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9364
9365 match self.peek_token().token {
9366 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9367 Ok(SqlOption::Ident(self.parse_identifier()?))
9368 }
9369 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9370 self.parse_option_partition()
9371 }
9372 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9373 self.parse_option_clustered()
9374 }
9375 _ => {
9376 let name = self.parse_identifier()?;
9377 self.expect_token(&Token::Eq)?;
9378 let value = self.parse_expr()?;
9379
9380 Ok(SqlOption::KeyValue { key: name, value })
9381 }
9382 }
9383 }
9384
9385 pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
9386 if self.parse_keywords(&[
9387 Keyword::CLUSTERED,
9388 Keyword::COLUMNSTORE,
9389 Keyword::INDEX,
9390 Keyword::ORDER,
9391 ]) {
9392 Ok(SqlOption::Clustered(
9393 TableOptionsClustered::ColumnstoreIndexOrder(
9394 self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
9395 ),
9396 ))
9397 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
9398 Ok(SqlOption::Clustered(
9399 TableOptionsClustered::ColumnstoreIndex,
9400 ))
9401 } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
9402 self.expect_token(&Token::LParen)?;
9403
9404 let columns = self.parse_comma_separated(|p| {
9405 let name = p.parse_identifier()?;
9406 let asc = p.parse_asc_desc();
9407
9408 Ok(ClusteredIndex { name, asc })
9409 })?;
9410
9411 self.expect_token(&Token::RParen)?;
9412
9413 Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
9414 } else {
9415 Err(ParserError::ParserError(
9416 "invalid CLUSTERED sequence".to_string(),
9417 ))
9418 }
9419 }
9420
9421 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9422 self.expect_keyword_is(Keyword::PARTITION)?;
9423 self.expect_token(&Token::LParen)?;
9424 let column_name = self.parse_identifier()?;
9425
9426 self.expect_keyword_is(Keyword::RANGE)?;
9427 let range_direction = if self.parse_keyword(Keyword::LEFT) {
9428 Some(PartitionRangeDirection::Left)
9429 } else if self.parse_keyword(Keyword::RIGHT) {
9430 Some(PartitionRangeDirection::Right)
9431 } else {
9432 None
9433 };
9434
9435 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9436 self.expect_token(&Token::LParen)?;
9437
9438 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9439
9440 self.expect_token(&Token::RParen)?;
9441 self.expect_token(&Token::RParen)?;
9442
9443 Ok(SqlOption::Partition {
9444 column_name,
9445 range_direction,
9446 for_values,
9447 })
9448 }
9449
9450 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9451 self.expect_token(&Token::LParen)?;
9452 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9453 self.expect_token(&Token::RParen)?;
9454 Ok(Partition::Partitions(partitions))
9455 }
9456
9457 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9458 self.expect_token(&Token::LParen)?;
9459 self.expect_keyword_is(Keyword::SELECT)?;
9460 let projection = self.parse_projection()?;
9461 let group_by = self.parse_optional_group_by()?;
9462 let order_by = self.parse_optional_order_by()?;
9463 self.expect_token(&Token::RParen)?;
9464 Ok(ProjectionSelect {
9465 projection,
9466 group_by,
9467 order_by,
9468 })
9469 }
9470 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9471 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9472 let name = self.parse_identifier()?;
9473 let query = self.parse_projection_select()?;
9474 Ok(AlterTableOperation::AddProjection {
9475 if_not_exists,
9476 name,
9477 select: query,
9478 })
9479 }
9480
    /// Parses one operation of an `ALTER TABLE` statement; the caller parses
    /// the comma-separated list of operations. Dispatches on the leading
    /// keyword and covers ANSI plus MySQL, PostgreSQL, Hive, Snowflake and
    /// ClickHouse extensions. Dialect-specific branches are guarded with
    /// `dialect_of!`.
    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <table constraint> | ADD PROJECTION | ADD PARTITION | ADD [COLUMN]
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                // Postgres: `ADD CONSTRAINT ... NOT VALID`
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterTableOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
                && self.parse_keyword(Keyword::PROJECTION)
            {
                return self.parse_alter_table_add_projection();
            } else {
                // IF NOT EXISTS may precede either PARTITION or COLUMN.
                let if_not_exists =
                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
                // Hive: `ADD [IF NOT EXISTS] PARTITION (...) [PARTITION (...)]...`
                let mut new_partitions = vec![];
                loop {
                    if self.parse_keyword(Keyword::PARTITION) {
                        new_partitions.push(self.parse_partition()?);
                    } else {
                        break;
                    }
                }
                if !new_partitions.is_empty() {
                    AlterTableOperation::AddPartitions {
                        if_not_exists,
                        new_partitions,
                    }
                } else {
                    // `ADD [COLUMN] [IF NOT EXISTS] <column definition>`
                    let column_keyword = self.parse_keyword(Keyword::COLUMN);

                    // In these dialects IF NOT EXISTS may also follow COLUMN;
                    // either placement (or both) counts.
                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
                    {
                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
                            || if_not_exists
                    } else {
                        false
                    };

                    let column_def = self.parse_column_def()?;

                    // MySQL: optional `FIRST` / `AFTER <col>` placement.
                    let column_position = self.parse_column_position()?;

                    AlterTableOperation::AddColumn {
                        column_keyword,
                        if_not_exists,
                        column_def,
                        column_position,
                    }
                }
            }
        } else if self.parse_keyword(Keyword::RENAME) {
            // RENAME CONSTRAINT | RENAME TO/AS <table> | RENAME [COLUMN] <old> TO <new>
            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
                let old_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_name = self.parse_identifier()?;
                AlterTableOperation::RenameConstraint { old_name, new_name }
            } else if self.parse_keyword(Keyword::TO) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::To(table_name),
                }
            } else if self.parse_keyword(Keyword::AS) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::As(table_name),
                }
            } else {
                // The COLUMN keyword is optional.
                let _ = self.parse_keyword(Keyword::COLUMN);
                let old_column_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_column_name = self.parse_identifier()?;
                AlterTableOperation::RenameColumn {
                    old_column_name,
                    new_column_name,
                }
            }
        } else if self.parse_keyword(Keyword::DISABLE) {
            // Postgres: DISABLE ROW LEVEL SECURITY | RULE <name> | TRIGGER <name>
            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::DisableRowLevelSecurity {}
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableTrigger { name }
            } else {
                return self.expected(
                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keyword(Keyword::ENABLE) {
            // Postgres: ENABLE [ALWAYS | REPLICA] RULE/TRIGGER, or ROW LEVEL SECURITY.
            // Two-keyword forms are tried before the bare RULE/TRIGGER forms.
            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysRule { name }
            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysTrigger { name }
            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::EnableRowLevelSecurity {}
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaRule { name }
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaTrigger { name }
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableTrigger { name }
            } else {
                return self.expected(
                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keywords(&[Keyword::FORCE, Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
            // Postgres row-level-security toggles.
            AlterTableOperation::ForceRowLevelSecurity
        } else if self.parse_keywords(&[Keyword::NO, Keyword::FORCE, Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
            AlterTableOperation::NoForceRowLevelSecurity
        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse: CLEAR PROJECTION [IF EXISTS] <name> [IN PARTITION <id>]
            // NOTE(review): the keywords are consumed before the dialect check, so
            // on a non-matching dialect CLEAR PROJECTION is already eaten — confirm
            // this is the intended short-circuit behavior.
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::ClearProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse: MATERIALIZE PROJECTION [IF EXISTS] <name> [IN PARTITION <id>]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::MaterializeProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keyword(Keyword::DROP) {
            // DROP PARTITION | CONSTRAINT | PRIMARY/FOREIGN KEY | INDEX | PROJECTION
            // | CLUSTERING KEY | [COLUMN] <names>
            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: true,
                }
            } else if self.parse_keyword(Keyword::PARTITION) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: false,
                }
            } else if self.parse_keyword(Keyword::CONSTRAINT) {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                // Optional CASCADE / RESTRICT.
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropConstraint {
                    if_exists,
                    name,
                    drop_behavior,
                }
            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropPrimaryKey { drop_behavior }
            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropForeignKey {
                    name,
                    drop_behavior,
                }
            } else if self.parse_keyword(Keyword::INDEX) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DropIndex { name }
            } else if self.parse_keyword(Keyword::PROJECTION)
                && dialect_of!(self is ClickHouseDialect|GenericDialect)
            {
                // NOTE(review): as with CLEAR PROJECTION above, PROJECTION is
                // consumed before the dialect check — confirm intended.
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                AlterTableOperation::DropProjection { if_exists, name }
            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
                AlterTableOperation::DropClusteringKey
            } else {
                // `DROP [COLUMN] [IF EXISTS] <name>[, <name>...]`
                let has_column_keyword = self.parse_keyword(Keyword::COLUMN);
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
                    self.parse_comma_separated(Parser::parse_identifier)?
                } else {
                    vec![self.parse_identifier()?]
                };
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropColumn {
                    has_column_keyword,
                    column_names,
                    if_exists,
                    drop_behavior,
                }
            }
        } else if self.parse_keyword(Keyword::PARTITION) {
            // Hive: PARTITION (...) RENAME TO PARTITION (...)
            self.expect_token(&Token::LParen)?;
            let before = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::RENAME)?;
            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
            self.expect_token(&Token::LParen)?;
            let renames = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::RenamePartitions {
                old_partitions: before,
                new_partitions: renames,
            }
        } else if self.parse_keyword(Keyword::CHANGE) {
            // MySQL: CHANGE [COLUMN] <old> <new> <type> [options] [FIRST|AFTER ...]
            let _ = self.parse_keyword(Keyword::COLUMN);
            let old_name = self.parse_identifier()?;
            let new_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ChangeColumn {
                old_name,
                new_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::MODIFY) {
            // MySQL: MODIFY [COLUMN] <name> <type> [options] [FIRST|AFTER ...]
            let _ = self.parse_keyword(Keyword::COLUMN);
            let col_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ModifyColumn {
                col_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::ALTER) {
            // ALTER [COLUMN] <name> <sub-operation>
            let _ = self.parse_keyword(Keyword::COLUMN);
            let column_name = self.parse_identifier()?;
            // Only used to tailor the error message below; ADD GENERATED is
            // Postgres-specific.
            let is_postgresql = dialect_of!(self is PostgreSqlDialect);

            let op: AlterColumnOperation = if self.parse_keywords(&[
                Keyword::SET,
                Keyword::NOT,
                Keyword::NULL,
            ]) {
                AlterColumnOperation::SetNotNull {}
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
                AlterColumnOperation::DropNotNull {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
                AlterColumnOperation::SetDefault {
                    value: self.parse_expr()?,
                }
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
                AlterColumnOperation::DropDefault {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
                // `SET DATA TYPE <type>` vs bare `TYPE <type>` — the flag records
                // which spelling was used so it can round-trip.
                self.parse_set_data_type(true)?
            } else if self.parse_keyword(Keyword::TYPE) {
                self.parse_set_data_type(false)?
            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
                // Postgres: ADD GENERATED [ALWAYS | BY DEFAULT] AS IDENTITY [(seq opts)]
                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
                    Some(GeneratedAs::Always)
                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
                    Some(GeneratedAs::ByDefault)
                } else {
                    None
                };

                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;

                let mut sequence_options: Option<Vec<SequenceOptions>> = None;

                if self.peek_token().token == Token::LParen {
                    self.expect_token(&Token::LParen)?;
                    sequence_options = Some(self.parse_create_sequence_options()?);
                    self.expect_token(&Token::RParen)?;
                }

                AlterColumnOperation::AddGenerated {
                    generated_as,
                    sequence_options,
                }
            } else {
                let message = if is_postgresql {
                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
                } else {
                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
                };

                return self.expected(message, self.peek_token());
            };
            AlterTableOperation::AlterColumn { column_name, op }
        } else if self.parse_keyword(Keyword::SWAP) {
            // Snowflake: SWAP WITH <table>
            self.expect_keyword_is(Keyword::WITH)?;
            let table_name = self.parse_object_name(false)?;
            AlterTableOperation::SwapWith { table_name }
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
        {
            let new_owner = self.parse_owner()?;
            AlterTableOperation::OwnerTo { new_owner }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::ATTACH)
        {
            AlterTableOperation::AttachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::DETACH)
        {
            AlterTableOperation::DetachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::FREEZE)
        {
            // ClickHouse: FREEZE PART/PARTITION <expr> [WITH NAME <backup name>]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::FreezePartition {
                partition,
                with_name,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::UNFREEZE)
        {
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::UnfreezePartition {
                partition,
                with_name,
            }
        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            // Snowflake: CLUSTER BY (<expr>, ...)
            self.expect_token(&Token::LParen)?;
            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::ClusterBy { exprs }
        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
            AlterTableOperation::SuspendRecluster
        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
            AlterTableOperation::ResumeRecluster
        } else if self.parse_keyword(Keyword::LOCK) {
            // MySQL: LOCK [=] DEFAULT|EXCLUSIVE|NONE|SHARED
            let equals = self.consume_token(&Token::Eq);
            let lock = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::EXCLUSIVE,
                Keyword::NONE,
                Keyword::SHARED,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableLock::Default,
                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
                Some(Keyword::NONE) => AlterTableLock::None,
                Some(Keyword::SHARED) => AlterTableLock::Shared,
                _ => self.expected(
                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Lock { equals, lock }
        } else if self.parse_keyword(Keyword::ALGORITHM) {
            // MySQL: ALGORITHM [=] DEFAULT|INSTANT|INPLACE|COPY
            let equals = self.consume_token(&Token::Eq);
            let algorithm = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::INSTANT,
                Keyword::INPLACE,
                Keyword::COPY,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
                _ => self.expected(
                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Algorithm { equals, algorithm }
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            // MySQL: AUTO_INCREMENT [=] <value>
            let equals = self.consume_token(&Token::Eq);
            let value = self.parse_number_value()?;
            AlterTableOperation::AutoIncrement { equals, value }
        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
            // Postgres: REPLICA IDENTITY NONE|FULL|DEFAULT|USING INDEX <name>
            let identity = if self.parse_keyword(Keyword::NONE) {
                ReplicaIdentity::None
            } else if self.parse_keyword(Keyword::FULL) {
                ReplicaIdentity::Full
            } else if self.parse_keyword(Keyword::DEFAULT) {
                ReplicaIdentity::Default
            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                ReplicaIdentity::Index(self.parse_identifier()?)
            } else {
                return self.expected(
                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
                    self.peek_token(),
                );
            };

            AlterTableOperation::ReplicaIdentity { identity }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterTableOperation::ValidateConstraint { name }
        } else {
            // Fallbacks: Hive/Spark `SET TBLPROPERTIES (...)`, then BigQuery-style
            // `SET OPTIONS (...)` / `SET (...)`. Both parsers return an empty vec
            // when their leading keyword(s) are absent.
            let mut options =
                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
            if !options.is_empty() {
                AlterTableOperation::SetTblProperties {
                    table_properties: options,
                }
            } else {
                options = self.parse_options(Keyword::SET)?;
                if !options.is_empty() {
                    AlterTableOperation::SetOptionsParens { options }
                } else {
                    return self.expected(
                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
                        self.peek_token(),
                    );
                }
            }
        };
        Ok(operation)
    }
9941
9942 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9943 let data_type = self.parse_data_type()?;
9944 let using = if self.dialect.supports_alter_column_type_using()
9945 && self.parse_keyword(Keyword::USING)
9946 {
9947 Some(self.parse_expr()?)
9948 } else {
9949 None
9950 };
9951 Ok(AlterColumnOperation::SetDataType {
9952 data_type,
9953 using,
9954 had_set,
9955 })
9956 }
9957
9958 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9959 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9960 match keyword {
9961 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9962 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9963 unexpected_keyword => Err(ParserError::ParserError(
9965 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
9966 )),
9967 }
9968 }
9969
9970 pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
9971 let object_type = self.expect_one_of_keywords(&[
9972 Keyword::VIEW,
9973 Keyword::TYPE,
9974 Keyword::TABLE,
9975 Keyword::INDEX,
9976 Keyword::ROLE,
9977 Keyword::POLICY,
9978 Keyword::CONNECTOR,
9979 Keyword::ICEBERG,
9980 Keyword::SCHEMA,
9981 Keyword::USER,
9982 Keyword::OPERATOR,
9983 ])?;
9984 match object_type {
9985 Keyword::SCHEMA => {
9986 self.prev_token();
9987 self.prev_token();
9988 self.parse_alter_schema()
9989 }
9990 Keyword::VIEW => self.parse_alter_view(),
9991 Keyword::TYPE => self.parse_alter_type(),
9992 Keyword::TABLE => self.parse_alter_table(false),
9993 Keyword::ICEBERG => {
9994 self.expect_keyword(Keyword::TABLE)?;
9995 self.parse_alter_table(true)
9996 }
9997 Keyword::INDEX => {
9998 let index_name = self.parse_object_name(false)?;
9999 let operation = if self.parse_keyword(Keyword::RENAME) {
10000 if self.parse_keyword(Keyword::TO) {
10001 let index_name = self.parse_object_name(false)?;
10002 AlterIndexOperation::RenameIndex { index_name }
10003 } else {
10004 return self.expected("TO after RENAME", self.peek_token());
10005 }
10006 } else {
10007 return self.expected("RENAME after ALTER INDEX", self.peek_token());
10008 };
10009
10010 Ok(Statement::AlterIndex {
10011 name: index_name,
10012 operation,
10013 })
10014 }
10015 Keyword::OPERATOR => self.parse_alter_operator(),
10016 Keyword::ROLE => self.parse_alter_role(),
10017 Keyword::POLICY => self.parse_alter_policy(),
10018 Keyword::CONNECTOR => self.parse_alter_connector(),
10019 Keyword::USER => self.parse_alter_user(),
10020 unexpected_keyword => Err(ParserError::ParserError(
10022 format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
10023 )),
10024 }
10025 }
10026
10027 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
10029 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10030 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
10032 let on_cluster = self.parse_optional_on_cluster()?;
10033 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
10034
10035 let mut location = None;
10036 if self.parse_keyword(Keyword::LOCATION) {
10037 location = Some(HiveSetLocation {
10038 has_set: false,
10039 location: self.parse_identifier()?,
10040 });
10041 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
10042 location = Some(HiveSetLocation {
10043 has_set: true,
10044 location: self.parse_identifier()?,
10045 });
10046 }
10047
10048 let end_token = if self.peek_token_ref().token == Token::SemiColon {
10049 self.peek_token_ref().clone()
10050 } else {
10051 self.get_current_token().clone()
10052 };
10053
10054 Ok(AlterTable {
10055 name: table_name,
10056 if_exists,
10057 only,
10058 operations,
10059 location,
10060 on_cluster,
10061 table_type: if iceberg {
10062 Some(AlterTableType::Iceberg)
10063 } else {
10064 None
10065 },
10066 end_token: AttachedToken(end_token),
10067 }
10068 .into())
10069 }
10070
10071 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
10072 let name = self.parse_object_name(false)?;
10073 let columns = self.parse_parenthesized_column_list(Optional, false)?;
10074
10075 let with_options = self.parse_options(Keyword::WITH)?;
10076
10077 self.expect_keyword_is(Keyword::AS)?;
10078 let query = self.parse_query()?;
10079
10080 Ok(Statement::AlterView {
10081 name,
10082 columns,
10083 query,
10084 with_options,
10085 })
10086 }
10087
10088 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10090 let name = self.parse_object_name(false)?;
10091
10092 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10093 let new_name = self.parse_identifier()?;
10094 Ok(Statement::AlterType(AlterType {
10095 name,
10096 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10097 }))
10098 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10099 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10100 let new_enum_value = self.parse_identifier()?;
10101 let position = if self.parse_keyword(Keyword::BEFORE) {
10102 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10103 } else if self.parse_keyword(Keyword::AFTER) {
10104 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10105 } else {
10106 None
10107 };
10108
10109 Ok(Statement::AlterType(AlterType {
10110 name,
10111 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10112 if_not_exists,
10113 value: new_enum_value,
10114 position,
10115 }),
10116 }))
10117 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10118 let existing_enum_value = self.parse_identifier()?;
10119 self.expect_keyword(Keyword::TO)?;
10120 let new_enum_value = self.parse_identifier()?;
10121
10122 Ok(Statement::AlterType(AlterType {
10123 name,
10124 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10125 from: existing_enum_value,
10126 to: new_enum_value,
10127 }),
10128 }))
10129 } else {
10130 self.expected_ref(
10131 "{RENAME TO | { RENAME | ADD } VALUE}",
10132 self.peek_token_ref(),
10133 )
10134 }
10135 }
10136
    /// Parses the remainder of a Postgres `ALTER OPERATOR` statement:
    /// `<op> (<left_type>, <right_type>) { OWNER TO ... | SET SCHEMA ... | SET (...) }`.
    /// `NONE` as the left type denotes a prefix operator.
    pub fn parse_alter_operator(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_operator_name()?;

        // Operand signature is mandatory: `(left, right)`.
        self.expect_token(&Token::LParen)?;

        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // Owner may be a special role keyword or a plain identifier.
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            // `SET (option [= value], ...)` — a comma-separated option list.
            self.expect_token(&Token::LParen)?;

            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    // RESTRICT/JOIN take `= <proc>` where NONE clears the estimator.
                    Keyword::RESTRICT => {
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    // COMMUTATOR/NEGATOR take `= <operator>`.
                    Keyword::COMMUTATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    // HASHES/MERGES are bare flags.
                    Keyword::HASHES => {
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // Unreachable: expect_one_of_keywords only returns listed keywords.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                // Options are comma-separated; no trailing comma.
                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterOperator(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        }))
    }
10246
    /// Parses a full `ALTER SCHEMA` statement. Unlike its siblings, this is
    /// called with the token stream rewound, so it consumes `ALTER SCHEMA`
    /// itself (see the SCHEMA arm of `parse_alter`).
    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
            // Step back over OPTIONS so parse_options can consume it itself.
            self.prev_token();
            let options = self.parse_options(Keyword::OPTIONS)?;
            AlterSchemaOperation::SetOptionsParens { options }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
            let collate = self.parse_expr()?;
            AlterSchemaOperation::SetDefaultCollate { collate }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            // OPTIONS (...) is optional after the replica name.
            let options = if self.peek_keyword(Keyword::OPTIONS) {
                Some(self.parse_options(Keyword::OPTIONS)?)
            } else {
                None
            };
            AlterSchemaOperation::AddReplica { replica, options }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            AlterSchemaOperation::DropReplica { replica }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterSchemaOperation::Rename { name: new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterSchemaOperation::OwnerTo { owner }
        } else {
            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
        };
        Ok(Statement::AlterSchema(AlterSchema {
            name,
            if_exists,
            operations: vec![operation],
        }))
    }
10286
10287 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10290 let object_name = self.parse_object_name(false)?;
10291 if self.peek_token().token == Token::LParen {
10292 match self.parse_function(object_name)? {
10293 Expr::Function(f) => Ok(Statement::Call(f)),
10294 other => parser_err!(
10295 format!("Expected a simple procedure call but found: {other}"),
10296 self.peek_token().span.start
10297 ),
10298 }
10299 } else {
10300 Ok(Statement::Call(Function {
10301 name: object_name,
10302 uses_odbc_syntax: false,
10303 parameters: FunctionArguments::None,
10304 args: FunctionArguments::None,
10305 over: None,
10306 filter: None,
10307 null_treatment: None,
10308 within_group: vec![],
10309 }))
10310 }
10311 }
10312
10313 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10315 let source;
10316 if self.consume_token(&Token::LParen) {
10317 source = CopySource::Query(self.parse_query()?);
10318 self.expect_token(&Token::RParen)?;
10319 } else {
10320 let table_name = self.parse_object_name(false)?;
10321 let columns = self.parse_parenthesized_column_list(Optional, false)?;
10322 source = CopySource::Table {
10323 table_name,
10324 columns,
10325 };
10326 }
10327 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10328 Some(Keyword::FROM) => false,
10329 Some(Keyword::TO) => true,
10330 _ => self.expected("FROM or TO", self.peek_token())?,
10331 };
10332 if !to {
10333 if let CopySource::Query(_) = source {
10336 return Err(ParserError::ParserError(
10337 "COPY ... FROM does not support query as a source".to_string(),
10338 ));
10339 }
10340 }
10341 let target = if self.parse_keyword(Keyword::STDIN) {
10342 CopyTarget::Stdin
10343 } else if self.parse_keyword(Keyword::STDOUT) {
10344 CopyTarget::Stdout
10345 } else if self.parse_keyword(Keyword::PROGRAM) {
10346 CopyTarget::Program {
10347 command: self.parse_literal_string()?,
10348 }
10349 } else {
10350 CopyTarget::File {
10351 filename: self.parse_literal_string()?,
10352 }
10353 };
10354 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
10356 if self.consume_token(&Token::LParen) {
10357 options = self.parse_comma_separated(Parser::parse_copy_option)?;
10358 self.expect_token(&Token::RParen)?;
10359 }
10360 let mut legacy_options = vec![];
10361 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10362 legacy_options.push(opt);
10363 }
10364 let values = if let CopyTarget::Stdin = target {
10365 self.expect_token(&Token::SemiColon)?;
10366 self.parse_tsv()
10367 } else {
10368 vec![]
10369 };
10370 Ok(Statement::Copy {
10371 source,
10372 to,
10373 target,
10374 options,
10375 legacy_options,
10376 values,
10377 })
10378 }
10379
10380 fn parse_open(&mut self) -> Result<Statement, ParserError> {
10382 self.expect_keyword(Keyword::OPEN)?;
10383 Ok(Statement::Open(OpenStatement {
10384 cursor_name: self.parse_identifier()?,
10385 }))
10386 }
10387
10388 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10389 let cursor = if self.parse_keyword(Keyword::ALL) {
10390 CloseCursor::All
10391 } else {
10392 let name = self.parse_identifier()?;
10393
10394 CloseCursor::Specific { name }
10395 };
10396
10397 Ok(Statement::Close { cursor })
10398 }
10399
    /// Parse a single option from the parenthesized option list of a
    /// PostgreSQL-style `COPY ... WITH (option [, ...])` statement.
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
            // Boolean options: a bare keyword (or explicit TRUE) enables the
            // option; only an explicit FALSE disables it.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            // The FORCE_* options each take a mandatory parenthesized column list.
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected("option", self.peek_token())?,
        };
        Ok(ret)
    }
10441
10442 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10443 if self.parse_keyword(Keyword::FORMAT) {
10445 let _ = self.parse_keyword(Keyword::AS);
10446 }
10447
10448 let ret = match self.parse_one_of_keywords(&[
10449 Keyword::ACCEPTANYDATE,
10450 Keyword::ACCEPTINVCHARS,
10451 Keyword::ADDQUOTES,
10452 Keyword::ALLOWOVERWRITE,
10453 Keyword::BINARY,
10454 Keyword::BLANKSASNULL,
10455 Keyword::BZIP2,
10456 Keyword::CLEANPATH,
10457 Keyword::COMPUPDATE,
10458 Keyword::CSV,
10459 Keyword::DATEFORMAT,
10460 Keyword::DELIMITER,
10461 Keyword::EMPTYASNULL,
10462 Keyword::ENCRYPTED,
10463 Keyword::ESCAPE,
10464 Keyword::EXTENSION,
10465 Keyword::FIXEDWIDTH,
10466 Keyword::GZIP,
10467 Keyword::HEADER,
10468 Keyword::IAM_ROLE,
10469 Keyword::IGNOREHEADER,
10470 Keyword::JSON,
10471 Keyword::MANIFEST,
10472 Keyword::MAXFILESIZE,
10473 Keyword::NULL,
10474 Keyword::PARALLEL,
10475 Keyword::PARQUET,
10476 Keyword::PARTITION,
10477 Keyword::REGION,
10478 Keyword::REMOVEQUOTES,
10479 Keyword::ROWGROUPSIZE,
10480 Keyword::STATUPDATE,
10481 Keyword::TIMEFORMAT,
10482 Keyword::TRUNCATECOLUMNS,
10483 Keyword::ZSTD,
10484 ]) {
10485 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10486 Some(Keyword::ACCEPTINVCHARS) => {
10487 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10489 Some(self.parse_literal_string()?)
10490 } else {
10491 None
10492 };
10493 CopyLegacyOption::AcceptInvChars(ch)
10494 }
10495 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10496 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10497 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10498 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10499 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10500 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10501 Some(Keyword::COMPUPDATE) => {
10502 let preset = self.parse_keyword(Keyword::PRESET);
10503 let enabled = match self.parse_one_of_keywords(&[
10504 Keyword::TRUE,
10505 Keyword::FALSE,
10506 Keyword::ON,
10507 Keyword::OFF,
10508 ]) {
10509 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10510 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10511 _ => None,
10512 };
10513 CopyLegacyOption::CompUpdate { preset, enabled }
10514 }
10515 Some(Keyword::CSV) => CopyLegacyOption::Csv({
10516 let mut opts = vec![];
10517 while let Some(opt) =
10518 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10519 {
10520 opts.push(opt);
10521 }
10522 opts
10523 }),
10524 Some(Keyword::DATEFORMAT) => {
10525 let _ = self.parse_keyword(Keyword::AS);
10526 let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10527 Some(self.parse_literal_string()?)
10528 } else {
10529 None
10530 };
10531 CopyLegacyOption::DateFormat(fmt)
10532 }
10533 Some(Keyword::DELIMITER) => {
10534 let _ = self.parse_keyword(Keyword::AS);
10535 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10536 }
10537 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10538 Some(Keyword::ENCRYPTED) => {
10539 let auto = self.parse_keyword(Keyword::AUTO);
10540 CopyLegacyOption::Encrypted { auto }
10541 }
10542 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10543 Some(Keyword::EXTENSION) => {
10544 let ext = self.parse_literal_string()?;
10545 CopyLegacyOption::Extension(ext)
10546 }
10547 Some(Keyword::FIXEDWIDTH) => {
10548 let spec = self.parse_literal_string()?;
10549 CopyLegacyOption::FixedWidth(spec)
10550 }
10551 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10552 Some(Keyword::HEADER) => CopyLegacyOption::Header,
10553 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10554 Some(Keyword::IGNOREHEADER) => {
10555 let _ = self.parse_keyword(Keyword::AS);
10556 let num_rows = self.parse_literal_uint()?;
10557 CopyLegacyOption::IgnoreHeader(num_rows)
10558 }
10559 Some(Keyword::JSON) => CopyLegacyOption::Json,
10560 Some(Keyword::MANIFEST) => {
10561 let verbose = self.parse_keyword(Keyword::VERBOSE);
10562 CopyLegacyOption::Manifest { verbose }
10563 }
10564 Some(Keyword::MAXFILESIZE) => {
10565 let _ = self.parse_keyword(Keyword::AS);
10566 let size = self.parse_number_value()?.value;
10567 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10568 Some(Keyword::MB) => Some(FileSizeUnit::MB),
10569 Some(Keyword::GB) => Some(FileSizeUnit::GB),
10570 _ => None,
10571 };
10572 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10573 }
10574 Some(Keyword::NULL) => {
10575 let _ = self.parse_keyword(Keyword::AS);
10576 CopyLegacyOption::Null(self.parse_literal_string()?)
10577 }
10578 Some(Keyword::PARALLEL) => {
10579 let enabled = match self.parse_one_of_keywords(&[
10580 Keyword::TRUE,
10581 Keyword::FALSE,
10582 Keyword::ON,
10583 Keyword::OFF,
10584 ]) {
10585 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10586 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10587 _ => None,
10588 };
10589 CopyLegacyOption::Parallel(enabled)
10590 }
10591 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10592 Some(Keyword::PARTITION) => {
10593 self.expect_keyword(Keyword::BY)?;
10594 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10595 let include = self.parse_keyword(Keyword::INCLUDE);
10596 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10597 }
10598 Some(Keyword::REGION) => {
10599 let _ = self.parse_keyword(Keyword::AS);
10600 let region = self.parse_literal_string()?;
10601 CopyLegacyOption::Region(region)
10602 }
10603 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
10604 Some(Keyword::ROWGROUPSIZE) => {
10605 let _ = self.parse_keyword(Keyword::AS);
10606 let file_size = self.parse_file_size()?;
10607 CopyLegacyOption::RowGroupSize(file_size)
10608 }
10609 Some(Keyword::STATUPDATE) => {
10610 let enabled = match self.parse_one_of_keywords(&[
10611 Keyword::TRUE,
10612 Keyword::FALSE,
10613 Keyword::ON,
10614 Keyword::OFF,
10615 ]) {
10616 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10617 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10618 _ => None,
10619 };
10620 CopyLegacyOption::StatUpdate(enabled)
10621 }
10622 Some(Keyword::TIMEFORMAT) => {
10623 let _ = self.parse_keyword(Keyword::AS);
10624 let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10625 Some(self.parse_literal_string()?)
10626 } else {
10627 None
10628 };
10629 CopyLegacyOption::TimeFormat(fmt)
10630 }
10631 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10632 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10633 _ => self.expected("option", self.peek_token())?,
10634 };
10635 Ok(ret)
10636 }
10637
10638 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10639 let size = self.parse_number_value()?.value;
10640 let unit = self.maybe_parse_file_size_unit();
10641 Ok(FileSize { size, unit })
10642 }
10643
10644 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10645 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10646 Some(Keyword::MB) => Some(FileSizeUnit::MB),
10647 Some(Keyword::GB) => Some(FileSizeUnit::GB),
10648 _ => None,
10649 }
10650 }
10651
10652 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10653 if self.parse_keyword(Keyword::DEFAULT) {
10654 Ok(IamRoleKind::Default)
10655 } else {
10656 let arn = self.parse_literal_string()?;
10657 Ok(IamRoleKind::Arn(arn))
10658 }
10659 }
10660
    /// Parse a single sub-option of the legacy `CSV` COPY option:
    /// `HEADER`, `QUOTE [AS] 'c'`, `ESCAPE [AS] 'c'`,
    /// `FORCE NOT NULL col, ...` or `FORCE QUOTE col, ...`.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                // Optional `AS` noise word before the quote character.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                // Optional `AS` noise word before the escape character.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            // NOTE(review): these guards consume tokens as a side effect to
            // disambiguate `FORCE NOT NULL` from `FORCE QUOTE`; this relies on
            // `parse_keywords` rewinding when the full sequence does not match
            // — confirm against its definition before restructuring.
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            _ => self.expected("csv option", self.peek_token())?,
        };
        Ok(ret)
    }
10691
10692 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10693 let s = self.parse_literal_string()?;
10694 if s.len() != 1 {
10695 let loc = self
10696 .tokens
10697 .get(self.index - 1)
10698 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10699 return parser_err!(format!("Expect a char, found {s:?}"), loc);
10700 }
10701 Ok(s.chars().next().unwrap())
10702 }
10703
    /// Parse the tab-separated data rows that follow a `COPY ... FROM STDIN;`
    /// statement. Thin alias for [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
10709
10710 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10711 let mut values = vec![];
10712 let mut content = String::from("");
10713 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10714 match t {
10715 Token::Whitespace(Whitespace::Tab) => {
10716 values.push(Some(content.to_string()));
10717 content.clear();
10718 }
10719 Token::Whitespace(Whitespace::Newline) => {
10720 values.push(Some(content.to_string()));
10721 content.clear();
10722 }
10723 Token::Backslash => {
10724 if self.consume_token(&Token::Period) {
10725 return values;
10726 }
10727 if let Token::Word(w) = self.next_token().token {
10728 if w.value == "N" {
10729 values.push(None);
10730 }
10731 }
10732 }
10733 _ => {
10734 content.push_str(&t.to_string());
10735 }
10736 }
10737 }
10738 values
10739 }
10740
    /// Parse a literal value (boolean, NULL, number, one of the many string
    /// literal kinds, or a placeholder), returning it with its source span.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper to attach the consumed token's span to a parsed `Value`.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are literal values only in dialects that support
                // boolean literals; elsewhere they fall through to the error.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a string literal,
                // keyed on its quote style.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Single/double quoted strings may absorb directly following
            // literals when the dialect supports implicit concatenation.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            // Byte-string and raw-string literal variants map 1:1 onto their
            // `Value` counterparts.
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` style placeholders: the name must follow the
            // sigil immediately (`next_token_no_skip`, so whitespace is not
            // skipped) and may be a word or an integer.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The placeholder's span covers the sigil through the name.
                Ok(Value::Placeholder(tok.to_string() + &ident.value)
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
10852
10853 fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
10854 if self.dialect.supports_string_literal_concatenation() {
10855 while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
10856 self.peek_token_ref().token
10857 {
10858 str.push_str(s.clone().as_str());
10859 self.advance_token();
10860 }
10861 }
10862 str
10863 }
10864
10865 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10867 let value_wrapper = self.parse_value()?;
10868 match &value_wrapper.value {
10869 Value::Number(_, _) => Ok(value_wrapper),
10870 Value::Placeholder(_) => Ok(value_wrapper),
10871 _ => {
10872 self.prev_token();
10873 self.expected("literal number", self.peek_token())
10874 }
10875 }
10876 }
10877
10878 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
10881 let next_token = self.next_token();
10882 match next_token.token {
10883 Token::Plus => Ok(Expr::UnaryOp {
10884 op: UnaryOperator::Plus,
10885 expr: Box::new(Expr::Value(self.parse_number_value()?)),
10886 }),
10887 Token::Minus => Ok(Expr::UnaryOp {
10888 op: UnaryOperator::Minus,
10889 expr: Box::new(Expr::Value(self.parse_number_value()?)),
10890 }),
10891 _ => {
10892 self.prev_token();
10893 Ok(Expr::Value(self.parse_number_value()?))
10894 }
10895 }
10896 }
10897
10898 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
10899 let next_token = self.next_token();
10900 let span = next_token.span;
10901 match next_token.token {
10902 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
10903 Value::SingleQuotedString(s.to_string()).with_span(span),
10904 )),
10905 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
10906 Value::DoubleQuotedString(s.to_string()).with_span(span),
10907 )),
10908 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
10909 Value::HexStringLiteral(s.to_string()).with_span(span),
10910 )),
10911 unexpected => self.expected(
10912 "a string value",
10913 TokenWithSpan {
10914 token: unexpected,
10915 span,
10916 },
10917 ),
10918 }
10919 }
10920
10921 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
10923 let next_token = self.next_token();
10924 match next_token.token {
10925 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
10926 _ => self.expected("literal int", next_token),
10927 }
10928 }
10929
    /// Parse the string body of `CREATE FUNCTION ... AS 'body' [, 'link_symbol']`.
    ///
    /// Postgres/Generic dialects additionally accept a dollar-quoted body
    /// (`$$ ... $$`); other dialects take an ordinary string literal.
    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
        // Shared between the body and the optional link symbol.
        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
            let peek_token = parser.peek_token();
            let span = peek_token.span;
            match peek_token.token {
                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
                {
                    // Only consume the token once we know we can use it.
                    parser.next_token();
                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
                }
                _ => Ok(Expr::Value(
                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
                )),
            }
        };

        Ok(CreateFunctionBody::AsBeforeOptions {
            body: parse_string_expr(self)?,
            // A second comma-separated string names the link symbol.
            link_symbol: if self.consume_token(&Token::Comma) {
                Some(parse_string_expr(self)?)
            } else {
                None
            },
        })
    }
10957
10958 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
10960 let next_token = self.next_token();
10961 match next_token.token {
10962 Token::Word(Word {
10963 value,
10964 keyword: Keyword::NoKeyword,
10965 ..
10966 }) => Ok(value),
10967 Token::SingleQuotedString(s) => Ok(s),
10968 Token::DoubleQuotedString(s) => Ok(s),
10969 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
10970 Ok(s)
10971 }
10972 Token::UnicodeStringLiteral(s) => Ok(s),
10973 _ => self.expected("literal string", next_token),
10974 }
10975 }
10976
10977 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
10979 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
10980 Some(Keyword::TRUE) => Ok(true),
10981 Some(Keyword::FALSE) => Ok(false),
10982 _ => self.expected("TRUE or FALSE", self.peek_token()),
10983 }
10984 }
10985
10986 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
10988 let neg = self.parse_keyword(Keyword::NOT);
10989 let normalized_form = self.maybe_parse(|parser| {
10990 match parser.parse_one_of_keywords(&[
10991 Keyword::NFC,
10992 Keyword::NFD,
10993 Keyword::NFKC,
10994 Keyword::NFKD,
10995 ]) {
10996 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
10997 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
10998 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
10999 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
11000 _ => parser.expected("unicode normalization form", parser.peek_token()),
11001 }
11002 })?;
11003 if self.parse_keyword(Keyword::NORMALIZED) {
11004 return Ok(Expr::IsNormalized {
11005 expr: Box::new(expr),
11006 form: normalized_form,
11007 negated: neg,
11008 });
11009 }
11010 self.expected("unicode normalization form", self.peek_token())
11011 }
11012
11013 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
11014 self.expect_token(&Token::LParen)?;
11015 let values = self.parse_comma_separated(|parser| {
11016 let name = parser.parse_literal_string()?;
11017 let e = if parser.consume_token(&Token::Eq) {
11018 let value = parser.parse_number()?;
11019 EnumMember::NamedValue(name, value)
11020 } else {
11021 EnumMember::Name(name)
11022 };
11023 Ok(e)
11024 })?;
11025 self.expect_token(&Token::RParen)?;
11026
11027 Ok(values)
11028 }
11029
11030 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
11032 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
11033 if trailing_bracket.0 {
11034 return parser_err!(
11035 format!("unmatched > after parsing data type {ty}"),
11036 self.peek_token()
11037 );
11038 }
11039
11040 Ok(ty)
11041 }
11042
11043 fn parse_data_type_helper(
11044 &mut self,
11045 ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
11046 let dialect = self.dialect;
11047 self.advance_token();
11048 let next_token = self.get_current_token();
11049 let next_token_index = self.get_current_index();
11050
11051 let mut trailing_bracket: MatchedTrailingBracket = false.into();
11052 let mut data = match &next_token.token {
11053 Token::Word(w) => match w.keyword {
11054 Keyword::BOOLEAN => Ok(DataType::Boolean),
11055 Keyword::BOOL => Ok(DataType::Bool),
11056 Keyword::FLOAT => {
11057 let precision = self.parse_exact_number_optional_precision_scale()?;
11058
11059 if self.parse_keyword(Keyword::UNSIGNED) {
11060 Ok(DataType::FloatUnsigned(precision))
11061 } else {
11062 Ok(DataType::Float(precision))
11063 }
11064 }
11065 Keyword::REAL => {
11066 if self.parse_keyword(Keyword::UNSIGNED) {
11067 Ok(DataType::RealUnsigned)
11068 } else {
11069 Ok(DataType::Real)
11070 }
11071 }
11072 Keyword::FLOAT4 => Ok(DataType::Float4),
11073 Keyword::FLOAT32 => Ok(DataType::Float32),
11074 Keyword::FLOAT64 => Ok(DataType::Float64),
11075 Keyword::FLOAT8 => Ok(DataType::Float8),
11076 Keyword::DOUBLE => {
11077 if self.parse_keyword(Keyword::PRECISION) {
11078 if self.parse_keyword(Keyword::UNSIGNED) {
11079 Ok(DataType::DoublePrecisionUnsigned)
11080 } else {
11081 Ok(DataType::DoublePrecision)
11082 }
11083 } else {
11084 let precision = self.parse_exact_number_optional_precision_scale()?;
11085
11086 if self.parse_keyword(Keyword::UNSIGNED) {
11087 Ok(DataType::DoubleUnsigned(precision))
11088 } else {
11089 Ok(DataType::Double(precision))
11090 }
11091 }
11092 }
11093 Keyword::TINYINT => {
11094 let optional_precision = self.parse_optional_precision();
11095 if self.parse_keyword(Keyword::UNSIGNED) {
11096 Ok(DataType::TinyIntUnsigned(optional_precision?))
11097 } else {
11098 if dialect.supports_data_type_signed_suffix() {
11099 let _ = self.parse_keyword(Keyword::SIGNED);
11100 }
11101 Ok(DataType::TinyInt(optional_precision?))
11102 }
11103 }
11104 Keyword::INT2 => {
11105 let optional_precision = self.parse_optional_precision();
11106 if self.parse_keyword(Keyword::UNSIGNED) {
11107 Ok(DataType::Int2Unsigned(optional_precision?))
11108 } else {
11109 Ok(DataType::Int2(optional_precision?))
11110 }
11111 }
11112 Keyword::SMALLINT => {
11113 let optional_precision = self.parse_optional_precision();
11114 if self.parse_keyword(Keyword::UNSIGNED) {
11115 Ok(DataType::SmallIntUnsigned(optional_precision?))
11116 } else {
11117 if dialect.supports_data_type_signed_suffix() {
11118 let _ = self.parse_keyword(Keyword::SIGNED);
11119 }
11120 Ok(DataType::SmallInt(optional_precision?))
11121 }
11122 }
11123 Keyword::MEDIUMINT => {
11124 let optional_precision = self.parse_optional_precision();
11125 if self.parse_keyword(Keyword::UNSIGNED) {
11126 Ok(DataType::MediumIntUnsigned(optional_precision?))
11127 } else {
11128 if dialect.supports_data_type_signed_suffix() {
11129 let _ = self.parse_keyword(Keyword::SIGNED);
11130 }
11131 Ok(DataType::MediumInt(optional_precision?))
11132 }
11133 }
11134 Keyword::INT => {
11135 let optional_precision = self.parse_optional_precision();
11136 if self.parse_keyword(Keyword::UNSIGNED) {
11137 Ok(DataType::IntUnsigned(optional_precision?))
11138 } else {
11139 if dialect.supports_data_type_signed_suffix() {
11140 let _ = self.parse_keyword(Keyword::SIGNED);
11141 }
11142 Ok(DataType::Int(optional_precision?))
11143 }
11144 }
11145 Keyword::INT4 => {
11146 let optional_precision = self.parse_optional_precision();
11147 if self.parse_keyword(Keyword::UNSIGNED) {
11148 Ok(DataType::Int4Unsigned(optional_precision?))
11149 } else {
11150 Ok(DataType::Int4(optional_precision?))
11151 }
11152 }
11153 Keyword::INT8 => {
11154 let optional_precision = self.parse_optional_precision();
11155 if self.parse_keyword(Keyword::UNSIGNED) {
11156 Ok(DataType::Int8Unsigned(optional_precision?))
11157 } else {
11158 Ok(DataType::Int8(optional_precision?))
11159 }
11160 }
11161 Keyword::INT16 => Ok(DataType::Int16),
11162 Keyword::INT32 => Ok(DataType::Int32),
11163 Keyword::INT64 => Ok(DataType::Int64),
11164 Keyword::INT128 => Ok(DataType::Int128),
11165 Keyword::INT256 => Ok(DataType::Int256),
11166 Keyword::INTEGER => {
11167 let optional_precision = self.parse_optional_precision();
11168 if self.parse_keyword(Keyword::UNSIGNED) {
11169 Ok(DataType::IntegerUnsigned(optional_precision?))
11170 } else {
11171 if dialect.supports_data_type_signed_suffix() {
11172 let _ = self.parse_keyword(Keyword::SIGNED);
11173 }
11174 Ok(DataType::Integer(optional_precision?))
11175 }
11176 }
11177 Keyword::BIGINT => {
11178 let optional_precision = self.parse_optional_precision();
11179 if self.parse_keyword(Keyword::UNSIGNED) {
11180 Ok(DataType::BigIntUnsigned(optional_precision?))
11181 } else {
11182 if dialect.supports_data_type_signed_suffix() {
11183 let _ = self.parse_keyword(Keyword::SIGNED);
11184 }
11185 Ok(DataType::BigInt(optional_precision?))
11186 }
11187 }
11188 Keyword::HUGEINT => Ok(DataType::HugeInt),
11189 Keyword::UBIGINT => Ok(DataType::UBigInt),
11190 Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11191 Keyword::USMALLINT => Ok(DataType::USmallInt),
11192 Keyword::UTINYINT => Ok(DataType::UTinyInt),
11193 Keyword::UINT8 => Ok(DataType::UInt8),
11194 Keyword::UINT16 => Ok(DataType::UInt16),
11195 Keyword::UINT32 => Ok(DataType::UInt32),
11196 Keyword::UINT64 => Ok(DataType::UInt64),
11197 Keyword::UINT128 => Ok(DataType::UInt128),
11198 Keyword::UINT256 => Ok(DataType::UInt256),
11199 Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11200 Keyword::NVARCHAR => {
11201 Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11202 }
11203 Keyword::CHARACTER => {
11204 if self.parse_keyword(Keyword::VARYING) {
11205 Ok(DataType::CharacterVarying(
11206 self.parse_optional_character_length()?,
11207 ))
11208 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11209 Ok(DataType::CharacterLargeObject(
11210 self.parse_optional_precision()?,
11211 ))
11212 } else {
11213 Ok(DataType::Character(self.parse_optional_character_length()?))
11214 }
11215 }
11216 Keyword::CHAR => {
11217 if self.parse_keyword(Keyword::VARYING) {
11218 Ok(DataType::CharVarying(
11219 self.parse_optional_character_length()?,
11220 ))
11221 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11222 Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11223 } else {
11224 Ok(DataType::Char(self.parse_optional_character_length()?))
11225 }
11226 }
11227 Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11228 Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11229 Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11230 Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11231 Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11232 Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11233 Keyword::LONGBLOB => Ok(DataType::LongBlob),
11234 Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11235 Keyword::BIT => {
11236 if self.parse_keyword(Keyword::VARYING) {
11237 Ok(DataType::BitVarying(self.parse_optional_precision()?))
11238 } else {
11239 Ok(DataType::Bit(self.parse_optional_precision()?))
11240 }
11241 }
11242 Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11243 Keyword::UUID => Ok(DataType::Uuid),
11244 Keyword::DATE => Ok(DataType::Date),
11245 Keyword::DATE32 => Ok(DataType::Date32),
11246 Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11247 Keyword::DATETIME64 => {
11248 self.prev_token();
11249 let (precision, time_zone) = self.parse_datetime_64()?;
11250 Ok(DataType::Datetime64(precision, time_zone))
11251 }
11252 Keyword::TIMESTAMP => {
11253 let precision = self.parse_optional_precision()?;
11254 let tz = if self.parse_keyword(Keyword::WITH) {
11255 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11256 TimezoneInfo::WithTimeZone
11257 } else if self.parse_keyword(Keyword::WITHOUT) {
11258 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11259 TimezoneInfo::WithoutTimeZone
11260 } else {
11261 TimezoneInfo::None
11262 };
11263 Ok(DataType::Timestamp(precision, tz))
11264 }
11265 Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11266 self.parse_optional_precision()?,
11267 TimezoneInfo::Tz,
11268 )),
11269 Keyword::TIMESTAMP_NTZ => {
11270 Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
11271 }
11272 Keyword::TIME => {
11273 let precision = self.parse_optional_precision()?;
11274 let tz = if self.parse_keyword(Keyword::WITH) {
11275 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11276 TimezoneInfo::WithTimeZone
11277 } else if self.parse_keyword(Keyword::WITHOUT) {
11278 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11279 TimezoneInfo::WithoutTimeZone
11280 } else {
11281 TimezoneInfo::None
11282 };
11283 Ok(DataType::Time(precision, tz))
11284 }
11285 Keyword::TIMETZ => Ok(DataType::Time(
11286 self.parse_optional_precision()?,
11287 TimezoneInfo::Tz,
11288 )),
11289 Keyword::INTERVAL => {
11290 if self.dialect.supports_interval_options() {
11291 let fields = self.maybe_parse_optional_interval_fields()?;
11292 let precision = self.parse_optional_precision()?;
11293 Ok(DataType::Interval { fields, precision })
11294 } else {
11295 Ok(DataType::Interval {
11296 fields: None,
11297 precision: None,
11298 })
11299 }
11300 }
11301 Keyword::JSON => Ok(DataType::JSON),
11302 Keyword::JSONB => Ok(DataType::JSONB),
11303 Keyword::REGCLASS => Ok(DataType::Regclass),
11304 Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11305 Keyword::FIXEDSTRING => {
11306 self.expect_token(&Token::LParen)?;
11307 let character_length = self.parse_literal_uint()?;
11308 self.expect_token(&Token::RParen)?;
11309 Ok(DataType::FixedString(character_length))
11310 }
11311 Keyword::TEXT => Ok(DataType::Text),
11312 Keyword::TINYTEXT => Ok(DataType::TinyText),
11313 Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11314 Keyword::LONGTEXT => Ok(DataType::LongText),
11315 Keyword::BYTEA => Ok(DataType::Bytea),
11316 Keyword::NUMERIC => Ok(DataType::Numeric(
11317 self.parse_exact_number_optional_precision_scale()?,
11318 )),
11319 Keyword::DECIMAL => {
11320 let precision = self.parse_exact_number_optional_precision_scale()?;
11321
11322 if self.parse_keyword(Keyword::UNSIGNED) {
11323 Ok(DataType::DecimalUnsigned(precision))
11324 } else {
11325 Ok(DataType::Decimal(precision))
11326 }
11327 }
11328 Keyword::DEC => {
11329 let precision = self.parse_exact_number_optional_precision_scale()?;
11330
11331 if self.parse_keyword(Keyword::UNSIGNED) {
11332 Ok(DataType::DecUnsigned(precision))
11333 } else {
11334 Ok(DataType::Dec(precision))
11335 }
11336 }
11337 Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11338 self.parse_exact_number_optional_precision_scale()?,
11339 )),
11340 Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11341 self.parse_exact_number_optional_precision_scale()?,
11342 )),
11343 Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11344 Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11345 Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11346 Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11347 Keyword::ARRAY => {
11348 if dialect_of!(self is SnowflakeDialect) {
11349 Ok(DataType::Array(ArrayElemTypeDef::None))
11350 } else if dialect_of!(self is ClickHouseDialect) {
11351 Ok(self.parse_sub_type(|internal_type| {
11352 DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11353 })?)
11354 } else {
11355 self.expect_token(&Token::Lt)?;
11356 let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11357 trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11358 Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11359 inside_type,
11360 ))))
11361 }
11362 }
11363 Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11364 self.prev_token();
11365 let field_defs = self.parse_duckdb_struct_type_def()?;
11366 Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11367 }
11368 Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11369 self.prev_token();
11370 let (field_defs, _trailing_bracket) =
11371 self.parse_struct_type_def(Self::parse_struct_field_def)?;
11372 trailing_bracket = _trailing_bracket;
11373 Ok(DataType::Struct(
11374 field_defs,
11375 StructBracketKind::AngleBrackets,
11376 ))
11377 }
11378 Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11379 self.prev_token();
11380 let fields = self.parse_union_type_def()?;
11381 Ok(DataType::Union(fields))
11382 }
11383 Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11384 Ok(self.parse_sub_type(DataType::Nullable)?)
11385 }
11386 Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11387 Ok(self.parse_sub_type(DataType::LowCardinality)?)
11388 }
11389 Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11390 self.prev_token();
11391 let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11392 Ok(DataType::Map(
11393 Box::new(key_data_type),
11394 Box::new(value_data_type),
11395 ))
11396 }
11397 Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11398 self.expect_token(&Token::LParen)?;
11399 let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11400 self.expect_token(&Token::RParen)?;
11401 Ok(DataType::Nested(field_defs))
11402 }
11403 Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11404 self.prev_token();
11405 let field_defs = self.parse_click_house_tuple_def()?;
11406 Ok(DataType::Tuple(field_defs))
11407 }
11408 Keyword::TRIGGER => Ok(DataType::Trigger),
11409 Keyword::SETOF => {
11410 let inner = self.parse_data_type()?;
11411 Ok(DataType::SetOf(Box::new(inner)))
11412 }
11413 Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11414 let _ = self.parse_keyword(Keyword::TYPE);
11415 Ok(DataType::AnyType)
11416 }
11417 Keyword::TABLE => {
11418 if self.peek_token() == Token::LParen {
11421 let columns = self.parse_returns_table_columns()?;
11422 Ok(DataType::Table(Some(columns)))
11423 } else {
11424 Ok(DataType::Table(None))
11425 }
11426 }
11427 Keyword::SIGNED => {
11428 if self.parse_keyword(Keyword::INTEGER) {
11429 Ok(DataType::SignedInteger)
11430 } else {
11431 Ok(DataType::Signed)
11432 }
11433 }
11434 Keyword::UNSIGNED => {
11435 if self.parse_keyword(Keyword::INTEGER) {
11436 Ok(DataType::UnsignedInteger)
11437 } else {
11438 Ok(DataType::Unsigned)
11439 }
11440 }
11441 Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11442 Ok(DataType::TsVector)
11443 }
11444 Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11445 Ok(DataType::TsQuery)
11446 }
11447 _ => {
11448 self.prev_token();
11449 let type_name = self.parse_object_name(false)?;
11450 if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11451 Ok(DataType::Custom(type_name, modifiers))
11452 } else {
11453 Ok(DataType::Custom(type_name, vec![]))
11454 }
11455 }
11456 },
11457 _ => self.expected_at("a data type name", next_token_index),
11458 }?;
11459
11460 if self.dialect.supports_array_typedef_with_brackets() {
11461 while self.consume_token(&Token::LBracket) {
11462 let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11464 self.expect_token(&Token::RBracket)?;
11465 data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11466 }
11467 }
11468 Ok((data, trailing_bracket))
11469 }
11470
    /// Parses a single column definition inside a `RETURNS TABLE (...)`
    /// clause; delegates to the ordinary column-definition parser.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
11474
11475 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11476 self.expect_token(&Token::LParen)?;
11477 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11478 self.expect_token(&Token::RParen)?;
11479 Ok(columns)
11480 }
11481
11482 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11483 self.expect_token(&Token::LParen)?;
11484 let mut values = Vec::new();
11485 loop {
11486 let next_token = self.next_token();
11487 match next_token.token {
11488 Token::SingleQuotedString(value) => values.push(value),
11489 _ => self.expected("a string", next_token)?,
11490 }
11491 let next_token = self.next_token();
11492 match next_token.token {
11493 Token::Comma => (),
11494 Token::RParen => break,
11495 _ => self.expected(", or }", next_token)?,
11496 }
11497 }
11498 Ok(values)
11499 }
11500
11501 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11503 let ident = self.parse_identifier()?;
11504 self.expect_keyword_is(Keyword::AS)?;
11505 let alias = self.parse_identifier()?;
11506 Ok(IdentWithAlias { ident, alias })
11507 }
11508
11509 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11511 let ident = self.parse_identifier()?;
11512 let _after_as = self.parse_keyword(Keyword::AS);
11513 let alias = self.parse_identifier()?;
11514 Ok(IdentWithAlias { ident, alias })
11515 }
11516
11517 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11519 self.parse_comma_separated(|parser| {
11520 parser.expect_token(&Token::LParen)?;
11521 let query = parser.parse_query()?;
11522 parser.expect_token(&Token::RParen)?;
11523 Ok(*query)
11524 })
11525 }
11526
11527 fn parse_distinct_required_set_quantifier(
11529 &mut self,
11530 operator_name: &str,
11531 ) -> Result<SetQuantifier, ParserError> {
11532 let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
11533 match quantifier {
11534 SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
11535 _ => Err(ParserError::ParserError(format!(
11536 "{operator_name} pipe operator requires DISTINCT modifier",
11537 ))),
11538 }
11539 }
11540
11541 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11543 if self.parse_keyword(Keyword::AS) {
11544 Ok(Some(self.parse_identifier()?))
11545 } else {
11546 self.maybe_parse(|parser| parser.parse_identifier())
11548 }
11549 }
11550
11551 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11553 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11554 parser.dialect.is_select_item_alias(explicit, kw, parser)
11555 }
11556 self.parse_optional_alias_inner(None, validator)
11557 }
11558
11559 pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
11563 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11564 parser.dialect.is_table_factor_alias(explicit, kw, parser)
11565 }
11566 let explicit = self.peek_keyword(Keyword::AS);
11567 match self.parse_optional_alias_inner(None, validator)? {
11568 Some(name) => {
11569 let columns = self.parse_table_alias_column_defs()?;
11570 Ok(Some(TableAlias {
11571 explicit,
11572 name,
11573 columns,
11574 }))
11575 }
11576 None => Ok(None),
11577 }
11578 }
11579
    /// Parses MySQL-style table-level index hints such as
    /// `USE INDEX (idx1, idx2)`, `IGNORE KEY FOR ORDER BY (idx)`, or
    /// `FORCE INDEX FOR JOIN (idx)`.
    ///
    /// Returns an empty vector when no hint keyword follows.
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        // Each hint starts with USE, IGNORE, or FORCE; keep consuming hints
        // until none of those keywords follows.
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                _ => {
                    // Unreachable in practice: parse_one_of_keywords can only
                    // return one of the three keywords listed above.
                    return self.expected(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token(),
                    )
                }
            };
            // INDEX and KEY are alternatives; record which one was written.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
                }
            };
            // Optional `FOR JOIN | ORDER BY | GROUP BY` clause scoping the hint.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // The index-name list is parenthesized and may be empty.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
11637
11638 pub fn parse_optional_alias(
11642 &mut self,
11643 reserved_kwds: &[Keyword],
11644 ) -> Result<Option<Ident>, ParserError> {
11645 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
11646 false
11647 }
11648 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
11649 }
11650
    /// Core of optional-alias parsing.
    ///
    /// First consumes an optional `AS`. A following word becomes the alias
    /// when `AS` was explicit, when a reserved-word list was supplied and the
    /// word is not in it, or when the dialect-specific `validator` accepts
    /// it. Quoted strings are always accepted as aliases. If nothing
    /// matches, the token is pushed back and `None` is returned — unless an
    /// explicit `AS` was seen, in which case a missing alias is an error.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Accept the word when AS was explicit, or when a reserved-word
            // list was supplied and the word is not reserved. This arm must
            // be tried before the validator arm below.
            Token::Word(w)
                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Otherwise fall back to the dialect-provided validator.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Quoted strings are always valid aliases.
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                // An explicit AS demands an alias; otherwise restore the
                // token and report that no alias was found.
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                self.prev_token();
                Ok(None) }
        }
    }
11694
    /// Parses an optional `GROUP BY` clause.
    ///
    /// Supports `GROUP BY ALL`, dialect-specific `WITH ROLLUP/CUBE/TOTALS`
    /// modifiers, and `GROUPING SETS (...)`. Returns `None` when the
    /// `GROUP BY` keywords are absent.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` carries no expression list (None); otherwise
            // parse the comma-separated grouping expressions.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Collect any number of `WITH ROLLUP|CUBE|TOTALS` modifiers.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        _ => {
                            // Unreachable: expect_one_of_keywords only returns
                            // one of the three keywords listed above.
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token().span.start
                            )
                        }
                    });
                }
            }
            // `GROUPING SETS ( ... )`: each element is either a parenthesized
            // tuple of expressions or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
11750
11751 pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
11752 if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11753 let order_by =
11754 if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
11755 let order_by_options = self.parse_order_by_options()?;
11756 OrderBy {
11757 kind: OrderByKind::All(order_by_options),
11758 interpolate: None,
11759 }
11760 } else {
11761 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11762 let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11763 self.parse_interpolations()?
11764 } else {
11765 None
11766 };
11767 OrderBy {
11768 kind: OrderByKind::Expressions(exprs),
11769 interpolate,
11770 }
11771 };
11772 Ok(Some(order_by))
11773 } else {
11774 Ok(None)
11775 }
11776 }
11777
    /// Parses an optional LIMIT/OFFSET clause in any of the accepted forms:
    /// `OFFSET ... LIMIT ...`, `LIMIT ... OFFSET ...`, MySQL-style
    /// `LIMIT <offset>, <limit>`, and ClickHouse-style `LIMIT ... BY ...`.
    /// Returns `None` when no limit/offset tokens are present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may precede LIMIT.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        // `limit` below is Option<Option<Expr>>: outer None means no LIMIT
        // keyword; inner None means LIMIT was parsed without an expression.
        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL-style `LIMIT <offset>, <limit>`: only taken when no
            // standalone OFFSET was parsed and the first expression exists.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                // `expr.is_some()` was checked above, so this ok_or_else is
                // effectively a defensive unwrap with a clear error.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // ClickHouse `LIMIT n BY expr[, ...]`.
            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::BY)
            {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT, but only one OFFSET is accepted.
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit != Some(None)` excludes a LIMIT keyword whose expression was
        // absent (presumably e.g. `LIMIT ALL` — verify against parse_limit):
        // on its own that yields no clause.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
11831
11832 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
11835 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
11836 let fn_name = self.parse_object_name(false)?;
11837 self.parse_function_call(fn_name)
11838 .map(TableObject::TableFunction)
11839 } else {
11840 self.parse_object_name(false).map(TableObject::TableName)
11841 }
11842 }
11843
    /// Parses a possibly qualified, possibly quoted object name such as
    /// `foo` or `myschema."table"`. `in_table_clause` indicates whether the
    /// name appears in a table position, which affects BigQuery's unquoted
    /// hyphenated identifiers. Wildcard (`*`) parts are not allowed here.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
11853
    /// Parses a possibly-qualified object name.
    ///
    /// `allow_wildcards` permits `*` as a name part; `in_table_clause`
    /// enables BigQuery's unquoted hyphenated identifiers. Also recognizes
    /// dialect-specific function-style name parts and double-dot notation
    /// (`db..table`), and splits BigQuery quoted identifiers that contain
    /// dots into separate parts.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names may contain unquoted hyphens; the helper
            // also reports whether a part ended with a swallowed period.
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token().token == Token::Mul {
                    // Accept `*` as a name part when the caller allows it.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token().token, Token::Period)
                {
                    // Double-dot notation (`db..table`): insert an empty
                    // identifier for the omitted middle part.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a function call as a name part.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // For BigQuery, a quoted identifier may itself contain dots (e.g.
        // `"a.b.c"`); split any such part into separate identifier parts,
        // preserving quote style and span.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
11950
11951 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
11953 let mut idents = vec![];
11954 loop {
11955 match &self.peek_token_ref().token {
11956 Token::Word(w) => {
11957 idents.push(w.clone().into_ident(self.peek_token_ref().span));
11958 }
11959 Token::EOF | Token::Eq => break,
11960 _ => {}
11961 }
11962 self.advance_token();
11963 }
11964 Ok(idents)
11965 }
11966
    /// Parses a period-separated multipart identifier (e.g. `foo.bar.baz`),
    /// consuming tokens until EOF. Errors on empty input, a trailing
    /// period, or any token that is not a word or separating period.
    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];

        // The first part must be a word token.
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
            Token::EOF => {
                return Err(ParserError::ParserError(
                    "Empty input when parsing identifier".to_string(),
                ))?
            }
            token => {
                return Err(ParserError::ParserError(format!(
                    "Unexpected token in identifier: {token}"
                )))?
            }
        };

        // Remaining parts alternate a period and a word until EOF.
        loop {
            match self.next_token().token {
                Token::Period => {
                    let next_token = self.next_token();
                    match next_token.token {
                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
                        Token::EOF => {
                            return Err(ParserError::ParserError(
                                "Trailing period in identifier".to_string(),
                            ))?
                        }
                        token => {
                            return Err(ParserError::ParserError(format!(
                                "Unexpected token following period in identifier: {token}"
                            )))?
                        }
                    }
                }
                Token::EOF => break,
                token => {
                    return Err(ParserError::ParserError(format!(
                        "Unexpected token in identifier: {token}"
                    )))?;
                }
            }
        }

        Ok(idents)
    }
12056
12057 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
12059 let next_token = self.next_token();
12060 match next_token.token {
12061 Token::Word(w) => Ok(w.into_ident(next_token.span)),
12062 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
12063 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
12064 _ => self.expected("identifier", next_token),
12065 }
12066 }
12067
    /// Parses an identifier that may contain unquoted hyphens (used for
    /// BigQuery table names such as `my-project.dataset.table`).
    ///
    /// Returns the identifier and a flag indicating whether it ended with a
    /// period that was swallowed by the tokenizer (a number token like
    /// `123.` absorbed into the identifier).
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted identifiers may absorb hyphen continuations.
                if quote_style_is_none {
                    // Use the no-skip token accessors so interleaved
                    // whitespace terminates the hyphen chain.
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // The tokenizer may have consumed the period
                                // separating name parts as part of the number
                                // (e.g. `1.` in `a-1.b`): keep only the
                                // digits before the dot and report that a
                                // period terminated this part.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A trailing number requires whitespace after
                                // it unless a period follows.
                                !matches!(self.peek_token().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // An identifier ending in a numeric segment must be
                    // delimited by whitespace or EOF.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to ordinary identifier parsing.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
12145
12146 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
12148 if self.consume_token(&Token::LParen) {
12149 if self.peek_token().token == Token::RParen {
12150 self.next_token();
12151 Ok(vec![])
12152 } else {
12153 let cols = self.parse_comma_separated_with_trailing_commas(
12154 Parser::parse_view_column,
12155 self.dialect.supports_column_definition_trailing_commas(),
12156 Self::is_reserved_for_column_alias,
12157 )?;
12158 self.expect_token(&Token::RParen)?;
12159 Ok(cols)
12160 }
12161 } else {
12162 Ok(vec![])
12163 }
12164 }
12165
12166 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12168 let name = self.parse_identifier()?;
12169 let options = self.parse_view_column_options()?;
12170 let data_type = if dialect_of!(self is ClickHouseDialect) {
12171 Some(self.parse_data_type()?)
12172 } else {
12173 None
12174 };
12175 Ok(ViewColumnDef {
12176 name,
12177 data_type,
12178 options,
12179 })
12180 }
12181
12182 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12183 let mut options = Vec::new();
12184 loop {
12185 let option = self.parse_optional_column_option()?;
12186 if let Some(option) = option {
12187 options.push(option);
12188 } else {
12189 break;
12190 }
12191 }
12192 if options.is_empty() {
12193 Ok(None)
12194 } else if self.dialect.supports_space_separated_column_options() {
12195 Ok(Some(ColumnOptions::SpaceSeparated(options)))
12196 } else {
12197 Ok(Some(ColumnOptions::CommaSeparated(options)))
12198 }
12199 }
12200
12201 pub fn parse_parenthesized_column_list(
12204 &mut self,
12205 optional: IsOptional,
12206 allow_empty: bool,
12207 ) -> Result<Vec<Ident>, ParserError> {
12208 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
12209 }
12210
12211 pub fn parse_parenthesized_compound_identifier_list(
12212 &mut self,
12213 optional: IsOptional,
12214 allow_empty: bool,
12215 ) -> Result<Vec<Expr>, ParserError> {
12216 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12217 Ok(Expr::CompoundIdentifier(
12218 p.parse_period_separated(|p| p.parse_identifier())?,
12219 ))
12220 })
12221 }
12222
12223 fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
12226 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
12227 p.parse_create_index_expr()
12228 })
12229 }
12230
12231 pub fn parse_parenthesized_qualified_column_list(
12234 &mut self,
12235 optional: IsOptional,
12236 allow_empty: bool,
12237 ) -> Result<Vec<ObjectName>, ParserError> {
12238 self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12239 p.parse_object_name(true)
12240 })
12241 }
12242
12243 fn parse_parenthesized_column_list_inner<F, T>(
12246 &mut self,
12247 optional: IsOptional,
12248 allow_empty: bool,
12249 mut f: F,
12250 ) -> Result<Vec<T>, ParserError>
12251 where
12252 F: FnMut(&mut Parser) -> Result<T, ParserError>,
12253 {
12254 if self.consume_token(&Token::LParen) {
12255 if allow_empty && self.peek_token().token == Token::RParen {
12256 self.next_token();
12257 Ok(vec![])
12258 } else {
12259 let cols = self.parse_comma_separated(|p| f(p))?;
12260 self.expect_token(&Token::RParen)?;
12261 Ok(cols)
12262 }
12263 } else if optional == Optional {
12264 Ok(vec![])
12265 } else {
12266 self.expected("a list of columns in parentheses", self.peek_token())
12267 }
12268 }
12269
12270 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12272 if self.consume_token(&Token::LParen) {
12273 let cols = self.parse_comma_separated(|p| {
12274 let name = p.parse_identifier()?;
12275 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12276 Ok(TableAliasColumnDef { name, data_type })
12277 })?;
12278 self.expect_token(&Token::RParen)?;
12279 Ok(cols)
12280 } else {
12281 Ok(vec![])
12282 }
12283 }
12284
12285 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12286 self.expect_token(&Token::LParen)?;
12287 let n = self.parse_literal_uint()?;
12288 self.expect_token(&Token::RParen)?;
12289 Ok(n)
12290 }
12291
12292 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12293 if self.consume_token(&Token::LParen) {
12294 let n = self.parse_literal_uint()?;
12295 self.expect_token(&Token::RParen)?;
12296 Ok(Some(n))
12297 } else {
12298 Ok(None)
12299 }
12300 }
12301
    /// Optionally parses the fields qualifier of an INTERVAL type, e.g.
    /// `YEAR`, `DAY TO SECOND`, `HOUR TO MINUTE`. Returns `None` when no
    /// interval field keyword follows.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            // `YEAR` or `YEAR TO MONTH`.
            Some(Keyword::YEAR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            // `DAY` or `DAY TO HOUR|MINUTE|SECOND`.
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        _ => {
                            // Unreachable: expect_one_of_keywords returns
                            // only the keywords listed above.
                            self.prev_token();
                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            // `HOUR` or `HOUR TO MINUTE|SECOND`.
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        _ => {
                            self.prev_token();
                            self.expected("MINUTE or SECOND", self.peek_token())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            // `MINUTE` or `MINUTE TO SECOND`.
            Some(Keyword::MINUTE) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            Some(_) => {
                // Unreachable given the keyword list above; restore the
                // token and report what was expected.
                self.prev_token();
                self.expected(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token(),
                )
            }
            None => Ok(None),
        }
    }
12380
12381 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12389 self.expect_keyword_is(Keyword::DATETIME64)?;
12390 self.expect_token(&Token::LParen)?;
12391 let precision = self.parse_literal_uint()?;
12392 let time_zone = if self.consume_token(&Token::Comma) {
12393 Some(self.parse_literal_string()?)
12394 } else {
12395 None
12396 };
12397 self.expect_token(&Token::RParen)?;
12398 Ok((precision, time_zone))
12399 }
12400
12401 pub fn parse_optional_character_length(
12402 &mut self,
12403 ) -> Result<Option<CharacterLength>, ParserError> {
12404 if self.consume_token(&Token::LParen) {
12405 let character_length = self.parse_character_length()?;
12406 self.expect_token(&Token::RParen)?;
12407 Ok(Some(character_length))
12408 } else {
12409 Ok(None)
12410 }
12411 }
12412
12413 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12414 if self.consume_token(&Token::LParen) {
12415 let binary_length = self.parse_binary_length()?;
12416 self.expect_token(&Token::RParen)?;
12417 Ok(Some(binary_length))
12418 } else {
12419 Ok(None)
12420 }
12421 }
12422
12423 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12424 if self.parse_keyword(Keyword::MAX) {
12425 return Ok(CharacterLength::Max);
12426 }
12427 let length = self.parse_literal_uint()?;
12428 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12429 Some(CharLengthUnits::Characters)
12430 } else if self.parse_keyword(Keyword::OCTETS) {
12431 Some(CharLengthUnits::Octets)
12432 } else {
12433 None
12434 };
12435 Ok(CharacterLength::IntegerLength { length, unit })
12436 }
12437
12438 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12439 if self.parse_keyword(Keyword::MAX) {
12440 return Ok(BinaryLength::Max);
12441 }
12442 let length = self.parse_literal_uint()?;
12443 Ok(BinaryLength::IntegerLength { length })
12444 }
12445
12446 pub fn parse_optional_precision_scale(
12447 &mut self,
12448 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12449 if self.consume_token(&Token::LParen) {
12450 let n = self.parse_literal_uint()?;
12451 let scale = if self.consume_token(&Token::Comma) {
12452 Some(self.parse_literal_uint()?)
12453 } else {
12454 None
12455 };
12456 self.expect_token(&Token::RParen)?;
12457 Ok((Some(n), scale))
12458 } else {
12459 Ok((None, None))
12460 }
12461 }
12462
12463 pub fn parse_exact_number_optional_precision_scale(
12464 &mut self,
12465 ) -> Result<ExactNumberInfo, ParserError> {
12466 if self.consume_token(&Token::LParen) {
12467 let precision = self.parse_literal_uint()?;
12468 let scale = if self.consume_token(&Token::Comma) {
12469 Some(self.parse_signed_integer()?)
12470 } else {
12471 None
12472 };
12473
12474 self.expect_token(&Token::RParen)?;
12475
12476 match scale {
12477 None => Ok(ExactNumberInfo::Precision(precision)),
12478 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12479 }
12480 } else {
12481 Ok(ExactNumberInfo::None)
12482 }
12483 }
12484
12485 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12487 let is_negative = self.consume_token(&Token::Minus);
12488
12489 if !is_negative {
12490 let _ = self.consume_token(&Token::Plus);
12491 }
12492
12493 let current_token = self.peek_token_ref();
12494 match ¤t_token.token {
12495 Token::Number(s, _) => {
12496 let s = s.clone();
12497 let span_start = current_token.span.start;
12498 self.advance_token();
12499 let value = Self::parse::<i64>(s, span_start)?;
12500 Ok(if is_negative { -value } else { value })
12501 }
12502 _ => self.expected_ref("number", current_token),
12503 }
12504 }
12505
12506 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
12507 if self.consume_token(&Token::LParen) {
12508 let mut modifiers = Vec::new();
12509 loop {
12510 let next_token = self.next_token();
12511 match next_token.token {
12512 Token::Word(w) => modifiers.push(w.to_string()),
12513 Token::Number(n, _) => modifiers.push(n),
12514 Token::SingleQuotedString(s) => modifiers.push(s),
12515
12516 Token::Comma => {
12517 continue;
12518 }
12519 Token::RParen => {
12520 break;
12521 }
12522 _ => self.expected("type modifiers", next_token)?,
12523 }
12524 }
12525
12526 Ok(Some(modifiers))
12527 } else {
12528 Ok(None)
12529 }
12530 }
12531
12532 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12534 where
12535 F: FnOnce(Box<DataType>) -> DataType,
12536 {
12537 self.expect_token(&Token::LParen)?;
12538 let inside_type = self.parse_data_type()?;
12539 self.expect_token(&Token::RParen)?;
12540 Ok(parent_type(inside_type.into()))
12541 }
12542
12543 fn parse_delete_setexpr_boxed(
12547 &mut self,
12548 delete_token: TokenWithSpan,
12549 ) -> Result<Box<SetExpr>, ParserError> {
12550 Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
12551 }
12552
    /// Parse a `DELETE` statement; `delete_token` is the already-consumed
    /// `DELETE` keyword token, kept for span tracking.
    ///
    /// Supports the MySQL multi-table form `DELETE t1, t2 FROM ...`,
    /// the keyword-less BigQuery/Oracle form `DELETE <table> WHERE ...`,
    /// and the optional `USING`, `WHERE`, `RETURNING`, `ORDER BY`, and
    /// `LIMIT` clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        // `tables` are names listed between DELETE and FROM (MySQL
        // multi-table form); `with_from_keyword` records whether a FROM
        // keyword appeared, which affects later display of the AST.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            // BigQuery and Oracle allow `DELETE <table>` without FROM.
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        // Target tables (with any joins), then the optional clauses in
        // their fixed grammatical order.
        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            tables,
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            order_by,
            limit,
        }))
    }
12610
    /// Parse `KILL [CONNECTION | QUERY | MUTATION] <id>`.
    ///
    /// `MUTATION` is only accepted for ClickHouse (and the generic
    /// dialect); other dialects report an error for it.
    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
        // The modifier keyword is optional; absence yields `None` below.
        let modifier_keyword =
            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);

        let id = self.parse_literal_uint()?;

        let modifier = match modifier_keyword {
            Some(Keyword::CONNECTION) => Some(KillType::Connection),
            Some(Keyword::QUERY) => Some(KillType::Query),
            Some(Keyword::MUTATION) => {
                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
                    Some(KillType::Mutation)
                } else {
                    // MUTATION was consumed but the dialect rejects it.
                    self.expected(
                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
                        self.peek_token(),
                    )?
                }
            }
            _ => None,
        };

        Ok(Statement::Kill { modifier, id })
    }
12636
    /// Parse an `EXPLAIN` / `DESCRIBE` statement.
    ///
    /// `describe_alias` records which introducer keyword was used. When
    /// the remainder parses as a full statement an `Explain` node is
    /// built; otherwise the input is treated as `DESCRIBE <table>` and
    /// an `ExplainTable` node is produced. Nested `EXPLAIN`s are
    /// rejected.
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Postgres-style `EXPLAIN (opt, ...)` utility options; mutually
        // exclusive with the keyword-style modifiers below.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        // Speculatively parse a statement; on failure fall through to
        // the DESCRIBE-table form.
        match self.maybe_parse(|parser| parser.parse_statement())? {
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // Hive allows `DESCRIBE [EXTENDED | FORMATTED] <table>`.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
12706
    /// Parse a full query: an optional `WITH` clause followed by either
    /// a DML statement usable as a query body (`INSERT` / `UPDATE` /
    /// `DELETE` / `MERGE`), or an ordinary query body plus its trailing
    /// clauses (`ORDER BY`, limits, `SETTINGS`, `FETCH`, `FOR`/locks,
    /// `FORMAT`, and pipe operators where supported).
    ///
    /// The recursion guard bounds nesting depth; it is released when
    /// `_guard` drops at the end of this call.
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            // The WITH token itself is kept for span tracking.
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // DML-as-query forms carry no trailing query clauses, so all
        // the optional fields are defaulted.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            // Ordinary query body, then the trailing clauses in their
            // fixed grammatical order.
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // Each FOR is either a for-clause (XML/JSON/BROWSE), which
            // ends the loop, or a locking clause (e.g. FOR UPDATE).
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // ClickHouse `FORMAT <ident>` / `FORMAT NULL` output clause.
            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::FORMAT)
            {
                if self.parse_keyword(Keyword::NULL) {
                    Some(FormatClause::Null)
                } else {
                    let ident = self.parse_identifier()?;
                    Some(FormatClause::Identifier(ident))
                }
            } else {
                None
            };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
12838
    /// Parse a sequence of BigQuery-style pipe operators, each
    /// introduced by `|>` followed by one of the supported operator
    /// keywords. Returns the operators in source order; an empty vector
    /// when no `|>` follows.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            // The keyword set below must stay in sync with the match
            // arms; an accepted-but-unmatched keyword hits the final
            // `unhandled` arm.
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // `AGGREGATE [exprs] [GROUP BY exprs]` — the leading
                    // expression list is absent when GROUP BY follows
                    // immediately.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                // INTERSECT/EXCEPT require an explicit DISTINCT (or ALL)
                // quantifier in pipe syntax.
                Keyword::INTERSECT => {
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // `PIVOT (aggs FOR col IN (source)) [alias]`
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    // The IN source is ANY [ORDER BY ...], a subquery,
                    // or an explicit value list.
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // `UNPIVOT (value FOR name IN (cols)) [alias]`
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Step back so the join parser sees its introducer
                    // keyword; exactly one join is allowed per pipe.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
13057
13058 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
13059 let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13060 && self.parse_keyword(Keyword::SETTINGS)
13061 {
13062 let key_values = self.parse_comma_separated(|p| {
13063 let key = p.parse_identifier()?;
13064 p.expect_token(&Token::Eq)?;
13065 let value = p.parse_expr()?;
13066 Ok(Setting { key, value })
13067 })?;
13068 Some(key_values)
13069 } else {
13070 None
13071 };
13072 Ok(settings)
13073 }
13074
13075 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
13077 if self.parse_keyword(Keyword::XML) {
13078 Ok(Some(self.parse_for_xml()?))
13079 } else if self.parse_keyword(Keyword::JSON) {
13080 Ok(Some(self.parse_for_json()?))
13081 } else if self.parse_keyword(Keyword::BROWSE) {
13082 Ok(Some(ForClause::Browse))
13083 } else {
13084 Ok(None)
13085 }
13086 }
13087
    /// Parse a MSSQL `FOR XML` clause body (`FOR XML` already consumed):
    /// the mode (`RAW [( 'name' )]` | `AUTO` | `EXPLICIT` |
    /// `PATH [( 'name' )]`) followed by comma-separated options
    /// (`ELEMENTS`, `BINARY BASE64`, `ROOT('name')`, `TYPE`).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            // RAW takes an optional parenthesized element name.
            let mut element_name = None;
            if self.peek_token().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            // PATH likewise takes an optional element name.
            let mut element_name = None;
            if self.peek_token().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        // NOTE(review): a comma followed by an unrecognized keyword is
        // silently skipped here rather than reported — presumably
        // intentional leniency; confirm before tightening.
        while self.peek_token().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                self.expect_keyword_is(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
13142
    /// Parse a MSSQL `FOR JSON` clause body (`FOR JSON` already
    /// consumed): the mode (`AUTO` | `PATH`) followed by
    /// comma-separated options (`ROOT('name')`, `INCLUDE_NULL_VALUES`,
    /// `WITHOUT_ARRAY_WRAPPER`).
    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
        let for_json = if self.parse_keyword(Keyword::AUTO) {
            ForJson::Auto
        } else if self.parse_keyword(Keyword::PATH) {
            ForJson::Path
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR JSON [AUTO | PATH ]".to_string(),
            ));
        };
        let mut root = None;
        let mut include_null_values = false;
        let mut without_array_wrapper = false;
        // NOTE(review): as in parse_for_xml, a comma followed by an
        // unrecognized keyword is silently skipped.
        while self.peek_token().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
                include_null_values = true;
            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
                without_array_wrapper = true;
            }
        }
        Ok(ForClause::Json {
            for_json,
            root,
            include_null_values,
            without_array_wrapper,
        })
    }
13176
13177 pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13179 let name = self.parse_identifier()?;
13180
13181 let mut cte = if self.parse_keyword(Keyword::AS) {
13182 let mut is_materialized = None;
13183 if dialect_of!(self is PostgreSqlDialect) {
13184 if self.parse_keyword(Keyword::MATERIALIZED) {
13185 is_materialized = Some(CteAsMaterialized::Materialized);
13186 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13187 is_materialized = Some(CteAsMaterialized::NotMaterialized);
13188 }
13189 }
13190 self.expect_token(&Token::LParen)?;
13191
13192 let query = self.parse_query()?;
13193 let closing_paren_token = self.expect_token(&Token::RParen)?;
13194
13195 let alias = TableAlias {
13196 explicit: false,
13197 name,
13198 columns: vec![],
13199 };
13200 Cte {
13201 alias,
13202 query,
13203 from: None,
13204 materialized: is_materialized,
13205 closing_paren_token: closing_paren_token.into(),
13206 }
13207 } else {
13208 let columns = self.parse_table_alias_column_defs()?;
13209 self.expect_keyword_is(Keyword::AS)?;
13210 let mut is_materialized = None;
13211 if dialect_of!(self is PostgreSqlDialect) {
13212 if self.parse_keyword(Keyword::MATERIALIZED) {
13213 is_materialized = Some(CteAsMaterialized::Materialized);
13214 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13215 is_materialized = Some(CteAsMaterialized::NotMaterialized);
13216 }
13217 }
13218 self.expect_token(&Token::LParen)?;
13219
13220 let query = self.parse_query()?;
13221 let closing_paren_token = self.expect_token(&Token::RParen)?;
13222
13223 let alias = TableAlias {
13224 explicit: false,
13225 name,
13226 columns,
13227 };
13228 Cte {
13229 alias,
13230 query,
13231 from: None,
13232 materialized: is_materialized,
13233 closing_paren_token: closing_paren_token.into(),
13234 }
13235 };
13236 if self.parse_keyword(Keyword::FROM) {
13237 cte.from = Some(self.parse_identifier()?);
13238 }
13239 Ok(cte)
13240 }
13241
    /// Parse one "atom" of a query body — a `SELECT`, a parenthesized
    /// subquery, a `VALUES`/`VALUE` list, or `TABLE <name>` — then hand
    /// off to `parse_remaining_set_exprs` to fold in any set operators
    /// (`UNION`/`EXCEPT`/`INTERSECT`/`MINUS`) binding tighter than
    /// `precedence`.
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // A parenthesized query keeps its own clause structure.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            // MySQL VALUES rows use a distinct `ROW(...)` syntax.
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, false)?)
        } else if self.parse_keyword(Keyword::VALUE) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, true)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
13279
13280 fn parse_remaining_set_exprs(
13284 &mut self,
13285 mut expr: SetExpr,
13286 precedence: u8,
13287 ) -> Result<Box<SetExpr>, ParserError> {
13288 loop {
13289 let op = self.parse_set_operator(&self.peek_token().token);
13291 let next_precedence = match op {
13292 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13294 10
13295 }
13296 Some(SetOperator::Intersect) => 20,
13298 None => break,
13300 };
13301 if precedence >= next_precedence {
13302 break;
13303 }
13304 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
13306 expr = SetExpr::SetOperation {
13307 left: Box::new(expr),
13308 op: op.unwrap(),
13309 set_quantifier,
13310 right: self.parse_query_body(next_precedence)?,
13311 };
13312 }
13313
13314 Ok(expr.into())
13315 }
13316
13317 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13318 match token {
13319 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13320 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13321 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13322 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13323 _ => None,
13324 }
13325 }
13326
    /// Parse the optional quantifier that follows a set operator:
    /// `DISTINCT BY NAME`, `BY NAME`, `ALL [BY NAME]`, or `DISTINCT`.
    ///
    /// The longer keyword sequences are tried first because each
    /// `parse_keyword(s)` call consumes tokens on success. Returns
    /// `SetQuantifier::None` when `op` is `None` or nothing matches.
    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
        match op {
            Some(
                SetOperator::Except
                | SetOperator::Intersect
                | SetOperator::Union
                | SetOperator::Minus,
            ) => {
                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::DistinctByName
                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::ByName
                } else if self.parse_keyword(Keyword::ALL) {
                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                        SetQuantifier::AllByName
                    } else {
                        SetQuantifier::All
                    }
                } else if self.parse_keyword(Keyword::DISTINCT) {
                    SetQuantifier::Distinct
                } else {
                    SetQuantifier::None
                }
            }
            _ => SetQuantifier::None,
        }
    }
13354
    /// Parse a single `SELECT` (no set operators, no `ORDER BY` — those
    /// belong to the enclosing query), including all of its optional
    /// clauses in grammatical order. Also handles the DuckDB-style
    /// "FROM-first" forms `FROM t SELECT ...` and bare `FROM t` when
    /// the dialect supports them.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        // Holds the FROM clause when it precedes SELECT (FROM-first).
        let mut from_first = None;

        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                // Bare `FROM t` with no SELECT at all: an implicit
                // select-everything with all other clauses empty.
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    distinct: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: None,
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        // BigQuery `SELECT [ALL|DISTINCT] AS VALUE/STRUCT`.
        let value_table_mode = self.parse_value_table_mode()?;

        // TOP may come before or after DISTINCT depending on dialect.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }
        let distinct = self.parse_all_or_distinct()?;
        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM t` with no projection.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Reuse the FROM parsed before SELECT (FROM-first), otherwise
        // parse an ordinary trailing FROM if present.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive `LATERAL VIEW [OUTER] expr name [AS aliases]`, repeated.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        // These keywords terminate the alias list.
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        // ClickHouse PREWHERE precedes WHERE.
        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        // Hive distribution clauses.
        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; remember which
        // came first so the AST round-trips faithfully.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        // Snowflake/Oracle hierarchical queries: peek for START/CONNECT
        // then back up so parse_connect_by sees its introducer keyword.
        let connect_by = if self.dialect.supports_connect_by()
            && self
                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
                .is_some()
        {
            self.prev_token();
            Some(self.parse_connect_by()?)
        } else {
            None
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            distinct,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
13574
13575 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
13576 if !dialect_of!(self is BigQueryDialect) {
13577 return Ok(None);
13578 }
13579
13580 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
13581 Some(ValueTableMode::DistinctAsValue)
13582 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
13583 Some(ValueTableMode::DistinctAsStruct)
13584 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
13585 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
13586 {
13587 Some(ValueTableMode::AsValue)
13588 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
13589 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
13590 {
13591 Some(ValueTableMode::AsStruct)
13592 } else if self.parse_keyword(Keyword::AS) {
13593 self.expected("VALUE or STRUCT", self.peek_token())?
13594 } else {
13595 None
13596 };
13597
13598 Ok(mode)
13599 }
13600
13601 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13605 where
13606 F: FnMut(&mut Parser) -> Result<T, ParserError>,
13607 {
13608 let current_state = self.state;
13609 self.state = state;
13610 let res = f(self);
13611 self.state = current_state;
13612 res
13613 }
13614
13615 pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
13616 let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
13617 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13618 parser.parse_comma_separated(Parser::parse_expr)
13619 })?;
13620 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13621 let condition = self.parse_expr()?;
13622 (condition, relationships)
13623 } else {
13624 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13625 let condition = self.parse_expr()?;
13626 self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
13627 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13628 parser.parse_comma_separated(Parser::parse_expr)
13629 })?;
13630 (condition, relationships)
13631 };
13632 Ok(ConnectBy {
13633 condition,
13634 relationships,
13635 })
13636 }
13637
13638 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13640 let token1 = self.next_token();
13641 let token2 = self.next_token();
13642 let token3 = self.next_token();
13643
13644 let table_name;
13645 let schema_name;
13646 if token2 == Token::Period {
13647 match token1.token {
13648 Token::Word(w) => {
13649 schema_name = w.value;
13650 }
13651 _ => {
13652 return self.expected("Schema name", token1);
13653 }
13654 }
13655 match token3.token {
13656 Token::Word(w) => {
13657 table_name = w.value;
13658 }
13659 _ => {
13660 return self.expected("Table name", token3);
13661 }
13662 }
13663 Ok(Table {
13664 table_name: Some(table_name),
13665 schema_name: Some(schema_name),
13666 })
13667 } else {
13668 match token1.token {
13669 Token::Word(w) => {
13670 table_name = w.value;
13671 }
13672 _ => {
13673 return self.expected("Table name", token1);
13674 }
13675 }
13676 Ok(Table {
13677 table_name: Some(table_name),
13678 schema_name: None,
13679 })
13680 }
13681 }
13682
13683 fn parse_set_role(
13685 &mut self,
13686 modifier: Option<ContextModifier>,
13687 ) -> Result<Statement, ParserError> {
13688 self.expect_keyword_is(Keyword::ROLE)?;
13689
13690 let role_name = if self.parse_keyword(Keyword::NONE) {
13691 None
13692 } else {
13693 Some(self.parse_identifier()?)
13694 };
13695 Ok(Statement::Set(Set::SetRole {
13696 context_modifier: modifier,
13697 role_name,
13698 }))
13699 }
13700
13701 fn parse_set_values(
13702 &mut self,
13703 parenthesized_assignment: bool,
13704 ) -> Result<Vec<Expr>, ParserError> {
13705 let mut values = vec![];
13706
13707 if parenthesized_assignment {
13708 self.expect_token(&Token::LParen)?;
13709 }
13710
13711 loop {
13712 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13713 expr
13714 } else if let Ok(expr) = self.parse_expr() {
13715 expr
13716 } else {
13717 self.expected("variable value", self.peek_token())?
13718 };
13719
13720 values.push(value);
13721 if self.consume_token(&Token::Comma) {
13722 continue;
13723 }
13724
13725 if parenthesized_assignment {
13726 self.expect_token(&Token::RParen)?;
13727 }
13728 return Ok(values);
13729 }
13730 }
13731
13732 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13733 let modifier =
13734 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13735
13736 Self::keyword_to_modifier(modifier)
13737 }
13738
13739 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13741 let scope = self.parse_context_modifier();
13742
13743 let name = if self.dialect.supports_parenthesized_set_variables()
13744 && self.consume_token(&Token::LParen)
13745 {
13746 self.expected("Unparenthesized assignment", self.peek_token())?
13750 } else {
13751 self.parse_object_name(false)?
13752 };
13753
13754 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13755 return self.expected("assignment operator", self.peek_token());
13756 }
13757
13758 let value = self.parse_expr()?;
13759
13760 Ok(SetAssignment { scope, name, value })
13761 }
13762
    /// Parse a `SET` statement in its many dialect-specific forms:
    /// `SET ROLE`, `SET TIME ZONE`/`SET TIMEZONE`, `SET NAMES`,
    /// `SET CHARACTERISTICS AS TRANSACTION`, `SET TRANSACTION`,
    /// `SET SESSION AUTHORIZATION`, and plain variable assignments
    /// (single, comma-separated, or parenthesized).
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive-style `SET HIVEVAR:name = value`.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // Optional scope keyword (SESSION/LOCAL/GLOBAL); not combined with
        // HIVEVAR.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET [scope] ROLE ...` is tried speculatively so other SET forms
        // can still be parsed when ROLE does not follow.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`.
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // Assignment form: `SET TIMEZONE = <values>` / `... TO <values>`.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // Bare form: `SET TIME ZONE <expr>`.
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // `SET NAMES DEFAULT` or `SET NAMES <charset> [COLLATE '<collation>']`.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET CHARACTERISTICS AS TRANSACTION <modes>` (session-wide).
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT '<id>'` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?.value;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET SESSION AUTHORIZATION { DEFAULT | <user> }`. A scope must
            // already have been parsed for AUTHORIZATION to be reachable.
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope: scope.expect("SET ... AUTHORIZATION must have a scope"),
                kind: auth_value,
            })
            .into());
        }

        // Comma-separated assignments, e.g. `SET a = 1, b = 2`.
        if self.dialect.supports_comma_separated_set_assignments() {
            if scope.is_some() {
                // Back up so parse_set_assignment can re-read the scope
                // keyword itself.
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten to SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a parenthesized variable list `SET (a, b) = ...` or a
        // single object name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // Some dialects allow SET without an assignment operator; back up
        // one token and retry as a session-parameter statement.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected("equals sign or TO", self.peek_token())
    }
13923
    /// Parse session parameters for `SET` statements that use no `=`/`TO`
    /// operator (reached from `parse_set` when the dialect supports it):
    /// `STATISTICS <topic>`, `IDENTITY_INSERT <table>`, `OFFSETS <list>`,
    /// or a generic name list followed by a value expression.
    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::STATISTICS) {
            // SET STATISTICS { IO | PROFILE | TIME | XML } { ON | OFF }
            let topic = match self.parse_one_of_keywords(&[
                Keyword::IO,
                Keyword::PROFILE,
                Keyword::TIME,
                Keyword::XML,
            ]) {
                Some(Keyword::IO) => SessionParamStatsTopic::IO,
                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
            };
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
                    topic,
                    value,
                }))
                .into(),
            )
        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
            // SET IDENTITY_INSERT <table> { ON | OFF }
            let obj = self.parse_object_name(false)?;
            let value = self.parse_session_param_value()?;
            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
                SetSessionParamIdentityInsert { obj, value },
            ))
            .into())
        } else if self.parse_keyword(Keyword::OFFSETS) {
            // SET OFFSETS <keyword>, ... { ON | OFF } — each list item must
            // be a bare word; its text is kept verbatim.
            let keywords = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match &next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("SQL keyword", next_token),
                }
            })?;
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
                    keywords,
                    value,
                }))
                .into(),
            )
        } else {
            // Generic fallback: SET <name>, ... <expr> — the value is stored
            // as the string rendering of the parsed expression.
            let names = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("Session param name", next_token),
                }
            })?;
            let value = self.parse_expr()?.to_string();
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
                    names,
                    value,
                }))
                .into(),
            )
        }
    }
13987
13988 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
13989 if self.parse_keyword(Keyword::ON) {
13990 Ok(SessionParamValue::On)
13991 } else if self.parse_keyword(Keyword::OFF) {
13992 Ok(SessionParamValue::Off)
13993 } else {
13994 self.expected("ON or OFF", self.peek_token())
13995 }
13996 }
13997
    /// Parse a `SHOW ...` statement. A sequence of optional modifier
    /// keywords is consumed first, then the object kind selects the
    /// concrete statement form.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        // Optional modifiers, consumed in exactly this order when present.
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if extended || full {
            // EXTENDED/FULL only combine with the COLUMNS/TABLES/VIEWS
            // forms handled above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): VARIABLES is consumed even when the dialect
            // check then fails, so other dialects fall through with the
            // keyword already eaten — confirm this is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): same consume-then-dialect-check pattern as
            // VARIABLES above.
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <variable>` with a free-form identifier list.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
14056
14057 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
14058 Ok(Statement::ShowCharset(ShowCharset {
14060 is_shorthand,
14061 filter: self.parse_show_statement_filter()?,
14062 }))
14063 }
14064
14065 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
14066 let history = self.parse_keyword(Keyword::HISTORY);
14067 let show_options = self.parse_show_stmt_options()?;
14068 Ok(Statement::ShowDatabases {
14069 terse,
14070 history,
14071 show_options,
14072 })
14073 }
14074
14075 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
14076 let history = self.parse_keyword(Keyword::HISTORY);
14077 let show_options = self.parse_show_stmt_options()?;
14078 Ok(Statement::ShowSchemas {
14079 terse,
14080 history,
14081 show_options,
14082 })
14083 }
14084
14085 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
14086 let obj_type = match self.expect_one_of_keywords(&[
14087 Keyword::TABLE,
14088 Keyword::TRIGGER,
14089 Keyword::FUNCTION,
14090 Keyword::PROCEDURE,
14091 Keyword::EVENT,
14092 Keyword::VIEW,
14093 ])? {
14094 Keyword::TABLE => Ok(ShowCreateObject::Table),
14095 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
14096 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
14097 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
14098 Keyword::EVENT => Ok(ShowCreateObject::Event),
14099 Keyword::VIEW => Ok(ShowCreateObject::View),
14100 keyword => Err(ParserError::ParserError(format!(
14101 "Unable to map keyword to ShowCreateObject: {keyword:?}"
14102 ))),
14103 }?;
14104
14105 let obj_name = self.parse_object_name(false)?;
14106
14107 Ok(Statement::ShowCreate { obj_type, obj_name })
14108 }
14109
14110 pub fn parse_show_columns(
14111 &mut self,
14112 extended: bool,
14113 full: bool,
14114 ) -> Result<Statement, ParserError> {
14115 let show_options = self.parse_show_stmt_options()?;
14116 Ok(Statement::ShowColumns {
14117 extended,
14118 full,
14119 show_options,
14120 })
14121 }
14122
14123 fn parse_show_tables(
14124 &mut self,
14125 terse: bool,
14126 extended: bool,
14127 full: bool,
14128 external: bool,
14129 ) -> Result<Statement, ParserError> {
14130 let history = !external && self.parse_keyword(Keyword::HISTORY);
14131 let show_options = self.parse_show_stmt_options()?;
14132 Ok(Statement::ShowTables {
14133 terse,
14134 history,
14135 extended,
14136 full,
14137 external,
14138 show_options,
14139 })
14140 }
14141
14142 fn parse_show_views(
14143 &mut self,
14144 terse: bool,
14145 materialized: bool,
14146 ) -> Result<Statement, ParserError> {
14147 let show_options = self.parse_show_stmt_options()?;
14148 Ok(Statement::ShowViews {
14149 materialized,
14150 terse,
14151 show_options,
14152 })
14153 }
14154
14155 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
14156 let filter = self.parse_show_statement_filter()?;
14157 Ok(Statement::ShowFunctions { filter })
14158 }
14159
14160 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
14161 let filter = self.parse_show_statement_filter()?;
14162 Ok(Statement::ShowCollation { filter })
14163 }
14164
14165 pub fn parse_show_statement_filter(
14166 &mut self,
14167 ) -> Result<Option<ShowStatementFilter>, ParserError> {
14168 if self.parse_keyword(Keyword::LIKE) {
14169 Ok(Some(ShowStatementFilter::Like(
14170 self.parse_literal_string()?,
14171 )))
14172 } else if self.parse_keyword(Keyword::ILIKE) {
14173 Ok(Some(ShowStatementFilter::ILike(
14174 self.parse_literal_string()?,
14175 )))
14176 } else if self.parse_keyword(Keyword::WHERE) {
14177 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14178 } else {
14179 self.maybe_parse(|parser| -> Result<String, ParserError> {
14180 parser.parse_literal_string()
14181 })?
14182 .map_or(Ok(None), |filter| {
14183 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14184 })
14185 }
14186 }
14187
14188 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14189 let parsed_keyword = if dialect_of!(self is HiveDialect) {
14191 if self.parse_keyword(Keyword::DEFAULT) {
14193 return Ok(Statement::Use(Use::Default));
14194 }
14195 None } else if dialect_of!(self is DatabricksDialect) {
14197 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14198 } else if dialect_of!(self is SnowflakeDialect) {
14199 self.parse_one_of_keywords(&[
14200 Keyword::DATABASE,
14201 Keyword::SCHEMA,
14202 Keyword::WAREHOUSE,
14203 Keyword::ROLE,
14204 Keyword::SECONDARY,
14205 ])
14206 } else {
14207 None };
14209
14210 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14211 self.parse_secondary_roles()?
14212 } else {
14213 let obj_name = self.parse_object_name(false)?;
14214 match parsed_keyword {
14215 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14216 Some(Keyword::DATABASE) => Use::Database(obj_name),
14217 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14218 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14219 Some(Keyword::ROLE) => Use::Role(obj_name),
14220 _ => Use::Object(obj_name),
14221 }
14222 };
14223
14224 Ok(Statement::Use(result))
14225 }
14226
14227 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14228 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14229 if self.parse_keyword(Keyword::NONE) {
14230 Ok(Use::SecondaryRoles(SecondaryRoles::None))
14231 } else if self.parse_keyword(Keyword::ALL) {
14232 Ok(Use::SecondaryRoles(SecondaryRoles::All))
14233 } else {
14234 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14235 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14236 }
14237 }
14238
14239 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14240 let relation = self.parse_table_factor()?;
14241 let joins = self.parse_joins()?;
14245 Ok(TableWithJoins { relation, joins })
14246 }
14247
    /// Parse zero or more join clauses following a table factor.
    ///
    /// Handles CROSS JOIN/APPLY, OUTER APPLY, ASOF JOIN (with
    /// MATCH_CONDITION), and the NATURAL/INNER/LEFT/RIGHT/FULL/SEMI/ANTI/
    /// STRAIGHT_JOIN family. Stops (without consuming) at the first token
    /// that does not begin a join.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix, recorded on every join kind.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    // CROSS APPLY takes no constraint.
                    JoinOperator::CrossApply
                } else {
                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow a join constraint after CROSS JOIN;
                // re-wrap the operator with it when supported.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // OUTER APPLY.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // ASOF JOIN <relation> MATCH_CONDITION (<expr>) [constraint]
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) the keyword that selects the
                // join type.
                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a JoinOperator variant *constructor*; the
                // join constraint is applied to it after the relation (and
                // any nested joins) have been parsed below.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        // Optional INNER, then the mandatory JOIN keyword.
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume LEFT/RIGHT, then one of OUTER/SEMI/ANTI/JOIN.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // FULL [OUTER] JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // A bare OUTER (without APPLY, handled above) is
                        // only valid after LEFT/RIGHT/FULL.
                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        // NATURAL must be followed by a join keyword.
                        return self.expected("a join type after NATURAL", self.peek_token());
                    }
                    // Not a join at all: stop collecting.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // When the dialect does not treat unparenthesized join
                // chains as left-associative and another join keyword
                // follows, recursively parse the remaining joins and nest
                // them under this relation.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
14415
14416 fn peek_parens_less_nested_join(&self) -> bool {
14417 matches!(
14418 self.peek_token_ref().token,
14419 Token::Word(Word {
14420 keyword: Keyword::JOIN
14421 | Keyword::INNER
14422 | Keyword::LEFT
14423 | Keyword::RIGHT
14424 | Keyword::FULL,
14425 ..
14426 })
14427 )
14428 }
14429
14430 pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14432 if self.parse_keyword(Keyword::LATERAL) {
14433 if self.consume_token(&Token::LParen) {
14435 self.parse_derived_table_factor(Lateral)
14436 } else {
14437 let name = self.parse_object_name(false)?;
14438 self.expect_token(&Token::LParen)?;
14439 let args = self.parse_optional_args()?;
14440 let alias = self.maybe_parse_table_alias()?;
14441 Ok(TableFactor::Function {
14442 lateral: true,
14443 name,
14444 args,
14445 alias,
14446 })
14447 }
14448 } else if self.parse_keyword(Keyword::TABLE) {
14449 self.expect_token(&Token::LParen)?;
14451 let expr = self.parse_expr()?;
14452 self.expect_token(&Token::RParen)?;
14453 let alias = self.maybe_parse_table_alias()?;
14454 Ok(TableFactor::TableFunction { expr, alias })
14455 } else if self.consume_token(&Token::LParen) {
14456 if let Some(mut table) =
14478 self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
14479 {
14480 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
14481 {
14482 table = match kw {
14483 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14484 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14485 unexpected_keyword => return Err(ParserError::ParserError(
14486 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14487 )),
14488 }
14489 }
14490 return Ok(table);
14491 }
14492
14493 let mut table_and_joins = self.parse_table_and_joins()?;
14500
14501 #[allow(clippy::if_same_then_else)]
14502 if !table_and_joins.joins.is_empty() {
14503 self.expect_token(&Token::RParen)?;
14504 let alias = self.maybe_parse_table_alias()?;
14505 Ok(TableFactor::NestedJoin {
14506 table_with_joins: Box::new(table_and_joins),
14507 alias,
14508 }) } else if let TableFactor::NestedJoin {
14510 table_with_joins: _,
14511 alias: _,
14512 } = &table_and_joins.relation
14513 {
14514 self.expect_token(&Token::RParen)?;
14517 let alias = self.maybe_parse_table_alias()?;
14518 Ok(TableFactor::NestedJoin {
14519 table_with_joins: Box::new(table_and_joins),
14520 alias,
14521 })
14522 } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
14523 self.expect_token(&Token::RParen)?;
14530
14531 if let Some(outer_alias) = self.maybe_parse_table_alias()? {
14532 match &mut table_and_joins.relation {
14535 TableFactor::Derived { alias, .. }
14536 | TableFactor::Table { alias, .. }
14537 | TableFactor::Function { alias, .. }
14538 | TableFactor::UNNEST { alias, .. }
14539 | TableFactor::JsonTable { alias, .. }
14540 | TableFactor::XmlTable { alias, .. }
14541 | TableFactor::OpenJsonTable { alias, .. }
14542 | TableFactor::TableFunction { alias, .. }
14543 | TableFactor::Pivot { alias, .. }
14544 | TableFactor::Unpivot { alias, .. }
14545 | TableFactor::MatchRecognize { alias, .. }
14546 | TableFactor::SemanticView { alias, .. }
14547 | TableFactor::NestedJoin { alias, .. } => {
14548 if let Some(inner_alias) = alias {
14550 return Err(ParserError::ParserError(format!(
14551 "duplicate alias {inner_alias}"
14552 )));
14553 }
14554 alias.replace(outer_alias);
14558 }
14559 };
14560 }
14561 Ok(table_and_joins.relation)
14563 } else {
14564 self.expected("joined table", self.peek_token())
14567 }
14568 } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
14569 && matches!(
14570 self.peek_tokens(),
14571 [
14572 Token::Word(Word {
14573 keyword: Keyword::VALUES,
14574 ..
14575 }),
14576 Token::LParen
14577 ]
14578 )
14579 {
14580 self.expect_keyword_is(Keyword::VALUES)?;
14581
14582 let values = SetExpr::Values(self.parse_values(false, false)?);
14586 let alias = self.maybe_parse_table_alias()?;
14587 Ok(TableFactor::Derived {
14588 lateral: false,
14589 subquery: Box::new(Query {
14590 with: None,
14591 body: Box::new(values),
14592 order_by: None,
14593 limit_clause: None,
14594 fetch: None,
14595 locks: vec![],
14596 for_clause: None,
14597 settings: None,
14598 format_clause: None,
14599 pipe_operators: vec![],
14600 }),
14601 alias,
14602 })
14603 } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
14604 && self.parse_keyword(Keyword::UNNEST)
14605 {
14606 self.expect_token(&Token::LParen)?;
14607 let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
14608 self.expect_token(&Token::RParen)?;
14609
14610 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14611 let alias = match self.maybe_parse_table_alias() {
14612 Ok(Some(alias)) => Some(alias),
14613 Ok(None) => None,
14614 Err(e) => return Err(e),
14615 };
14616
14617 let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
14618 Ok(()) => true,
14619 Err(_) => false,
14620 };
14621
14622 let with_offset_alias = if with_offset {
14623 match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
14624 Ok(Some(alias)) => Some(alias),
14625 Ok(None) => None,
14626 Err(e) => return Err(e),
14627 }
14628 } else {
14629 None
14630 };
14631
14632 Ok(TableFactor::UNNEST {
14633 alias,
14634 array_exprs,
14635 with_offset,
14636 with_offset_alias,
14637 with_ordinality,
14638 })
14639 } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
14640 let json_expr = self.parse_expr()?;
14641 self.expect_token(&Token::Comma)?;
14642 let json_path = self.parse_value()?.value;
14643 self.expect_keyword_is(Keyword::COLUMNS)?;
14644 self.expect_token(&Token::LParen)?;
14645 let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
14646 self.expect_token(&Token::RParen)?;
14647 self.expect_token(&Token::RParen)?;
14648 let alias = self.maybe_parse_table_alias()?;
14649 Ok(TableFactor::JsonTable {
14650 json_expr,
14651 json_path,
14652 columns,
14653 alias,
14654 })
14655 } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
14656 self.prev_token();
14657 self.parse_open_json_table_factor()
14658 } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
14659 self.prev_token();
14660 self.parse_xml_table_factor()
14661 } else if self.dialect.supports_semantic_view_table_factor()
14662 && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
14663 {
14664 self.parse_semantic_view_table_factor()
14665 } else {
14666 let name = self.parse_object_name(true)?;
14667
14668 let json_path = match self.peek_token().token {
14669 Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
14670 _ => None,
14671 };
14672
14673 let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
14674 && self.parse_keyword(Keyword::PARTITION)
14675 {
14676 self.parse_parenthesized_identifiers()?
14677 } else {
14678 vec![]
14679 };
14680
14681 let version = self.maybe_parse_table_version()?;
14683
14684 let args = if self.consume_token(&Token::LParen) {
14686 Some(self.parse_table_function_args()?)
14687 } else {
14688 None
14689 };
14690
14691 let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14692
14693 let mut sample = None;
14694 if self.dialect.supports_table_sample_before_alias() {
14695 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14696 sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
14697 }
14698 }
14699
14700 let alias = self.maybe_parse_table_alias()?;
14701
14702 let index_hints = if self.dialect.supports_table_hints() {
14704 self.maybe_parse(|p| p.parse_table_index_hints())?
14705 .unwrap_or(vec![])
14706 } else {
14707 vec![]
14708 };
14709
14710 let mut with_hints = vec![];
14712 if self.parse_keyword(Keyword::WITH) {
14713 if self.consume_token(&Token::LParen) {
14714 with_hints = self.parse_comma_separated(Parser::parse_expr)?;
14715 self.expect_token(&Token::RParen)?;
14716 } else {
14717 self.prev_token();
14719 }
14720 };
14721
14722 if !self.dialect.supports_table_sample_before_alias() {
14723 if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14724 sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
14725 }
14726 }
14727
14728 let mut table = TableFactor::Table {
14729 name,
14730 alias,
14731 args,
14732 with_hints,
14733 version,
14734 partitions,
14735 with_ordinality,
14736 json_path,
14737 sample,
14738 index_hints,
14739 };
14740
14741 while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
14742 table = match kw {
14743 Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14744 Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14745 unexpected_keyword => return Err(ParserError::ParserError(
14746 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14747 )),
14748 }
14749 }
14750
14751 if self.dialect.supports_match_recognize()
14752 && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
14753 {
14754 table = self.parse_match_recognize(table)?;
14755 }
14756
14757 Ok(table)
14758 }
14759 }
14760
14761 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14762 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14763 TableSampleModifier::TableSample
14764 } else if self.parse_keyword(Keyword::SAMPLE) {
14765 TableSampleModifier::Sample
14766 } else {
14767 return Ok(None);
14768 };
14769 self.parse_table_sample(modifier).map(Some)
14770 }
14771
    /// Parses the body of a `TABLESAMPLE`/`SAMPLE` clause (the introducing
    /// keyword has already been consumed and is passed in as `modifier`).
    ///
    /// Recognizes an optional sampling method (`BERNOULLI`/`ROW`/`SYSTEM`/`BLOCK`),
    /// then either a Hive-style `BUCKET x OUT OF y [ON expr]` specification or a
    /// quantity with an optional `ROWS`/`PERCENT` unit, followed by optional
    /// `REPEATABLE(seed)`/`SEED(seed)` and `OFFSET expr` suffixes.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method keyword.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // The quantity/bucket spec may or may not be parenthesized; remember
        // which so we can require the matching `)` afterwards.
        let parenthesized = self.consume_token(&Token::LParen);

        // Either a bucket spec (only valid inside parentheses) or a quantity.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            // Hive: `BUCKET <n> OUT OF <m> [ON <expr>]`.
            let selected_bucket = self.parse_number_value()?.value;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?.value;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            // A general expression quantity; if no expression parses, accept a
            // bare word as a placeholder (e.g. a byte-length literal like 100M).
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token().span.start
                        );
                    }
                }
            };
            // Optional unit following the quantity.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic-sampling seed.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional trailing OFFSET expression.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
14866
14867 fn parse_table_sample_seed(
14868 &mut self,
14869 modifier: TableSampleSeedModifier,
14870 ) -> Result<TableSampleSeed, ParserError> {
14871 self.expect_token(&Token::LParen)?;
14872 let value = self.parse_number_value()?.value;
14873 self.expect_token(&Token::RParen)?;
14874 Ok(TableSampleSeed { modifier, value })
14875 }
14876
14877 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14880 self.expect_token(&Token::LParen)?;
14881 let json_expr = self.parse_expr()?;
14882 let json_path = if self.consume_token(&Token::Comma) {
14883 Some(self.parse_value()?.value)
14884 } else {
14885 None
14886 };
14887 self.expect_token(&Token::RParen)?;
14888 let columns = if self.parse_keyword(Keyword::WITH) {
14889 self.expect_token(&Token::LParen)?;
14890 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
14891 self.expect_token(&Token::RParen)?;
14892 columns
14893 } else {
14894 Vec::new()
14895 };
14896 let alias = self.maybe_parse_table_alias()?;
14897 Ok(TableFactor::OpenJsonTable {
14898 json_expr,
14899 json_path,
14900 columns,
14901 alias,
14902 })
14903 }
14904
14905 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14906 self.expect_token(&Token::LParen)?;
14907 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
14908 self.expect_token(&Token::LParen)?;
14909 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
14910 self.expect_token(&Token::RParen)?;
14911 self.expect_token(&Token::Comma)?;
14912 namespaces
14913 } else {
14914 vec![]
14915 };
14916 let row_expression = self.parse_expr()?;
14917 let passing = self.parse_xml_passing_clause()?;
14918 self.expect_keyword_is(Keyword::COLUMNS)?;
14919 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
14920 self.expect_token(&Token::RParen)?;
14921 let alias = self.maybe_parse_table_alias()?;
14922 Ok(TableFactor::XmlTable {
14923 namespaces,
14924 row_expression,
14925 passing,
14926 columns,
14927 alias,
14928 })
14929 }
14930
14931 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
14932 let uri = self.parse_expr()?;
14933 self.expect_keyword_is(Keyword::AS)?;
14934 let name = self.parse_identifier()?;
14935 Ok(XmlNamespaceDefinition { uri, name })
14936 }
14937
14938 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
14939 let name = self.parse_identifier()?;
14940
14941 let option = if self.parse_keyword(Keyword::FOR) {
14942 self.expect_keyword(Keyword::ORDINALITY)?;
14943 XmlTableColumnOption::ForOrdinality
14944 } else {
14945 let r#type = self.parse_data_type()?;
14946 let mut path = None;
14947 let mut default = None;
14948
14949 if self.parse_keyword(Keyword::PATH) {
14950 path = Some(self.parse_expr()?);
14951 }
14952
14953 if self.parse_keyword(Keyword::DEFAULT) {
14954 default = Some(self.parse_expr()?);
14955 }
14956
14957 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
14958 if !not_null {
14959 let _ = self.parse_keyword(Keyword::NULL);
14961 }
14962
14963 XmlTableColumnOption::NamedInfo {
14964 r#type,
14965 path,
14966 default,
14967 nullable: !not_null,
14968 }
14969 };
14970 Ok(XmlTableColumn { name, option })
14971 }
14972
    /// Parses the optional `PASSING` clause of an `XMLTABLE(...)` factor:
    /// `PASSING [BY VALUE] <expr> [AS <alias>] [, ...]`.
    ///
    /// Returns an empty argument list when no `PASSING` keyword is present.
    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
        let mut arguments = vec![];
        if self.parse_keyword(Keyword::PASSING) {
            loop {
                // NOTE(review): if `BY` is consumed but the next keyword is not
                // `VALUE`, the expect error is discarded and parsing continues
                // with `by_value == false` while `BY` stays consumed — confirm
                // this lenient handling is intended.
                let by_value =
                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
                let expr = self.parse_expr()?;
                let alias = if self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };
                arguments.push(XmlPassingArgument {
                    expr,
                    alias,
                    by_value,
                });
                // Arguments are comma-separated; stop at the first non-comma.
                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }
        }
        Ok(XmlPassingClause { arguments })
    }
14997
14998 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15000 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
15001 self.expect_token(&Token::LParen)?;
15002
15003 let name = self.parse_object_name(true)?;
15004
15005 let mut dimensions = Vec::new();
15007 let mut metrics = Vec::new();
15008 let mut facts = Vec::new();
15009 let mut where_clause = None;
15010
15011 while self.peek_token().token != Token::RParen {
15012 if self.parse_keyword(Keyword::DIMENSIONS) {
15013 if !dimensions.is_empty() {
15014 return Err(ParserError::ParserError(
15015 "DIMENSIONS clause can only be specified once".to_string(),
15016 ));
15017 }
15018 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15019 } else if self.parse_keyword(Keyword::METRICS) {
15020 if !metrics.is_empty() {
15021 return Err(ParserError::ParserError(
15022 "METRICS clause can only be specified once".to_string(),
15023 ));
15024 }
15025 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15026 } else if self.parse_keyword(Keyword::FACTS) {
15027 if !facts.is_empty() {
15028 return Err(ParserError::ParserError(
15029 "FACTS clause can only be specified once".to_string(),
15030 ));
15031 }
15032 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15033 } else if self.parse_keyword(Keyword::WHERE) {
15034 if where_clause.is_some() {
15035 return Err(ParserError::ParserError(
15036 "WHERE clause can only be specified once".to_string(),
15037 ));
15038 }
15039 where_clause = Some(self.parse_expr()?);
15040 } else {
15041 return parser_err!(
15042 format!(
15043 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
15044 self.peek_token().token
15045 ),
15046 self.peek_token().span.start
15047 )?;
15048 }
15049 }
15050
15051 self.expect_token(&Token::RParen)?;
15052
15053 let alias = self.maybe_parse_table_alias()?;
15054
15055 Ok(TableFactor::SemanticView {
15056 name,
15057 dimensions,
15058 metrics,
15059 facts,
15060 where_clause,
15061 alias,
15062 })
15063 }
15064
    /// Parses a `MATCH_RECOGNIZE (...)` clause and wraps the given `table`.
    ///
    /// The `MATCH_RECOGNIZE` keyword has already been consumed. Clause order
    /// follows the row-pattern-recognition grammar:
    /// `( [PARTITION BY ...] [ORDER BY ...] [MEASURES ...]
    ///    [ONE ROW PER MATCH | ALL ROWS PER MATCH [...]] [AFTER MATCH SKIP ...]
    ///    PATTERN ( <pattern> ) DEFINE <symbol defs> ) [alias]`
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // MEASURES entries: `<expr> [AS] <alias>` — the AS is optional but the
        // alias identifier is mandatory.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // ONE ROW PER MATCH, or ALL ROWS PER MATCH with an optional
        // empty-match mode (SHOW/OMIT EMPTY MATCHES, WITH UNMATCHED ROWS).
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // AFTER MATCH SKIP: once the prefix is seen, one of the four skip
        // targets is mandatory; otherwise it is an error.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // PATTERN and DEFINE are mandatory.
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        self.expect_keyword_is(Keyword::DEFINE)?;

        // DEFINE entries: `<symbol> AS <boolean expr>`.
        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
15169
    /// Parses an atomic (non-repeated) element of a `MATCH_RECOGNIZE` pattern:
    /// the `^`/`$` anchors, an exclusion `{- sym -}`, `PERMUTE(...)`, a
    /// parenthesized group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` anchors the pattern at the partition start.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` anchors at the partition end; it tokenizes as a placeholder.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}`: rows matched by `symbol` are excluded from output.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // `PERMUTE(a, b, ...)`: matched as a bare unquoted word, not a
            // reserved keyword.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // Parenthesized sub-pattern group.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: push the token back and parse it as a named symbol.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
15208
    /// Parses a base pattern followed by any number of repetition quantifiers:
    /// `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`.
    ///
    /// Quantifiers may stack (each wraps the previously built pattern).
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` tokenizes as a placeholder.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    // Note: this `token` shadows the outer one; spans below
                    // refer to the token consumed inside the braces.
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            // NOTE(review): the span passed here is the comma's,
                            // not the number's — confirm this is intentional.
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — lower bound with optional upper.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // Not a quantifier: push it back and stop.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
15263
15264 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15265 let mut patterns = vec![self.parse_repetition_pattern()?];
15266 while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15267 patterns.push(self.parse_repetition_pattern()?);
15268 }
15269 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15270 Ok([pattern]) => Ok(pattern),
15271 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15272 }
15273 }
15274
15275 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15276 let pattern = self.parse_concat_pattern()?;
15277 if self.consume_token(&Token::Pipe) {
15278 match self.parse_pattern()? {
15279 MatchRecognizePattern::Alternation(mut patterns) => {
15281 patterns.insert(0, pattern);
15282 Ok(MatchRecognizePattern::Alternation(patterns))
15283 }
15284 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15285 }
15286 } else {
15287 Ok(pattern)
15288 }
15289 }
15290
15291 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15293 if self.dialect.supports_timestamp_versioning() {
15294 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15295 {
15296 let expr = self.parse_expr()?;
15297 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
15298 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
15299 let func_name = self.parse_object_name(true)?;
15300 let func = self.parse_function(func_name)?;
15301 return Ok(Some(TableVersion::Function(func)));
15302 }
15303 }
15304 Ok(None)
15305 }
15306
    /// Parses a single `COLUMNS(...)` entry of a `JSON_TABLE` factor.
    ///
    /// Supports three column forms:
    /// - `NESTED [PATH] '<path>' COLUMNS (...)` (recursive),
    /// - `<name> FOR ORDINALITY`,
    /// - `<name> <type> [EXISTS] PATH '<path>'
    ///   [<handling> ON EMPTY] [<handling> ON ERROR]`.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional in the NESTED form.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?.value;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?.value;
        let mut on_empty = None;
        let mut on_error = None;
        // ON EMPTY / ON ERROR clauses may appear in either order; a repeated
        // clause silently overwrites the earlier value.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
15350
15351 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15359 let name = self.parse_identifier()?;
15360 let r#type = self.parse_data_type()?;
15361 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15362 self.next_token();
15363 Some(path)
15364 } else {
15365 None
15366 };
15367 let as_json = self.parse_keyword(Keyword::AS);
15368 if as_json {
15369 self.expect_keyword_is(Keyword::JSON)?;
15370 }
15371 Ok(OpenJsonTableColumn {
15372 name,
15373 r#type,
15374 path,
15375 as_json,
15376 })
15377 }
15378
15379 fn parse_json_table_column_error_handling(
15380 &mut self,
15381 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15382 let res = if self.parse_keyword(Keyword::NULL) {
15383 JsonTableColumnErrorHandling::Null
15384 } else if self.parse_keyword(Keyword::ERROR) {
15385 JsonTableColumnErrorHandling::Error
15386 } else if self.parse_keyword(Keyword::DEFAULT) {
15387 JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15388 } else {
15389 return Ok(None);
15390 };
15391 self.expect_keyword_is(Keyword::ON)?;
15392 Ok(Some(res))
15393 }
15394
15395 pub fn parse_derived_table_factor(
15396 &mut self,
15397 lateral: IsLateral,
15398 ) -> Result<TableFactor, ParserError> {
15399 let subquery = self.parse_query()?;
15400 self.expect_token(&Token::RParen)?;
15401 let alias = self.maybe_parse_table_alias()?;
15402 Ok(TableFactor::Derived {
15403 lateral: match lateral {
15404 Lateral => true,
15405 NotLateral => false,
15406 },
15407 subquery,
15408 alias,
15409 })
15410 }
15411
15412 fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15413 let function_name = match self.next_token().token {
15414 Token::Word(w) => Ok(w.value),
15415 _ => self.expected("a function identifier", self.peek_token()),
15416 }?;
15417 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15418 let alias = if self.parse_keyword(Keyword::AS) {
15419 Some(self.parse_identifier()?)
15420 } else {
15421 None
15422 };
15423
15424 Ok(ExprWithAlias { expr, alias })
15425 }
15426 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15449 let expr = self.parse_expr()?;
15450 let alias = if self.parse_keyword(Keyword::AS) {
15451 Some(self.parse_identifier()?)
15452 } else {
15453 None
15454 };
15455
15456 Ok(ExprWithAlias { expr, alias })
15457 }
15458
    /// Parses the parenthesized body of a `PIVOT` clause and wraps `table`:
    /// `PIVOT ( <aggregate fn list> FOR <value column(s)> IN ( <source> )
    ///          [DEFAULT ON NULL (<expr>)] ) [alias]`.
    ///
    /// The `PIVOT` keyword has already been consumed by the caller.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // Either a parenthesized column list or a single expression. Parsed at
        // BETWEEN precedence so the following `IN` keyword is not consumed as
        // part of the expression.
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        // The IN source: `ANY [ORDER BY ...]`, a subquery, or an explicit
        // value list with optional aliases.
        self.expect_token(&Token::LParen)?;
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
        };
        self.expect_token(&Token::RParen)?;

        // Optional `DEFAULT ON NULL (<expr>)` suffix.
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
15511
15512 pub fn parse_unpivot_table_factor(
15513 &mut self,
15514 table: TableFactor,
15515 ) -> Result<TableFactor, ParserError> {
15516 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15517 self.expect_keyword_is(Keyword::NULLS)?;
15518 Some(NullInclusion::IncludeNulls)
15519 } else if self.parse_keyword(Keyword::EXCLUDE) {
15520 self.expect_keyword_is(Keyword::NULLS)?;
15521 Some(NullInclusion::ExcludeNulls)
15522 } else {
15523 None
15524 };
15525 self.expect_token(&Token::LParen)?;
15526 let value = self.parse_expr()?;
15527 self.expect_keyword_is(Keyword::FOR)?;
15528 let name = self.parse_identifier()?;
15529 self.expect_keyword_is(Keyword::IN)?;
15530 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15531 p.parse_expr_with_alias()
15532 })?;
15533 self.expect_token(&Token::RParen)?;
15534 let alias = self.maybe_parse_table_alias()?;
15535 Ok(TableFactor::Unpivot {
15536 table: Box::new(table),
15537 value,
15538 null_inclusion,
15539 name,
15540 columns,
15541 alias,
15542 })
15543 }
15544
15545 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15546 if natural {
15547 Ok(JoinConstraint::Natural)
15548 } else if self.parse_keyword(Keyword::ON) {
15549 let constraint = self.parse_expr()?;
15550 Ok(JoinConstraint::On(constraint))
15551 } else if self.parse_keyword(Keyword::USING) {
15552 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15553 Ok(JoinConstraint::Using(columns))
15554 } else {
15555 Ok(JoinConstraint::None)
15556 }
15558 }
15559
15560 pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15562 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15563
15564 self.expect_keyword_is(Keyword::TO)?;
15565 let grantees = self.parse_grantees()?;
15566
15567 let with_grant_option =
15568 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15569
15570 let current_grants =
15571 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15572 Some(CurrentGrantsKind::CopyCurrentGrants)
15573 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15574 Some(CurrentGrantsKind::RevokeCurrentGrants)
15575 } else {
15576 None
15577 };
15578
15579 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15580 Some(self.parse_identifier()?)
15581 } else {
15582 None
15583 };
15584
15585 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15586 Some(self.parse_identifier()?)
15587 } else {
15588 None
15589 };
15590
15591 Ok(Statement::Grant {
15592 privileges,
15593 objects,
15594 grantees,
15595 with_grant_option,
15596 as_grantor,
15597 granted_by,
15598 current_grants,
15599 })
15600 }
15601
    /// Parses the comma-separated grantee list of a `GRANT`/`REVOKE` statement.
    ///
    /// Each grantee may be prefixed with a type keyword (`ROLE`, `USER`,
    /// `SHARE`, `GROUP`, `PUBLIC`, `DATABASE ROLE`, `APPLICATION [ROLE]`);
    /// when the prefix is omitted, the previous grantee's type carries over.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            // Determine this grantee's type from an optional leading keyword.
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No explicit keyword: inherit the previous grantee's type.
                grantee_type.clone()
            };

            // Some dialects treat these words as plain identifiers; in that
            // case push the keyword token back so it is re-read as a name.
            // NOTE(review): if no keyword was consumed (the inherit branch
            // above) this still rewinds one token — confirm reserved types can
            // only reach this point via a freshly consumed keyword.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC has no name component.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // Fold `namespace:ident` into a single-part object name.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
15669
15670 pub fn parse_grant_deny_revoke_privileges_objects(
15671 &mut self,
15672 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15673 let privileges = if self.parse_keyword(Keyword::ALL) {
15674 Privileges::All {
15675 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15676 }
15677 } else {
15678 let actions = self.parse_actions_list()?;
15679 Privileges::Actions(actions)
15680 };
15681
15682 let objects = if self.parse_keyword(Keyword::ON) {
15683 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15684 Some(GrantObjects::AllTablesInSchema {
15685 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15686 })
15687 } else if self.parse_keywords(&[
15688 Keyword::ALL,
15689 Keyword::EXTERNAL,
15690 Keyword::TABLES,
15691 Keyword::IN,
15692 Keyword::SCHEMA,
15693 ]) {
15694 Some(GrantObjects::AllExternalTablesInSchema {
15695 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15696 })
15697 } else if self.parse_keywords(&[
15698 Keyword::ALL,
15699 Keyword::VIEWS,
15700 Keyword::IN,
15701 Keyword::SCHEMA,
15702 ]) {
15703 Some(GrantObjects::AllViewsInSchema {
15704 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15705 })
15706 } else if self.parse_keywords(&[
15707 Keyword::ALL,
15708 Keyword::MATERIALIZED,
15709 Keyword::VIEWS,
15710 Keyword::IN,
15711 Keyword::SCHEMA,
15712 ]) {
15713 Some(GrantObjects::AllMaterializedViewsInSchema {
15714 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15715 })
15716 } else if self.parse_keywords(&[
15717 Keyword::ALL,
15718 Keyword::FUNCTIONS,
15719 Keyword::IN,
15720 Keyword::SCHEMA,
15721 ]) {
15722 Some(GrantObjects::AllFunctionsInSchema {
15723 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15724 })
15725 } else if self.parse_keywords(&[
15726 Keyword::FUTURE,
15727 Keyword::SCHEMAS,
15728 Keyword::IN,
15729 Keyword::DATABASE,
15730 ]) {
15731 Some(GrantObjects::FutureSchemasInDatabase {
15732 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15733 })
15734 } else if self.parse_keywords(&[
15735 Keyword::FUTURE,
15736 Keyword::TABLES,
15737 Keyword::IN,
15738 Keyword::SCHEMA,
15739 ]) {
15740 Some(GrantObjects::FutureTablesInSchema {
15741 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15742 })
15743 } else if self.parse_keywords(&[
15744 Keyword::FUTURE,
15745 Keyword::EXTERNAL,
15746 Keyword::TABLES,
15747 Keyword::IN,
15748 Keyword::SCHEMA,
15749 ]) {
15750 Some(GrantObjects::FutureExternalTablesInSchema {
15751 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15752 })
15753 } else if self.parse_keywords(&[
15754 Keyword::FUTURE,
15755 Keyword::VIEWS,
15756 Keyword::IN,
15757 Keyword::SCHEMA,
15758 ]) {
15759 Some(GrantObjects::FutureViewsInSchema {
15760 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15761 })
15762 } else if self.parse_keywords(&[
15763 Keyword::FUTURE,
15764 Keyword::MATERIALIZED,
15765 Keyword::VIEWS,
15766 Keyword::IN,
15767 Keyword::SCHEMA,
15768 ]) {
15769 Some(GrantObjects::FutureMaterializedViewsInSchema {
15770 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15771 })
15772 } else if self.parse_keywords(&[
15773 Keyword::ALL,
15774 Keyword::SEQUENCES,
15775 Keyword::IN,
15776 Keyword::SCHEMA,
15777 ]) {
15778 Some(GrantObjects::AllSequencesInSchema {
15779 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15780 })
15781 } else if self.parse_keywords(&[
15782 Keyword::FUTURE,
15783 Keyword::SEQUENCES,
15784 Keyword::IN,
15785 Keyword::SCHEMA,
15786 ]) {
15787 Some(GrantObjects::FutureSequencesInSchema {
15788 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15789 })
15790 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15791 Some(GrantObjects::ResourceMonitors(
15792 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15793 ))
15794 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15795 Some(GrantObjects::ComputePools(
15796 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15797 ))
15798 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15799 Some(GrantObjects::FailoverGroup(
15800 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15801 ))
15802 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15803 Some(GrantObjects::ReplicationGroup(
15804 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15805 ))
15806 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15807 Some(GrantObjects::ExternalVolumes(
15808 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15809 ))
15810 } else {
15811 let object_type = self.parse_one_of_keywords(&[
15812 Keyword::SEQUENCE,
15813 Keyword::DATABASE,
15814 Keyword::SCHEMA,
15815 Keyword::TABLE,
15816 Keyword::VIEW,
15817 Keyword::WAREHOUSE,
15818 Keyword::INTEGRATION,
15819 Keyword::VIEW,
15820 Keyword::WAREHOUSE,
15821 Keyword::INTEGRATION,
15822 Keyword::USER,
15823 Keyword::CONNECTION,
15824 Keyword::PROCEDURE,
15825 Keyword::FUNCTION,
15826 ]);
15827 let objects =
15828 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
15829 match object_type {
15830 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
15831 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
15832 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
15833 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
15834 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
15835 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
15836 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
15837 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
15838 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
15839 if let Some(name) = objects?.first() {
15840 self.parse_grant_procedure_or_function(name, &kw)?
15841 } else {
15842 self.expected("procedure or function name", self.peek_token())?
15843 }
15844 }
15845 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
15846 Some(unexpected_keyword) => return Err(ParserError::ParserError(
15847 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
15848 )),
15849 }
15850 }
15851 } else {
15852 None
15853 };
15854
15855 Ok((privileges, objects))
15856 }
15857
15858 fn parse_grant_procedure_or_function(
15859 &mut self,
15860 name: &ObjectName,
15861 kw: &Option<Keyword>,
15862 ) -> Result<Option<GrantObjects>, ParserError> {
15863 let arg_types = if self.consume_token(&Token::LParen) {
15864 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
15865 self.expect_token(&Token::RParen)?;
15866 list
15867 } else {
15868 vec![]
15869 };
15870 match kw {
15871 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
15872 name: name.clone(),
15873 arg_types,
15874 })),
15875 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
15876 name: name.clone(),
15877 arg_types,
15878 })),
15879 _ => self.expected("procedure or function keywords", self.peek_token())?,
15880 }
15881 }
15882
    /// Parse a single privilege (an [`Action`]) inside the privilege list of
    /// a GRANT/DENY/REVOKE statement, e.g. `SELECT (a, b)` or
    /// `IMPORTED PRIVILEGES`.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Optional parenthesized column list; an absent or empty list is
        // normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges are tried first. `parse_keywords` backtracks
        // on a partial match, so a shared prefix keyword (e.g. `READ` in
        // `READ SESSION`) is only consumed when the full sequence matches.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            // `DATABASE ROLE <name>` carries the role name.
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)
        // Single-keyword privileges, some with an optional sub-clause.
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            // Optional object type, e.g. `CREATE SCHEMA`.
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            // `INSERT (col, ...)` restricts the privilege to columns.
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected("a privilege keyword", self.peek_token())?
        }
    }
16002
    /// Parse the optional object type following a `CREATE` privilege in a
    /// grant list; returns `None` when no recognized type follows (nothing is
    /// consumed in that case).
    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
        // Multi-word object types first; `parse_keywords` backtracks on a
        // partial match, so single-keyword checks below remain reachable.
        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
            Some(ActionCreateObjectType::ApplicationPackage)
        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
            Some(ActionCreateObjectType::ComputePool)
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
            Some(ActionCreateObjectType::DataExchangeListing)
        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
            Some(ActionCreateObjectType::ExternalVolume)
        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
            Some(ActionCreateObjectType::FailoverGroup)
        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
            Some(ActionCreateObjectType::NetworkPolicy)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
            // NOTE(review): the variant name is misspelled ("Organiation") in
            // the AST definition; renaming it is an AST-level change that
            // cannot be made from this file.
            Some(ActionCreateObjectType::OrganiationListing)
        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
            Some(ActionCreateObjectType::ReplicationGroup)
        // Single-keyword object types.
        } else if self.parse_keyword(Keyword::ACCOUNT) {
            Some(ActionCreateObjectType::Account)
        } else if self.parse_keyword(Keyword::APPLICATION) {
            Some(ActionCreateObjectType::Application)
        } else if self.parse_keyword(Keyword::DATABASE) {
            Some(ActionCreateObjectType::Database)
        } else if self.parse_keyword(Keyword::INTEGRATION) {
            Some(ActionCreateObjectType::Integration)
        } else if self.parse_keyword(Keyword::ROLE) {
            Some(ActionCreateObjectType::Role)
        } else if self.parse_keyword(Keyword::SCHEMA) {
            Some(ActionCreateObjectType::Schema)
        } else if self.parse_keyword(Keyword::SHARE) {
            Some(ActionCreateObjectType::Share)
        } else if self.parse_keyword(Keyword::USER) {
            Some(ActionCreateObjectType::User)
        } else if self.parse_keyword(Keyword::WAREHOUSE) {
            Some(ActionCreateObjectType::Warehouse)
        } else {
            None
        }
    }
16045
16046 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
16047 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
16048 Ok(ActionApplyType::AggregationPolicy)
16049 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
16050 Ok(ActionApplyType::AuthenticationPolicy)
16051 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
16052 Ok(ActionApplyType::JoinPolicy)
16053 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
16054 Ok(ActionApplyType::MaskingPolicy)
16055 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
16056 Ok(ActionApplyType::PackagesPolicy)
16057 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
16058 Ok(ActionApplyType::PasswordPolicy)
16059 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
16060 Ok(ActionApplyType::ProjectionPolicy)
16061 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
16062 Ok(ActionApplyType::RowAccessPolicy)
16063 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
16064 Ok(ActionApplyType::SessionPolicy)
16065 } else if self.parse_keyword(Keyword::TAG) {
16066 Ok(ActionApplyType::Tag)
16067 } else {
16068 self.expected("GRANT APPLY type", self.peek_token())
16069 }
16070 }
16071
16072 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
16073 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
16074 Some(ActionExecuteObjectType::DataMetricFunction)
16075 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
16076 Some(ActionExecuteObjectType::ManagedAlert)
16077 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
16078 Some(ActionExecuteObjectType::ManagedTask)
16079 } else if self.parse_keyword(Keyword::ALERT) {
16080 Some(ActionExecuteObjectType::Alert)
16081 } else if self.parse_keyword(Keyword::TASK) {
16082 Some(ActionExecuteObjectType::Task)
16083 } else {
16084 None
16085 }
16086 }
16087
16088 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
16089 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
16090 Ok(ActionManageType::AccountSupportCases)
16091 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
16092 Ok(ActionManageType::EventSharing)
16093 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
16094 Ok(ActionManageType::ListingAutoFulfillment)
16095 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
16096 Ok(ActionManageType::OrganizationSupportCases)
16097 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
16098 Ok(ActionManageType::UserSupportCases)
16099 } else if self.parse_keyword(Keyword::GRANTS) {
16100 Ok(ActionManageType::Grants)
16101 } else if self.parse_keyword(Keyword::WAREHOUSES) {
16102 Ok(ActionManageType::Warehouses)
16103 } else {
16104 self.expected("GRANT MANAGE type", self.peek_token())
16105 }
16106 }
16107
16108 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
16109 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
16110 Some(ActionModifyType::LogLevel)
16111 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
16112 Some(ActionModifyType::TraceLevel)
16113 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
16114 Some(ActionModifyType::SessionLogLevel)
16115 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
16116 Some(ActionModifyType::SessionTraceLevel)
16117 } else {
16118 None
16119 }
16120 }
16121
16122 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
16123 if self.parse_keyword(Keyword::EXECUTION) {
16124 Some(ActionMonitorType::Execution)
16125 } else if self.parse_keyword(Keyword::SECURITY) {
16126 Some(ActionMonitorType::Security)
16127 } else if self.parse_keyword(Keyword::USAGE) {
16128 Some(ActionMonitorType::Usage)
16129 } else {
16130 None
16131 }
16132 }
16133
    /// Parse the name of a grantee (the target of a GRANT/DENY/REVOKE).
    ///
    /// For dialects that support it, a single-part plain identifier followed
    /// by `@` is parsed as a `user@host` pair; otherwise the name is returned
    /// as an ordinary object name.
    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
        let mut name = self.parse_object_name(false)?;
        // `user@host` form. Note the `&&` short-circuit: `consume_token` only
        // advances the parser when every earlier condition already holds.
        if self.dialect.supports_user_host_grantee()
            && name.0.len() == 1
            && name.0[0].as_ident().is_some()
            && self.consume_token(&Token::AtSign)
        {
            // Both unwraps are safe: length == 1 and `as_ident().is_some()`
            // were just checked above.
            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
            let host = self.parse_identifier()?;
            Ok(GranteeName::UserHost { user, host })
        } else {
            Ok(GranteeName::ObjectName(name))
        }
    }
16148
16149 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
16151 self.expect_keyword(Keyword::DENY)?;
16152
16153 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16154 let objects = match objects {
16155 Some(o) => o,
16156 None => {
16157 return parser_err!(
16158 "DENY statements must specify an object",
16159 self.peek_token().span.start
16160 )
16161 }
16162 };
16163
16164 self.expect_keyword_is(Keyword::TO)?;
16165 let grantees = self.parse_grantees()?;
16166 let cascade = self.parse_cascade_option();
16167 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
16168 Some(self.parse_identifier()?)
16169 } else {
16170 None
16171 };
16172
16173 Ok(Statement::Deny(DenyStatement {
16174 privileges,
16175 objects,
16176 grantees,
16177 cascade,
16178 granted_by,
16179 }))
16180 }
16181
16182 pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
16184 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16185
16186 self.expect_keyword_is(Keyword::FROM)?;
16187 let grantees = self.parse_grantees()?;
16188
16189 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16190 Some(self.parse_identifier()?)
16191 } else {
16192 None
16193 };
16194
16195 let cascade = self.parse_cascade_option();
16196
16197 Ok(Statement::Revoke {
16198 privileges,
16199 objects,
16200 grantees,
16201 granted_by,
16202 cascade,
16203 })
16204 }
16205
16206 pub fn parse_replace(
16208 &mut self,
16209 replace_token: TokenWithSpan,
16210 ) -> Result<Statement, ParserError> {
16211 if !dialect_of!(self is MySqlDialect | GenericDialect) {
16212 return parser_err!(
16213 "Unsupported statement REPLACE",
16214 self.peek_token().span.start
16215 );
16216 }
16217
16218 let mut insert = self.parse_insert(replace_token)?;
16219 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16220 *replace_into = true;
16221 }
16222
16223 Ok(insert)
16224 }
16225
16226 fn parse_insert_setexpr_boxed(
16230 &mut self,
16231 insert_token: TokenWithSpan,
16232 ) -> Result<Box<SetExpr>, ParserError> {
16233 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
16234 }
16235
    /// Parse an `INSERT` statement; `insert_token` is the already-consumed
    /// `INSERT` (or `REPLACE`, via [`Parser::parse_replace`]) token.
    ///
    /// Also handles the `INSERT ... DIRECTORY '<path>'` form, which produces
    /// a [`Statement::Directory`] instead of a [`Statement::Insert`].
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        // SQLite-style `OR <action>` conflict clause.
        let or = self.parse_conflict_clause();
        // MySQL priority modifiers (LOW_PRIORITY | DELAYED | HIGH_PRIORITY).
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Always false here; `parse_replace` flips it after this returns.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // Hive-style `INSERT OVERWRITE [LOCAL] DIRECTORY '<path>'`: write the
        // query result to a directory instead of a table.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Regular insert into a table object.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Postgres allows aliasing the target table with `AS <alias>`.
            let table_alias =
                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

            let is_mysql = dialect_of!(self is MySqlDialect);

            // Column list, partition clause, and the source of the rows.
            let (columns, partitioned, after_columns, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT ... DEFAULT VALUES`: no columns, no source query.
                (vec![], None, vec![], None, vec![])
            } else {
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    // A parenthesized list here is a column list — the peek
                    // above rules out a `(SELECT ...)` source.
                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after the partition
                    // clause.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                // Source rows: none when a FORMAT/SETTINGS clause follows,
                // `SET a = 1, ...` assignments for dialects that support it,
                // otherwise a query (VALUES or SELECT).
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (columns, partitioned, after_columns, source, assignments)
            };

            // Optional `SETTINGS` and `FORMAT` clauses for dialects with
            // insert-format support (e.g. ClickHouse-style input formats).
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias [(col_aliases)]` row alias.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ... DO NOTHING|UPDATE` or
            // `ON DUPLICATE KEY UPDATE ...`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Conflict target: a named constraint or a column list.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token() == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    // `ON` without `CONFLICT` must be `ON DUPLICATE KEY
                    // UPDATE`.
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            // Optional `RETURNING <items>` clause.
            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Statement::Insert(Insert {
                insert_token: insert_token.into(),
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
            }))
        }
    }
16429
16430 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16434 let ident = self.parse_identifier()?;
16435 let values = self
16436 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16437 .unwrap_or_default();
16438
16439 Ok(InputFormatClause { ident, values })
16440 }
16441
16442 fn peek_subquery_start(&mut self) -> bool {
16445 let [maybe_lparen, maybe_select] = self.peek_tokens();
16446 Token::LParen == maybe_lparen
16447 && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16448 }
16449
16450 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16451 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16452 Some(SqliteOnConflict::Replace)
16453 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16454 Some(SqliteOnConflict::Rollback)
16455 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16456 Some(SqliteOnConflict::Abort)
16457 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16458 Some(SqliteOnConflict::Fail)
16459 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16460 Some(SqliteOnConflict::Ignore)
16461 } else if self.parse_keyword(Keyword::REPLACE) {
16462 Some(SqliteOnConflict::Replace)
16463 } else {
16464 None
16465 }
16466 }
16467
16468 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16469 if self.parse_keyword(Keyword::PARTITION) {
16470 self.expect_token(&Token::LParen)?;
16471 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16472 self.expect_token(&Token::RParen)?;
16473 Ok(partition_cols)
16474 } else {
16475 Ok(None)
16476 }
16477 }
16478
16479 pub fn parse_load_data_table_format(
16480 &mut self,
16481 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16482 if self.parse_keyword(Keyword::INPUTFORMAT) {
16483 let input_format = self.parse_expr()?;
16484 self.expect_keyword_is(Keyword::SERDE)?;
16485 let serde = self.parse_expr()?;
16486 Ok(Some(HiveLoadDataFormat {
16487 input_format,
16488 serde,
16489 }))
16490 } else {
16491 Ok(None)
16492 }
16493 }
16494
16495 fn parse_update_setexpr_boxed(
16499 &mut self,
16500 update_token: TokenWithSpan,
16501 ) -> Result<Box<SetExpr>, ParserError> {
16502 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
16503 }
16504
16505 pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
16506 let or = self.parse_conflict_clause();
16507 let table = self.parse_table_and_joins()?;
16508 let from_before_set = if self.parse_keyword(Keyword::FROM) {
16509 Some(UpdateTableFromKind::BeforeSet(
16510 self.parse_table_with_joins()?,
16511 ))
16512 } else {
16513 None
16514 };
16515 self.expect_keyword(Keyword::SET)?;
16516 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16517 let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16518 Some(UpdateTableFromKind::AfterSet(
16519 self.parse_table_with_joins()?,
16520 ))
16521 } else {
16522 from_before_set
16523 };
16524 let selection = if self.parse_keyword(Keyword::WHERE) {
16525 Some(self.parse_expr()?)
16526 } else {
16527 None
16528 };
16529 let returning = if self.parse_keyword(Keyword::RETURNING) {
16530 Some(self.parse_comma_separated(Parser::parse_select_item)?)
16531 } else {
16532 None
16533 };
16534 let limit = if self.parse_keyword(Keyword::LIMIT) {
16535 Some(self.parse_expr()?)
16536 } else {
16537 None
16538 };
16539 Ok(Update {
16540 update_token: update_token.into(),
16541 table,
16542 assignments,
16543 from,
16544 selection,
16545 returning,
16546 or,
16547 limit,
16548 }
16549 .into())
16550 }
16551
16552 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16554 let target = self.parse_assignment_target()?;
16555 self.expect_token(&Token::Eq)?;
16556 let value = self.parse_expr()?;
16557 Ok(Assignment { target, value })
16558 }
16559
16560 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16562 if self.consume_token(&Token::LParen) {
16563 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16564 self.expect_token(&Token::RParen)?;
16565 Ok(AssignmentTarget::Tuple(columns))
16566 } else {
16567 let column = self.parse_object_name(false)?;
16568 Ok(AssignmentTarget::ColumnName(column))
16569 }
16570 }
16571
16572 pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16573 let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16574 self.maybe_parse(|p| {
16575 let name = p.parse_expr()?;
16576 let operator = p.parse_function_named_arg_operator()?;
16577 let arg = p.parse_wildcard_expr()?.into();
16578 Ok(FunctionArg::ExprNamed {
16579 name,
16580 arg,
16581 operator,
16582 })
16583 })?
16584 } else {
16585 self.maybe_parse(|p| {
16586 let name = p.parse_identifier()?;
16587 let operator = p.parse_function_named_arg_operator()?;
16588 let arg = p.parse_wildcard_expr()?.into();
16589 Ok(FunctionArg::Named {
16590 name,
16591 arg,
16592 operator,
16593 })
16594 })?
16595 };
16596 if let Some(arg) = arg {
16597 return Ok(arg);
16598 }
16599 Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16600 }
16601
    /// Parse the operator that separates a named function argument from its
    /// value: the `VALUE` keyword, or one of `=>`, `=`, `:=`, `:` depending
    /// on what the dialect supports.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        // `name VALUE expr` form — accepted without a dialect check.
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not an argument operator: push the token back so the caller
                // (typically running inside `maybe_parse`) can backtrack
                // cleanly.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
16630
16631 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16632 if self.consume_token(&Token::RParen) {
16633 Ok(vec![])
16634 } else {
16635 let args = self.parse_comma_separated(Parser::parse_function_args)?;
16636 self.expect_token(&Token::RParen)?;
16637 Ok(args)
16638 }
16639 }
16640
    /// Parses a table function's argument list up to and including the
    /// closing `)`, allowing a trailing `SETTINGS` list in place of further
    /// arguments.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        let settings = loop {
            // A SETTINGS clause, if present, terminates the argument list.
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
16661
    /// Parses the contents of a function call's parenthesized argument list,
    /// up to and including the closing `)`. Handles optional leading and
    /// trailing JSON NULL / RETURNING clauses, `ALL`/`DISTINCT`, the
    /// arguments themselves, and dialect-specific trailing clauses
    /// (null treatment, ORDER BY, LIMIT, HAVING bound, SEPARATOR,
    /// ON OVERFLOW).
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON NULL / RETURNING clauses may precede the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list (possibly with only the clauses above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // `IGNORE NULLS` / `RESPECT NULLS` inside the call, where supported.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery-style `HAVING MIN <expr>` / `HAVING MAX <expr>` bound.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // Unreachable: expect_one_of_keywords only yields MIN or MAX.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL GROUP_CONCAT-style `SEPARATOR '<string>'`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON NULL / RETURNING clauses may also follow the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
16755
16756 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16757 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16758 Some(JsonNullClause::AbsentOnNull)
16759 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16760 Some(JsonNullClause::NullOnNull)
16761 } else {
16762 None
16763 }
16764 }
16765
16766 fn maybe_parse_json_returning_clause(
16767 &mut self,
16768 ) -> Result<Option<JsonReturningClause>, ParserError> {
16769 if self.parse_keyword(Keyword::RETURNING) {
16770 let data_type = self.parse_data_type()?;
16771 Ok(Some(JsonReturningClause { data_type }))
16772 } else {
16773 Ok(None)
16774 }
16775 }
16776
    /// Parses an optional leading `ALL` or `DISTINCT` inside a function
    /// call's argument list. Specifying both is rejected with a parse error
    /// located at the first of the two keywords.
    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
        // Captured before consuming, so an `ALL DISTINCT` error points here.
        let loc = self.peek_token().span.start;
        match (
            self.parse_keyword(Keyword::ALL),
            self.parse_keyword(Keyword::DISTINCT),
        ) {
            (true, false) => Ok(Some(DuplicateTreatment::All)),
            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
            (false, false) => Ok(None),
            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
        }
    }
16789
    /// Parses one item of a SELECT projection: a wildcard (`*`, `t.*`,
    /// `expr.*`), a bare expression, or an expression with an alias
    /// (including the `alias = expr` form on dialects that support it).
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Some dialects reserve keywords that may prefix a select item;
        // capture one here as an identifier and re-apply it below.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted `from` here almost certainly means a missing
            // expression (e.g. `SELECT from t`); report it as such.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token().span.start
                )
            }
            // `alias = expr` select item on dialects supporting eq-alias
            // assignment; the guard guarantees the `let else` below matches.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` qualified wildcard, where the dialect allows a star
            // after an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // Plain expression, with an optional trailing alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
16851
    /// Parses the dialect-specific modifiers that may follow a wildcard in
    /// the projection: `ILIKE`, `EXCLUDE`, `EXCEPT`, `REPLACE` and `RENAME`.
    /// `wildcard_token` is the `*` token the options attach to.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // EXCLUDE is only attempted when no ILIKE was parsed.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
        {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
        })
    }
16896
16897 pub fn parse_optional_select_item_ilike(
16901 &mut self,
16902 ) -> Result<Option<IlikeSelectItem>, ParserError> {
16903 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
16904 let next_token = self.next_token();
16905 let pattern = match next_token.token {
16906 Token::SingleQuotedString(s) => s,
16907 _ => return self.expected("ilike pattern", next_token),
16908 };
16909 Some(IlikeSelectItem { pattern })
16910 } else {
16911 None
16912 };
16913 Ok(opt_ilike)
16914 }
16915
16916 pub fn parse_optional_select_item_exclude(
16920 &mut self,
16921 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
16922 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
16923 if self.consume_token(&Token::LParen) {
16924 let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16925 self.expect_token(&Token::RParen)?;
16926 Some(ExcludeSelectItem::Multiple(columns))
16927 } else {
16928 let column = self.parse_identifier()?;
16929 Some(ExcludeSelectItem::Single(column))
16930 }
16931 } else {
16932 None
16933 };
16934
16935 Ok(opt_exclude)
16936 }
16937
16938 pub fn parse_optional_select_item_except(
16942 &mut self,
16943 ) -> Result<Option<ExceptSelectItem>, ParserError> {
16944 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
16945 if self.peek_token().token == Token::LParen {
16946 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
16947 match &idents[..] {
16948 [] => {
16949 return self.expected(
16950 "at least one column should be parsed by the expect clause",
16951 self.peek_token(),
16952 )?;
16953 }
16954 [first, idents @ ..] => Some(ExceptSelectItem {
16955 first_element: first.clone(),
16956 additional_elements: idents.to_vec(),
16957 }),
16958 }
16959 } else {
16960 let ident = self.parse_identifier()?;
16962 Some(ExceptSelectItem {
16963 first_element: ident,
16964 additional_elements: vec![],
16965 })
16966 }
16967 } else {
16968 None
16969 };
16970
16971 Ok(opt_except)
16972 }
16973
16974 pub fn parse_optional_select_item_rename(
16976 &mut self,
16977 ) -> Result<Option<RenameSelectItem>, ParserError> {
16978 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
16979 if self.consume_token(&Token::LParen) {
16980 let idents =
16981 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
16982 self.expect_token(&Token::RParen)?;
16983 Some(RenameSelectItem::Multiple(idents))
16984 } else {
16985 let ident = self.parse_identifier_with_alias()?;
16986 Some(RenameSelectItem::Single(ident))
16987 }
16988 } else {
16989 None
16990 };
16991
16992 Ok(opt_rename)
16993 }
16994
16995 pub fn parse_optional_select_item_replace(
16997 &mut self,
16998 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
16999 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
17000 if self.consume_token(&Token::LParen) {
17001 let items = self.parse_comma_separated(|parser| {
17002 Ok(Box::new(parser.parse_replace_elements()?))
17003 })?;
17004 self.expect_token(&Token::RParen)?;
17005 Some(ReplaceSelectItem { items })
17006 } else {
17007 let tok = self.next_token();
17008 return self.expected("( after REPLACE but", tok);
17009 }
17010 } else {
17011 None
17012 };
17013
17014 Ok(opt_replace)
17015 }
17016 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
17017 let expr = self.parse_expr()?;
17018 let as_keyword = self.parse_keyword(Keyword::AS);
17019 let ident = self.parse_identifier()?;
17020 Ok(ReplaceSelectElement {
17021 expr,
17022 column_name: ident,
17023 as_keyword,
17024 })
17025 }
17026
17027 pub fn parse_asc_desc(&mut self) -> Option<bool> {
17030 if self.parse_keyword(Keyword::ASC) {
17031 Some(true)
17032 } else if self.parse_keyword(Keyword::DESC) {
17033 Some(false)
17034 } else {
17035 None
17036 }
17037 }
17038
    /// Parses one ORDER BY expression, including its options (ASC/DESC,
    /// NULLS FIRST/LAST, WITH FILL where supported).
    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
        self.parse_order_by_expr_inner(false)
            .map(|(order_by, _)| order_by)
    }
17044
    /// Parses a CREATE INDEX column: an ORDER BY-style expression optionally
    /// followed by an operator class identifier.
    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
        self.parse_order_by_expr_inner(true)
            .map(|(column, operator_class)| IndexColumn {
                column,
                operator_class,
            })
    }
17053
    /// Shared implementation for ORDER BY expressions and CREATE INDEX
    /// columns. When `with_operator_class` is true, an identifier following
    /// the expression is parsed as an operator class — unless it is one of
    /// the option keywords (ASC/DESC/NULLS/WITH), which belong to the
    /// trailing options instead.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<Ident> = if with_operator_class {
            // A peeked option keyword means there is no operator class here.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_identifier())?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // ClickHouse `WITH FILL [FROM ...] [TO ...] [STEP ...]`.
        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
17094
17095 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
17096 let asc = self.parse_asc_desc();
17097
17098 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
17099 Some(true)
17100 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
17101 Some(false)
17102 } else {
17103 None
17104 };
17105
17106 Ok(OrderByOptions { asc, nulls_first })
17107 }
17108
17109 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
17112 let from = if self.parse_keyword(Keyword::FROM) {
17113 Some(self.parse_expr()?)
17114 } else {
17115 None
17116 };
17117
17118 let to = if self.parse_keyword(Keyword::TO) {
17119 Some(self.parse_expr()?)
17120 } else {
17121 None
17122 };
17123
17124 let step = if self.parse_keyword(Keyword::STEP) {
17125 Some(self.parse_expr()?)
17126 } else {
17127 None
17128 };
17129
17130 Ok(WithFill { from, to, step })
17131 }
17132
17133 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
17136 if !self.parse_keyword(Keyword::INTERPOLATE) {
17137 return Ok(None);
17138 }
17139
17140 if self.consume_token(&Token::LParen) {
17141 let interpolations =
17142 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
17143 self.expect_token(&Token::RParen)?;
17144 return Ok(Some(Interpolate {
17146 exprs: Some(interpolations),
17147 }));
17148 }
17149
17150 Ok(Some(Interpolate { exprs: None }))
17152 }
17153
17154 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
17156 let column = self.parse_identifier()?;
17157 let expr = if self.parse_keyword(Keyword::AS) {
17158 Some(self.parse_expr()?)
17159 } else {
17160 None
17161 };
17162 Ok(InterpolateExpr { column, expr })
17163 }
17164
    /// Parses the body of a `TOP` clause: either `TOP (<expr>)` or a bare
    /// integer literal, optionally followed by `PERCENT` and `WITH TIES`.
    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
        let quantity = if self.consume_token(&Token::LParen) {
            let quantity = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(TopQuantity::Expr(quantity))
        } else {
            let next_token = self.next_token();
            let quantity = match next_token.token {
                // Unparenthesized TOP accepts only an integer literal.
                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
                _ => self.expected("literal int", next_token)?,
            };
            Some(TopQuantity::Constant(quantity))
        };

        let percent = self.parse_keyword(Keyword::PERCENT);

        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);

        Ok(Top {
            with_ties,
            percent,
            quantity,
        })
    }
17191
17192 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17194 if self.parse_keyword(Keyword::ALL) {
17195 Ok(None)
17196 } else {
17197 Ok(Some(self.parse_expr()?))
17198 }
17199 }
17200
17201 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17203 let value = self.parse_expr()?;
17204 let rows = if self.parse_keyword(Keyword::ROW) {
17205 OffsetRows::Row
17206 } else if self.parse_keyword(Keyword::ROWS) {
17207 OffsetRows::Rows
17208 } else {
17209 OffsetRows::None
17210 };
17211 Ok(Offset { value, rows })
17212 }
17213
    /// Parses the body of a `FETCH [FIRST | NEXT] [<quantity> [PERCENT]]
    /// [ROW | ROWS] [ONLY | WITH TIES]` clause.
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);

        // `FETCH FIRST ROW` carries no quantity; otherwise parse the value
        // and an optional PERCENT before the ROW/ROWS keyword.
        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
            (Some(quantity), percent)
        };

        // `ONLY` and `WITH TIES` are mutually exclusive alternatives.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else {
            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
        };

        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
17242
    /// Parses the tail of a locking clause: `UPDATE | SHARE`, an optional
    /// `OF <table>`, and an optional `NOWAIT` / `SKIP LOCKED` behavior.
    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
            Keyword::UPDATE => LockType::Update,
            Keyword::SHARE => LockType::Share,
            // Unreachable: expect_one_of_keywords only returns listed keywords.
            unexpected_keyword => return Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
            )),
        };
        let of = if self.parse_keyword(Keyword::OF) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
            Some(NonBlock::Nowait)
        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
            Some(NonBlock::SkipLocked)
        } else {
            None
        };
        Ok(LockClause {
            lock_type,
            of,
            nonblock,
        })
    }
17270
    /// Parses a `VALUES` list: comma-separated parenthesized rows, each
    /// optionally prefixed with the `ROW` keyword. `allow_empty` permits
    /// `()` rows; `value_keyword` is recorded verbatim in the AST.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            // Any row using `ROW` marks the whole VALUES list as explicit.
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
17299
17300 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
17301 self.expect_keyword_is(Keyword::TRANSACTION)?;
17302 Ok(Statement::StartTransaction {
17303 modes: self.parse_transaction_modes()?,
17304 begin: false,
17305 transaction: Some(BeginTransactionKind::Transaction),
17306 modifier: None,
17307 statements: vec![],
17308 exception: None,
17309 has_end_keyword: false,
17310 })
17311 }
17312
    /// Parses the tail of a `BEGIN` statement: an optional transaction
    /// modifier (on dialects supporting one), an optional
    /// `TRANSACTION`/`WORK` keyword, and trailing transaction modes.
    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
        let modifier = if !self.dialect.supports_start_transaction_modifier() {
            None
        } else if self.parse_keyword(Keyword::DEFERRED) {
            Some(TransactionModifier::Deferred)
        } else if self.parse_keyword(Keyword::IMMEDIATE) {
            Some(TransactionModifier::Immediate)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Some(TransactionModifier::Exclusive)
        } else if self.parse_keyword(Keyword::TRY) {
            Some(TransactionModifier::Try)
        } else if self.parse_keyword(Keyword::CATCH) {
            Some(TransactionModifier::Catch)
        } else {
            None
        };
        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
            _ => None,
        };
        Ok(Statement::StartTransaction {
            modes: self.parse_transaction_modes()?,
            begin: true,
            transaction,
            modifier,
            statements: vec![],
            exception: None,
            has_end_keyword: false,
        })
    }
17344
    /// Parses the statements of a `BEGIN ... END` block, with an optional
    /// `EXCEPTION` section containing one or more
    /// `WHEN <ident> [OR <ident>]... THEN <statements>` handlers.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Parse handlers until the closing END is reached.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // Error identifiers for this handler, separated by OR.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // Optional OR between identifiers; backtracks if absent.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
17389
17390 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17391 let modifier = if !self.dialect.supports_end_transaction_modifier() {
17392 None
17393 } else if self.parse_keyword(Keyword::TRY) {
17394 Some(TransactionModifier::Try)
17395 } else if self.parse_keyword(Keyword::CATCH) {
17396 Some(TransactionModifier::Catch)
17397 } else {
17398 None
17399 };
17400 Ok(Statement::Commit {
17401 chain: self.parse_commit_rollback_chain()?,
17402 end: true,
17403 modifier,
17404 })
17405 }
17406
    /// Parses zero or more transaction modes — `ISOLATION LEVEL ...`,
    /// `READ ONLY`, or `READ WRITE` — separated by commas. After a comma,
    /// another mode is mandatory.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected("isolation level", self.peek_token())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected("transaction mode", self.peek_token())?
            } else {
                break;
            };
            modes.push(mode);
            // A trailing comma makes the next mode mandatory.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
17444
17445 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17446 Ok(Statement::Commit {
17447 chain: self.parse_commit_rollback_chain()?,
17448 end: false,
17449 modifier: None,
17450 })
17451 }
17452
17453 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17454 let chain = self.parse_commit_rollback_chain()?;
17455 let savepoint = self.parse_rollback_savepoint()?;
17456
17457 Ok(Statement::Rollback { chain, savepoint })
17458 }
17459
17460 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17461 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17462 if self.parse_keyword(Keyword::AND) {
17463 let chain = !self.parse_keyword(Keyword::NO);
17464 self.expect_keyword_is(Keyword::CHAIN)?;
17465 Ok(chain)
17466 } else {
17467 Ok(false)
17468 }
17469 }
17470
17471 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17472 if self.parse_keyword(Keyword::TO) {
17473 let _ = self.parse_keyword(Keyword::SAVEPOINT);
17474 let savepoint = self.parse_identifier()?;
17475
17476 Ok(Some(savepoint))
17477 } else {
17478 Ok(None)
17479 }
17480 }
17481
    /// Parses the tail of a `RAISERROR (msg, severity, state [, args...])
    /// [WITH option, ...]` statement.
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional substitution arguments after the three mandatory ones.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
17509
17510 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17511 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17512 Keyword::LOG => Ok(RaisErrorOption::Log),
17513 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17514 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17515 _ => self.expected(
17516 "LOG, NOWAIT OR SETERROR raiserror option",
17517 self.peek_token(),
17518 ),
17519 }
17520 }
17521
17522 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17523 let prepare = self.parse_keyword(Keyword::PREPARE);
17524 let name = self.parse_identifier()?;
17525 Ok(Statement::Deallocate { name, prepare })
17526 }
17527
    /// Parses the tail of an `EXECUTE` statement: either `EXECUTE IMMEDIATE`
    /// (where supported) or a named statement, with an optional
    /// parenthesized or bare parameter list and trailing `INTO`, `USING`,
    /// `OUTPUT` and `DEFAULT` clauses.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let name = if self.dialect.supports_execute_immediate()
            && self.parse_keyword(Keyword::IMMEDIATE)
        {
            None
        } else {
            let name = self.parse_object_name(false)?;
            Some(name)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Determine which token ends the (possibly empty) parameter list:
        // `)` when parenthesized; otherwise EOF, one of the terminating
        // keywords, or `;`.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            // IMMEDIATE is encoded as the absence of a statement name.
            immediate: name.is_none(),
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
17581
17582 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17583 let name = self.parse_identifier()?;
17584
17585 let mut data_types = vec![];
17586 if self.consume_token(&Token::LParen) {
17587 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17588 self.expect_token(&Token::RParen)?;
17589 }
17590
17591 self.expect_keyword_is(Keyword::AS)?;
17592 let statement = Box::new(self.parse_statement()?);
17593 Ok(Statement::Prepare {
17594 name,
17595 data_types,
17596 statement,
17597 })
17598 }
17599
    /// Parses an `UNLOAD ('<query text>' | <query>) TO <location>` statement
    /// with an optional `IAM_ROLE`, `WITH (...)` options, and trailing
    /// legacy copy options.
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::UNLOAD)?;
        self.expect_token(&Token::LParen)?;
        // The source is either a quoted query text or an inline query.
        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
        {
            (None, Some(self.parse_literal_string()?))
        } else {
            (Some(self.parse_query()?), None)
        };
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;
        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
            Some(self.parse_iam_role_kind()?)
        } else {
            None
        };
        let with = self.parse_options(Keyword::WITH)?;
        // Greedily parse any trailing COPY-style legacy options.
        let mut options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            options.push(opt);
        }
        Ok(Statement::Unload {
            query,
            query_text,
            to,
            auth,
            with,
            options,
        })
    }
17632
17633 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17634 let temporary = self
17635 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17636 .is_some();
17637 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17638 let table = self.parse_keyword(Keyword::TABLE);
17639 let name = self.parse_object_name(false)?;
17640
17641 Ok(SelectInto {
17642 temporary,
17643 unlogged,
17644 table,
17645 name,
17646 })
17647 }
17648
    /// Parses a PRAGMA value, restricted to a single- or double-quoted
    /// string, a number, or a `?` placeholder; anything else is rejected.
    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
        match self.parse_value()?.value {
            v @ Value::SingleQuotedString(_) => Ok(v),
            v @ Value::DoubleQuotedString(_) => Ok(v),
            v @ Value::Number(_, _) => Ok(v),
            v @ Value::Placeholder(_) => Ok(v),
            _ => {
                // Rewind so the error message points at the offending token.
                self.prev_token();
                self.expected("number or string or ? placeholder", self.peek_token())
            }
        }
    }
17661
17662 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17664 let name = self.parse_object_name(false)?;
17665 if self.consume_token(&Token::LParen) {
17666 let value = self.parse_pragma_value()?;
17667 self.expect_token(&Token::RParen)?;
17668 Ok(Statement::Pragma {
17669 name,
17670 value: Some(value),
17671 is_eq: false,
17672 })
17673 } else if self.consume_token(&Token::Eq) {
17674 Ok(Statement::Pragma {
17675 name,
17676 value: Some(self.parse_pragma_value()?),
17677 is_eq: true,
17678 })
17679 } else {
17680 Ok(Statement::Pragma {
17681 name,
17682 value: None,
17683 is_eq: false,
17684 })
17685 }
17686 }
17687
17688 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17690 let extension_name = self.parse_identifier()?;
17691
17692 Ok(Statement::Install { extension_name })
17693 }
17694
    /// Parses the tail of a `LOAD` statement: `LOAD <extension>` for
    /// dialects with loadable extensions, or `LOAD DATA [LOCAL] INPATH
    /// '<path>' [OVERWRITE] INTO TABLE <name> [PARTITION ...]` followed by
    /// an optional table-format clause, for dialects supporting LOAD DATA.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token(),
            )
        }
    }
17725
17726 pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
17731 self.expect_keyword_is(Keyword::TABLE)?;
17732 let name = self.parse_object_name(false)?;
17733 let on_cluster = self.parse_optional_on_cluster()?;
17734
17735 let partition = if self.parse_keyword(Keyword::PARTITION) {
17736 if self.parse_keyword(Keyword::ID) {
17737 Some(Partition::Identifier(self.parse_identifier()?))
17738 } else {
17739 Some(Partition::Expr(self.parse_expr()?))
17740 }
17741 } else {
17742 None
17743 };
17744
17745 let include_final = self.parse_keyword(Keyword::FINAL);
17746 let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
17747 if self.parse_keyword(Keyword::BY) {
17748 Some(Deduplicate::ByExpression(self.parse_expr()?))
17749 } else {
17750 Some(Deduplicate::All)
17751 }
17752 } else {
17753 None
17754 };
17755
17756 Ok(Statement::OptimizeTable {
17757 name,
17758 on_cluster,
17759 partition,
17760 include_final,
17761 deduplicate,
17762 })
17763 }
17764
17765 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
17771 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17773 let name = self.parse_object_name(false)?;
17775 let mut data_type: Option<DataType> = None;
17777 if self.parse_keywords(&[Keyword::AS]) {
17778 data_type = Some(self.parse_data_type()?)
17779 }
17780 let sequence_options = self.parse_create_sequence_options()?;
17781 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
17783 if self.parse_keywords(&[Keyword::NONE]) {
17784 Some(ObjectName::from(vec![Ident::new("NONE")]))
17785 } else {
17786 Some(self.parse_object_name(false)?)
17787 }
17788 } else {
17789 None
17790 };
17791 Ok(Statement::CreateSequence {
17792 temporary,
17793 if_not_exists,
17794 name,
17795 data_type,
17796 sequence_options,
17797 owned_by,
17798 })
17799 }
17800
    /// Parses the option list of a `CREATE SEQUENCE` statement:
    /// `INCREMENT [BY] n`, `[NO] MINVALUE [n]`, `[NO] MAXVALUE [n]`,
    /// `START [WITH] n`, `CACHE n`, `[NO] CYCLE`. Any subset may appear,
    /// but options are recognized in this fixed order.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // The boolean records whether the optional `BY` keyword was written,
        // so the statement can be round-tripped exactly as entered.
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // `MINVALUE n` vs `NO MINVALUE` (None).
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        // `MAXVALUE n` vs `NO MAXVALUE` (None).
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // As with INCREMENT BY, the boolean records whether `WITH` was present.
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // NOTE(review): `NO CYCLE` maps to Cycle(true) and `CYCLE` to
        // Cycle(false) — the flag appears to mean "the NO prefix was
        // present"; confirm against the SequenceOptions definition.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
17845
17846 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
17850 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17851 let name = self.parse_object_name(false)?;
17852
17853 let server_type = if self.parse_keyword(Keyword::TYPE) {
17854 Some(self.parse_identifier()?)
17855 } else {
17856 None
17857 };
17858
17859 let version = if self.parse_keyword(Keyword::VERSION) {
17860 Some(self.parse_identifier()?)
17861 } else {
17862 None
17863 };
17864
17865 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
17866 let foreign_data_wrapper = self.parse_object_name(false)?;
17867
17868 let mut options = None;
17869 if self.parse_keyword(Keyword::OPTIONS) {
17870 self.expect_token(&Token::LParen)?;
17871 options = Some(self.parse_comma_separated(|p| {
17872 let key = p.parse_identifier()?;
17873 let value = p.parse_identifier()?;
17874 Ok(CreateServerOption { key, value })
17875 })?);
17876 self.expect_token(&Token::RParen)?;
17877 }
17878
17879 Ok(Statement::CreateServer(CreateServerStatement {
17880 name,
17881 if_not_exists: ine,
17882 server_type,
17883 version,
17884 foreign_data_wrapper,
17885 options,
17886 }))
17887 }
17888
    /// Returns the parser's current position (index) into the token stream.
    pub fn index(&self) -> usize {
        self.index
    }
17893
17894 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
17895 let ident = self.parse_identifier()?;
17896 self.expect_keyword_is(Keyword::AS)?;
17897
17898 let window_expr = if self.consume_token(&Token::LParen) {
17899 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
17900 } else if self.dialect.supports_window_clause_named_window_reference() {
17901 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
17902 } else {
17903 return self.expected("(", self.peek_token());
17904 };
17905
17906 Ok(NamedWindowDefinition(ident, window_expr))
17907 }
17908
17909 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
17910 let name = self.parse_object_name(false)?;
17911 let params = self.parse_optional_procedure_parameters()?;
17912
17913 let language = if self.parse_keyword(Keyword::LANGUAGE) {
17914 Some(self.parse_identifier()?)
17915 } else {
17916 None
17917 };
17918
17919 self.expect_keyword_is(Keyword::AS)?;
17920
17921 let body = self.parse_conditional_statements(&[Keyword::END])?;
17922
17923 Ok(Statement::CreateProcedure {
17924 name,
17925 or_alter,
17926 params,
17927 language,
17928 body,
17929 })
17930 }
17931
    /// Parses the body of a window specification; the caller has already
    /// consumed the opening `(`, and this method consumes the closing `)`.
    /// Grammar: `[<base window name>] [PARTITION BY ...] [ORDER BY ...]
    /// [<window frame>]`.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword word names a base window that this spec
        // refines (e.g. `(w ORDER BY x)` referencing a named window `w`).
        let window_name = match self.peek_token().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything remaining before the `)` must be a window-frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
17965
    /// Parses a `CREATE TYPE` statement after the `TYPE` keyword.
    /// Supported forms:
    /// * `CREATE TYPE name` — shell type (no representation);
    /// * `CREATE TYPE name (...)` — SQL-definition (base) type;
    /// * `CREATE TYPE name AS ENUM (...)`;
    /// * `CREATE TYPE name AS RANGE (...)`;
    /// * `CREATE TYPE name AS (...)` — composite type.
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_as = self.parse_keyword(Keyword::AS);

        if !has_as {
            // Without AS, a parenthesized list is the base-type definition.
            if self.consume_token(&Token::LParen) {
                let options = self.parse_create_type_sql_definition_options()?;
                self.expect_token(&Token::RParen)?;
                return Ok(Statement::CreateType {
                    name,
                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
                });
            }

            // Bare `CREATE TYPE name`: a shell type with no representation.
            return Ok(Statement::CreateType {
                name,
                representation: None,
            });
        }

        // After AS: dispatch on the representation kind.
        if self.parse_keyword(Keyword::ENUM) {
            self.parse_create_type_enum(name)
        } else if self.parse_keyword(Keyword::RANGE) {
            self.parse_create_type_range(name)
        } else if self.consume_token(&Token::LParen) {
            self.parse_create_type_composite(name)
        } else {
            self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
        }
    }
18005
18006 fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18010 if self.consume_token(&Token::RParen) {
18011 return Ok(Statement::CreateType {
18013 name,
18014 representation: Some(UserDefinedTypeRepresentation::Composite {
18015 attributes: vec![],
18016 }),
18017 });
18018 }
18019
18020 let mut attributes = vec![];
18021 loop {
18022 let attr_name = self.parse_identifier()?;
18023 let attr_data_type = self.parse_data_type()?;
18024 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
18025 Some(self.parse_object_name(false)?)
18026 } else {
18027 None
18028 };
18029 attributes.push(UserDefinedTypeCompositeAttributeDef {
18030 name: attr_name,
18031 data_type: attr_data_type,
18032 collation: attr_collation,
18033 });
18034
18035 if !self.consume_token(&Token::Comma) {
18036 break;
18037 }
18038 }
18039 self.expect_token(&Token::RParen)?;
18040
18041 Ok(Statement::CreateType {
18042 name,
18043 representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
18044 })
18045 }
18046
    /// Parses the label list of `CREATE TYPE <name> AS ENUM (...)`; the
    /// `ENUM` keyword has already been consumed. The list may be empty.
    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
        })
    }
18060
    /// Parses the option list of `CREATE TYPE <name> AS RANGE (...)`; the
    /// `RANGE` keyword has already been consumed. The list may be empty.
    fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Range { options }),
        })
    }
18074
    /// Parses a single option of `CREATE TYPE ... AS RANGE (...)`, each of
    /// the form `<KEYWORD> = <value>`: SUBTYPE takes a data type, all the
    /// other options take an object name.
    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::SUBTYPE,
            Keyword::SUBTYPE_OPCLASS,
            Keyword::COLLATION,
            Keyword::CANONICAL,
            Keyword::SUBTYPE_DIFF,
            Keyword::MULTIRANGE_TYPE_NAME,
        ]);

        match keyword {
            // SUBTYPE is the only option whose value is a data type.
            Some(Keyword::SUBTYPE) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
            }
            Some(Keyword::SUBTYPE_OPCLASS) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
            }
            Some(Keyword::COLLATION) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::Collation(name))
            }
            Some(Keyword::CANONICAL) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::Canonical(name))
            }
            Some(Keyword::SUBTYPE_DIFF) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
            }
            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
            }
            _ => self.expected("range option keyword", self.peek_token()),
        }
    }
18120
    /// Parses the comma-separated option list of the SQL-definition form
    /// `CREATE TYPE <name> (...)`; stops at (but does not consume) the
    /// closing parenthesis.
    fn parse_create_type_sql_definition_options(
        &mut self,
    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
    }
18127
    /// Parses a single option from the SQL-definition (base type) form of
    /// `CREATE TYPE <name> (...)`, e.g. `INPUT = in_fn`,
    /// `INTERNALLENGTH = VARIABLE`, `PASSEDBYVALUE`, `CATEGORY = 'S'`.
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        match keyword {
            // The support-function options below all share the shape
            // `<KEYWORD> = <object name>`.
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            // INTERNALLENGTH = VARIABLE | <unsigned integer>
            Some(Keyword::INTERNALLENGTH) => {
                self.expect_token(&Token::Eq)?;
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // PASSEDBYVALUE is a bare flag with no `= value` part.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            // ALIGNMENT = char | int2 | int4 | double
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token(),
                    ),
                }
            }
            // STORAGE = plain | external | extended | main
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            // CATEGORY = '<single character>'
            Some(Keyword::CATEGORY) => {
                self.expect_token(&Token::Eq)?;
                let category_str = self.parse_literal_string()?;
                // Only the first character is kept; an empty string errors.
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): a value other than TRUE/FALSE leaves the
                // token unconsumed and evaluates to `true` — confirm this
                // default is intended rather than an error.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): same TRUE/FALSE defaulting quirk as
                // PREFERRED above — confirm intended.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected("SQL definition option keyword", self.peek_token()),
        }
    }
18308
    /// Parses a parenthesized, possibly empty, comma-separated list of
    /// identifiers: `(a, b, c)` or `()`.
    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        self.expect_token(&Token::LParen)?;
        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
        self.expect_token(&Token::RParen)?;
        Ok(idents)
    }
18315
18316 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18317 if dialect_of!(self is MySqlDialect | GenericDialect) {
18318 if self.parse_keyword(Keyword::FIRST) {
18319 Ok(Some(MySQLColumnPosition::First))
18320 } else if self.parse_keyword(Keyword::AFTER) {
18321 let ident = self.parse_identifier()?;
18322 Ok(Some(MySQLColumnPosition::After(ident)))
18323 } else {
18324 Ok(None)
18325 }
18326 } else {
18327 Ok(None)
18328 }
18329 }
18330
    /// Parses a `PRINT <expr>` statement (e.g. T-SQL); the `PRINT`
    /// keyword has already been consumed by the caller.
    fn parse_print(&mut self) -> Result<Statement, ParserError> {
        Ok(Statement::Print(PrintStatement {
            message: Box::new(self.parse_expr()?),
        }))
    }
18337
18338 fn parse_return(&mut self) -> Result<Statement, ParserError> {
18340 match self.maybe_parse(|p| p.parse_expr())? {
18341 Some(expr) => Ok(Statement::Return(ReturnStatement {
18342 value: Some(ReturnStatementValue::Expr(expr)),
18343 })),
18344 None => Ok(Statement::Return(ReturnStatement { value: None })),
18345 }
18346 }
18347
    /// Parses an `EXPORT DATA [WITH CONNECTION <name>] OPTIONS (...) AS
    /// <query>` statement (BigQuery-style syntax). Note: unlike most
    /// parse methods, this expects the leading `EXPORT DATA` keywords to
    /// still be unconsumed.
    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;

        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        // The OPTIONS (...) list and the AS <query> tail are mandatory.
        self.expect_keyword(Keyword::OPTIONS)?;
        self.expect_token(&Token::LParen)?;
        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
        self.expect_token(&Token::RParen)?;
        self.expect_keyword(Keyword::AS)?;
        let query = self.parse_query()?;
        Ok(Statement::ExportData(ExportData {
            options,
            query,
            connection,
        }))
    }
18371
    /// Parses a `VACUUM [FULL] [SORT ONLY] [DELETE ONLY] [REINDEX]
    /// [RECLUSTER] [table [TO n PERCENT] [BOOST]]` statement — flags that
    /// look like Redshift's VACUUM syntax (verify against the target
    /// dialect). Expects the `VACUUM` keyword to still be unconsumed.
    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::VACUUM)?;
        let full = self.parse_keyword(Keyword::FULL);
        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
        let reindex = self.parse_keyword(Keyword::REINDEX);
        let recluster = self.parse_keyword(Keyword::RECLUSTER);
        // The table name and its `TO n PERCENT` / `BOOST` modifiers are
        // all optional; absence yields (None, None, false).
        let (table_name, threshold, boost) =
            match self.maybe_parse(|p| p.parse_object_name(false))? {
                Some(table_name) => {
                    let threshold = if self.parse_keyword(Keyword::TO) {
                        let value = self.parse_value()?;
                        self.expect_keyword(Keyword::PERCENT)?;
                        Some(value.value)
                    } else {
                        None
                    };
                    let boost = self.parse_keyword(Keyword::BOOST);
                    (Some(table_name), threshold, boost)
                }
                _ => (None, None, false),
            };
        Ok(Statement::Vacuum(VacuumStatement {
            full,
            sort_only,
            delete_only,
            reindex,
            recluster,
            table_name,
            threshold,
            boost,
        }))
    }
18405
    /// Consumes the parser and returns its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
18410
18411 fn peek_sub_query(&mut self) -> bool {
18413 if self
18414 .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18415 .is_some()
18416 {
18417 self.prev_token();
18418 return true;
18419 }
18420 false
18421 }
18422
18423 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
18424 let show_in;
18425 let mut filter_position = None;
18426 if self.dialect.supports_show_like_before_in() {
18427 if let Some(filter) = self.parse_show_statement_filter()? {
18428 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
18429 }
18430 show_in = self.maybe_parse_show_stmt_in()?;
18431 } else {
18432 show_in = self.maybe_parse_show_stmt_in()?;
18433 if let Some(filter) = self.parse_show_statement_filter()? {
18434 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
18435 }
18436 }
18437 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
18438 let limit = self.maybe_parse_show_stmt_limit()?;
18439 let from = self.maybe_parse_show_stmt_from()?;
18440 Ok(ShowStatementOptions {
18441 filter_position,
18442 show_in,
18443 starts_with,
18444 limit,
18445 limit_from: from,
18446 })
18447 }
18448
18449 fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
18450 let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
18451 Some(Keyword::FROM) => ShowStatementInClause::FROM,
18452 Some(Keyword::IN) => ShowStatementInClause::IN,
18453 None => return Ok(None),
18454 _ => return self.expected("FROM or IN", self.peek_token()),
18455 };
18456
18457 let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
18458 Keyword::ACCOUNT,
18459 Keyword::DATABASE,
18460 Keyword::SCHEMA,
18461 Keyword::TABLE,
18462 Keyword::VIEW,
18463 ]) {
18464 Some(Keyword::DATABASE)
18466 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18467 | self.peek_keyword(Keyword::LIMIT) =>
18468 {
18469 (Some(ShowStatementInParentType::Database), None)
18470 }
18471 Some(Keyword::SCHEMA)
18472 if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18473 | self.peek_keyword(Keyword::LIMIT) =>
18474 {
18475 (Some(ShowStatementInParentType::Schema), None)
18476 }
18477 Some(parent_kw) => {
18478 let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
18482 match parent_kw {
18483 Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
18484 Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
18485 Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
18486 Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
18487 Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
18488 _ => {
18489 return self.expected(
18490 "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
18491 self.peek_token(),
18492 )
18493 }
18494 }
18495 }
18496 None => {
18497 let mut parent_name = self.parse_object_name(false)?;
18500 if self
18501 .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
18502 .is_some()
18503 {
18504 parent_name
18505 .0
18506 .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
18507 }
18508 (None, Some(parent_name))
18509 }
18510 };
18511
18512 Ok(Some(ShowStatementIn {
18513 clause,
18514 parent_type,
18515 parent_name,
18516 }))
18517 }
18518
18519 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18520 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18521 Ok(Some(self.parse_value()?.value))
18522 } else {
18523 Ok(None)
18524 }
18525 }
18526
18527 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18528 if self.parse_keyword(Keyword::LIMIT) {
18529 Ok(self.parse_limit()?)
18530 } else {
18531 Ok(None)
18532 }
18533 }
18534
18535 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18536 if self.parse_keyword(Keyword::FROM) {
18537 Ok(Some(self.parse_value()?.value))
18538 } else {
18539 Ok(None)
18540 }
18541 }
18542
    /// Returns `true` while the parser state is `ColumnDefinition`
    /// (i.e. a column definition is currently being parsed).
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
18546
18547 pub(crate) fn parse_key_value_options(
18552 &mut self,
18553 parenthesized: bool,
18554 end_words: &[Keyword],
18555 ) -> Result<KeyValueOptions, ParserError> {
18556 let mut options: Vec<KeyValueOption> = Vec::new();
18557 let mut delimiter = KeyValueOptionsDelimiter::Space;
18558 if parenthesized {
18559 self.expect_token(&Token::LParen)?;
18560 }
18561 loop {
18562 match self.next_token().token {
18563 Token::RParen => {
18564 if parenthesized {
18565 break;
18566 } else {
18567 return self.expected(" another option or EOF", self.peek_token());
18568 }
18569 }
18570 Token::EOF => break,
18571 Token::Comma => {
18572 delimiter = KeyValueOptionsDelimiter::Comma;
18573 continue;
18574 }
18575 Token::Word(w) if !end_words.contains(&w.keyword) => {
18576 options.push(self.parse_key_value_option(&w)?)
18577 }
18578 Token::Word(w) if end_words.contains(&w.keyword) => {
18579 self.prev_token();
18580 break;
18581 }
18582 _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
18583 };
18584 }
18585
18586 Ok(KeyValueOptions { delimiter, options })
18587 }
18588
    /// Parses the value side of a single `key = value` option; the `key`
    /// word has already been consumed by the caller.
    ///
    /// The value may be a quoted string, a TRUE/FALSE keyword, a number,
    /// a bare word (stored as a placeholder), a parenthesized list of
    /// plain values, or a nested parenthesized option list.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        match self.peek_token().token {
            // 'quoted string' value
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
            }),
            // TRUE / FALSE boolean value
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
                })
            }
            // numeric value
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
            }),
            // Any other bare word is kept verbatim as a placeholder value.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
                        word.value.clone(),
                    )),
                })
            }
            Token::LParen => {
                // First try `(v1, v2, ...)` as a list of plain values; if
                // that fails, rewind and reparse as a nested `(k = v ...)`
                // option list.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => {
                        let values = values.into_iter().map(|v| v.value).collect();
                        Ok(KeyValueOption {
                            option_name: key.value.clone(),
                            option_value: KeyValueOptionKind::Multi(values),
                        })
                    }
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected("expected option value", self.peek_token()),
        }
    }
18649
18650 fn parse_reset(&mut self) -> Result<Statement, ParserError> {
18652 if self.parse_keyword(Keyword::ALL) {
18653 return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
18654 }
18655
18656 let obj = self.parse_object_name(false)?;
18657 Ok(Statement::Reset(ResetStatement {
18658 reset: Reset::ConfigurationParameter(obj),
18659 }))
18660 }
18661}
18662
18663fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18664 if let Some(prefix) = prefix {
18665 Expr::Prefixed {
18666 prefix,
18667 value: Box::new(expr),
18668 }
18669 } else {
18670 expr
18671 }
18672}
18673
impl Word {
    /// Builds an [`Ident`] from this word with the given span, cloning
    /// the word's value and keeping its quote style.
    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
    pub fn to_ident(&self, span: Span) -> Ident {
        Ident {
            value: self.value.clone(),
            quote_style: self.quote_style,
            span,
        }
    }

    /// Converts this word into an [`Ident`] with the given span,
    /// consuming the word and moving its value without cloning.
    pub fn into_ident(self, span: Span) -> Ident {
        Ident {
            value: self.value,
            quote_style: self.quote_style,
            span,
        }
    }
}
18693
18694#[cfg(test)]
18695mod tests {
18696 use crate::test_utils::{all_dialects, TestedDialects};
18697
18698 use super::*;
18699
    /// Verifies `next_token`/`prev_token`/`peek_token` bookkeeping,
    /// including stepping back over EOF tokens.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // Past the end, next_token keeps returning EOF.
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
18720
    /// Verifies multi-token lookahead via `peek_tokens`, including that
    /// peeking does not advance the parser and that positions past the
    /// end are reported as EOF.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            // Single-token peek.
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            // Peeking more tokens does not consume input: SELECT is still first.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Advance past `SELECT foo AS bar`.
            for _ in 0..4 {
                parser.next_token();
            }

            // Peeks beyond the last real token yield EOF entries.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
18765
    // Unit tests for `Parser::parse_data_type`, grouped in a nested module so
    // the `use` list and the round-trip macro below stay scoped to them.
    #[cfg(test)]
    mod test_parse_data_type {
        use crate::ast::{
            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
        };
        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
        use crate::test_utils::TestedDialects;

        // Round-trip helper: `$input` must parse into `$expected_type`, and
        // the parsed `DataType` must display back as exactly `$input`.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }

        // ANSI character string types: CHARACTER/CHAR and their VARYING
        // variants, with an optional length and CHARACTERS/OCTETS units.
        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            // Bare CHARACTER carries no length specification.
            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }

        // CHARACTER/CHAR LARGE OBJECT and CLOB, with an optional length.
        #[test]
        fn test_ansii_character_large_object_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }

        // Unrecognized type names fall back to `DataType::Custom`, optionally
        // with string-typed modifier arguments.
        #[test]
        fn test_parse_custom_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }

        // NUMERIC/DECIMAL/DEC with no arguments, precision only, or
        // precision-and-scale (including negative scales).
        #[test]
        fn test_ansii_exact_numeric_types() {
            let dialect = TestedDialects::new(vec![
                Box::new(GenericDialect {}),
                Box::new(AnsiDialect {}),
                Box::new(PostgreSqlDialect {}),
            ]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            // Negative scales parse and round-trip unchanged.
            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-2)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(1000,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-1000)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-5)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(20,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-2)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
            );

            // An explicit `+` sign on the scale is accepted but is not
            // preserved when the type is displayed, so this case cannot use
            // the round-trip macro.
            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
                let data_type = parser.parse_data_type().unwrap();
                assert_eq!(
                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
                    data_type
                );
                assert_eq!("NUMERIC(10,5)", data_type.to_string());
            });
        }

        // DATE/TIME/TIMESTAMP with optional precision and optional
        // WITH/WITHOUT TIME ZONE qualifiers.
        #[test]
        fn test_ansii_date_type() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
    }
19138
19139 #[test]
19140 fn test_parse_schema_name() {
19141 macro_rules! test_parse_schema_name {
19143 ($input:expr, $expected_name:expr $(,)?) => {{
19144 all_dialects().run_parser_method(&*$input, |parser| {
19145 let schema_name = parser.parse_schema_name().unwrap();
19146 assert_eq!(schema_name, $expected_name);
19148 assert_eq!(schema_name.to_string(), $input.to_string());
19150 });
19151 }};
19152 }
19153
19154 let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19155 let dummy_authorization = Ident::new("dummy_authorization");
19156
19157 test_parse_schema_name!(
19158 format!("{dummy_name}"),
19159 SchemaName::Simple(dummy_name.clone())
19160 );
19161
19162 test_parse_schema_name!(
19163 format!("AUTHORIZATION {dummy_authorization}"),
19164 SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19165 );
19166 test_parse_schema_name!(
19167 format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19168 SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19169 );
19170 }
19171
19172 #[test]
19173 fn mysql_parse_index_table_constraint() {
19174 macro_rules! test_parse_table_constraint {
19175 ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19176 $dialect.run_parser_method(&*$input, |parser| {
19177 let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19178 assert_eq!(constraint, $expected);
19180 assert_eq!(constraint.to_string(), $input.to_string());
19182 });
19183 }};
19184 }
19185
19186 fn mk_expected_col(name: &str) -> IndexColumn {
19187 IndexColumn {
19188 column: OrderByExpr {
19189 expr: Expr::Identifier(name.into()),
19190 options: OrderByOptions {
19191 asc: None,
19192 nulls_first: None,
19193 },
19194 with_fill: None,
19195 },
19196 operator_class: None,
19197 }
19198 }
19199
19200 let dialect =
19201 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19202
19203 test_parse_table_constraint!(
19204 dialect,
19205 "INDEX (c1)",
19206 IndexConstraint {
19207 display_as_key: false,
19208 name: None,
19209 index_type: None,
19210 columns: vec![mk_expected_col("c1")],
19211 index_options: vec![],
19212 }
19213 .into()
19214 );
19215
19216 test_parse_table_constraint!(
19217 dialect,
19218 "KEY (c1)",
19219 IndexConstraint {
19220 display_as_key: true,
19221 name: None,
19222 index_type: None,
19223 columns: vec![mk_expected_col("c1")],
19224 index_options: vec![],
19225 }
19226 .into()
19227 );
19228
19229 test_parse_table_constraint!(
19230 dialect,
19231 "INDEX 'index' (c1, c2)",
19232 TableConstraint::Index(IndexConstraint {
19233 display_as_key: false,
19234 name: Some(Ident::with_quote('\'', "index")),
19235 index_type: None,
19236 columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
19237 index_options: vec![],
19238 })
19239 );
19240
19241 test_parse_table_constraint!(
19242 dialect,
19243 "INDEX USING BTREE (c1)",
19244 IndexConstraint {
19245 display_as_key: false,
19246 name: None,
19247 index_type: Some(IndexType::BTree),
19248 columns: vec![mk_expected_col("c1")],
19249 index_options: vec![],
19250 }
19251 .into()
19252 );
19253
19254 test_parse_table_constraint!(
19255 dialect,
19256 "INDEX USING HASH (c1)",
19257 IndexConstraint {
19258 display_as_key: false,
19259 name: None,
19260 index_type: Some(IndexType::Hash),
19261 columns: vec![mk_expected_col("c1")],
19262 index_options: vec![],
19263 }
19264 .into()
19265 );
19266
19267 test_parse_table_constraint!(
19268 dialect,
19269 "INDEX idx_name USING BTREE (c1)",
19270 IndexConstraint {
19271 display_as_key: false,
19272 name: Some(Ident::new("idx_name")),
19273 index_type: Some(IndexType::BTree),
19274 columns: vec![mk_expected_col("c1")],
19275 index_options: vec![],
19276 }
19277 .into()
19278 );
19279
19280 test_parse_table_constraint!(
19281 dialect,
19282 "INDEX idx_name USING HASH (c1)",
19283 IndexConstraint {
19284 display_as_key: false,
19285 name: Some(Ident::new("idx_name")),
19286 index_type: Some(IndexType::Hash),
19287 columns: vec![mk_expected_col("c1")],
19288 index_options: vec![],
19289 }
19290 .into()
19291 );
19292 }
19293
19294 #[test]
19295 fn test_tokenizer_error_loc() {
19296 let sql = "foo '";
19297 let ast = Parser::parse_sql(&GenericDialect, sql);
19298 assert_eq!(
19299 ast,
19300 Err(ParserError::TokenizerError(
19301 "Unterminated string literal at Line: 1, Column: 5".to_string()
19302 ))
19303 );
19304 }
19305
19306 #[test]
19307 fn test_parser_error_loc() {
19308 let sql = "SELECT this is a syntax error";
19309 let ast = Parser::parse_sql(&GenericDialect, sql);
19310 assert_eq!(
19311 ast,
19312 Err(ParserError::ParserError(
19313 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
19314 .to_string()
19315 ))
19316 );
19317 }
19318
19319 #[test]
19320 fn test_nested_explain_error() {
19321 let sql = "EXPLAIN EXPLAIN SELECT 1";
19322 let ast = Parser::parse_sql(&GenericDialect, sql);
19323 assert_eq!(
19324 ast,
19325 Err(ParserError::ParserError(
19326 "Explain must be root of the plan".to_string()
19327 ))
19328 );
19329 }
19330
19331 #[test]
19332 fn test_parse_multipart_identifier_positive() {
19333 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
19334
19335 let expected = vec![
19337 Ident {
19338 value: "CATALOG".to_string(),
19339 quote_style: None,
19340 span: Span::empty(),
19341 },
19342 Ident {
19343 value: "F(o)o. \"bar".to_string(),
19344 quote_style: Some('"'),
19345 span: Span::empty(),
19346 },
19347 Ident {
19348 value: "table".to_string(),
19349 quote_style: None,
19350 span: Span::empty(),
19351 },
19352 ];
19353 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
19354 let actual = parser.parse_multipart_identifier().unwrap();
19355 assert_eq!(expected, actual);
19356 });
19357
19358 let expected = vec![
19360 Ident {
19361 value: "CATALOG".to_string(),
19362 quote_style: None,
19363 span: Span::empty(),
19364 },
19365 Ident {
19366 value: "table".to_string(),
19367 quote_style: None,
19368 span: Span::empty(),
19369 },
19370 ];
19371 dialect.run_parser_method("CATALOG . table", |parser| {
19372 let actual = parser.parse_multipart_identifier().unwrap();
19373 assert_eq!(expected, actual);
19374 });
19375 }
19376
19377 #[test]
19378 fn test_parse_multipart_identifier_negative() {
19379 macro_rules! test_parse_multipart_identifier_error {
19380 ($input:expr, $expected_err:expr $(,)?) => {{
19381 all_dialects().run_parser_method(&*$input, |parser| {
19382 let actual_err = parser.parse_multipart_identifier().unwrap_err();
19383 assert_eq!(actual_err.to_string(), $expected_err);
19384 });
19385 }};
19386 }
19387
19388 test_parse_multipart_identifier_error!(
19389 "",
19390 "sql parser error: Empty input when parsing identifier",
19391 );
19392
19393 test_parse_multipart_identifier_error!(
19394 "*schema.table",
19395 "sql parser error: Unexpected token in identifier: *",
19396 );
19397
19398 test_parse_multipart_identifier_error!(
19399 "schema.table*",
19400 "sql parser error: Unexpected token in identifier: *",
19401 );
19402
19403 test_parse_multipart_identifier_error!(
19404 "schema.table.",
19405 "sql parser error: Trailing period in identifier",
19406 );
19407
19408 test_parse_multipart_identifier_error!(
19409 "schema.*",
19410 "sql parser error: Unexpected token following period in identifier: *",
19411 );
19412 }
19413
19414 #[test]
19415 fn test_mysql_partition_selection() {
19416 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
19417 let expected = vec!["p0", "p2"];
19418
19419 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
19420 assert_eq!(ast.len(), 1);
19421 if let Statement::Query(v) = &ast[0] {
19422 if let SetExpr::Select(select) = &*v.body {
19423 assert_eq!(select.from.len(), 1);
19424 let from: &TableWithJoins = &select.from[0];
19425 let table_factor = &from.relation;
19426 if let TableFactor::Table { partitions, .. } = table_factor {
19427 let actual: Vec<&str> = partitions
19428 .iter()
19429 .map(|ident| ident.value.as_str())
19430 .collect();
19431 assert_eq!(expected, actual);
19432 }
19433 }
19434 } else {
19435 panic!("fail to parse mysql partition selection");
19436 }
19437 }
19438
19439 #[test]
19440 fn test_replace_into_placeholders() {
19441 let sql = "REPLACE INTO t (a) VALUES (&a)";
19442
19443 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19444 }
19445
19446 #[test]
19447 fn test_replace_into_set_placeholder() {
19448 let sql = "REPLACE INTO t SET ?";
19449
19450 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19451 }
19452
19453 #[test]
19454 fn test_replace_incomplete() {
19455 let sql = r#"REPLACE"#;
19456
19457 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
19458 }
19459
19460 #[test]
19461 fn test_placeholder_invalid_whitespace() {
19462 for w in [" ", "/*invalid*/"] {
19463 let sql = format!("\nSELECT\n :{w}fooBar");
19464 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
19465 }
19466 }
19467}