1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::{
36 key_value_options::{
37 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
38 },
39 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
40};
41use crate::ast::Statement::CreatePolicy;
42use crate::ast::*;
43use crate::dialect::*;
44use crate::keywords::{Keyword, ALL_KEYWORDS};
45use crate::tokenizer::*;
46use sqlparser::parser::ParserState::ColumnDefinition;
47
/// Errors produced while parsing SQL text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could proceed; carries its message.
    TokenizerError(String),
    /// The token stream did not match the expected grammar.
    ParserError(String),
    /// The nesting depth budget was exhausted (see `RecursionCounter`).
    RecursionLimitExceeded,
}
54
// Builds `Err(ParserError::ParserError(..))` whose message is `$MSG`
// immediately followed by the rendered location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
61
// ALTER and MERGE statement parsing lives in dedicated submodules.
mod alter;
mod merge;
64
/// Recursion-depth tracking for `std` builds: a shared budget that is spent
/// on entry to recursive productions and refunded via an RAII guard.
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Hands out [`DepthGuard`]s while the shared depth budget lasts.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` nested decrements.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Spends one unit of depth, or fails with
        /// [`ParserError::RecursionLimitExceeded`] when the budget is empty.
        /// The returned guard refunds the unit when dropped.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(new_depth) => {
                    self.remaining_depth.set(new_depth);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// RAII token that restores one unit of recursion depth on drop.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let refunded = self.remaining_depth.get() + 1;
            self.remaining_depth.set(refunded);
        }
    }
}
131
/// No-op recursion tracking for `no_std` builds: depth is never limited,
/// but the API mirrors the `std` variant so callers compile unchanged.
#[cfg(not(feature = "std"))]
mod recursion {
    /// Stub counter; every `try_decrease` call succeeds.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The depth argument is accepted for API parity and ignored.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; the guard carries no state.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized guard matching the `std` build's `DepthGuard`.
    pub struct DepthGuard {}
}
152
/// Whether a grammar element may be omitted (`Optional`) or must be
/// present (`Mandatory`).
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
158
/// Whether a derived table was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
163
/// A parsed projection item: an ordinary expression, a qualified wildcard
/// (`alias.*`), or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
169
170impl From<TokenizerError> for ParserError {
171 fn from(e: TokenizerError) -> Self {
172 ParserError::TokenizerError(e.to_string())
173 }
174}
175
176impl fmt::Display for ParserError {
177 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
178 write!(
179 f,
180 "sql parser error: {}",
181 match self {
182 ParserError::TokenizerError(s) => s,
183 ParserError::ParserError(s) => s,
184 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
185 }
186 )
187 }
188}
189
// `std::error::Error` is only available when the `std` feature is enabled.
#[cfg(feature = "std")]
impl std::error::Error for ParserError {}
192
/// Default recursion-depth budget used by `Parser::new`; override with
/// `Parser::with_recursion_limit`.
const DEFAULT_REMAINING_DEPTH: usize = 50;
195
/// Sentinel returned when reading past the end of the token stream; the
/// span is zeroed because there is no real source location.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
204
/// Newtype flag recording whether a trailing closing bracket was consumed.
// NOTE(review): the consuming call sites are outside this chunk — presumably
// nested type parsing (e.g. `ARRAY<ARRAY<INT>>`); confirm at use sites.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
224
/// Dialect-independent flags that tune parser behavior.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Whether trailing commas are accepted; seeded from the dialect in
    /// `Parser::new`.
    pub trailing_commas: bool,
    /// Whether string literals are unescaped during tokenization
    /// (forwarded to `Tokenizer::with_unescape`).
    pub unescape: bool,
    /// Whether consecutive statements must be separated by semicolons.
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        ParserOptions {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Creates options with the default values.
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns the options with `trailing_commas` replaced.
    pub fn with_trailing_commas(self, trailing_commas: bool) -> Self {
        Self {
            trailing_commas,
            ..self
        }
    }

    /// Returns the options with `unescape` replaced.
    pub fn with_unescape(self, unescape: bool) -> Self {
        Self { unescape, ..self }
    }
}
276
/// Modal state that changes how certain tokens are interpreted.
// NOTE(review): the variants are consulted at use sites outside this chunk;
// exact effects to be confirmed there.
#[derive(Copy, Clone)]
enum ParserState {
    /// Default parsing mode.
    Normal,
    /// Set while parsing a `CONNECT BY` clause — TODO confirm effect at use sites.
    ConnectBy,
    /// Set while parsing a column definition — TODO confirm effect at use sites.
    ColumnDefinition,
}
292
/// SQL parser: walks a token stream produced by the tokenizer and builds an
/// AST according to the configured dialect and options.
pub struct Parser<'a> {
    /// The tokens being parsed.
    tokens: Vec<TokenWithSpan>,
    /// Index of the first unprocessed token in `tokens`.
    index: usize,
    /// Current modal state (see [`ParserState`]).
    state: ParserState,
    /// The SQL dialect being parsed.
    dialect: &'a dyn Dialect,
    /// Behavior flags (trailing commas, unescaping, delimiters).
    options: ParserOptions,
    /// Bounds recursion depth to avoid stack overflow on deep nesting.
    recursion_counter: RecursionCounter,
}
347
348impl<'a> Parser<'a> {
349 pub fn new(dialect: &'a dyn Dialect) -> Self {
365 Self {
366 tokens: vec![],
367 index: 0,
368 state: ParserState::Normal,
369 dialect,
370 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
371 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
372 }
373 }
374
375 pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
398 self.recursion_counter = RecursionCounter::new(recursion_limit);
399 self
400 }
401
402 pub fn with_options(mut self, options: ParserOptions) -> Self {
425 self.options = options;
426 self
427 }
428
429 pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
431 self.tokens = tokens;
432 self.index = 0;
433 self
434 }
435
436 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
438 let tokens_with_locations: Vec<TokenWithSpan> = tokens
440 .into_iter()
441 .map(|token| TokenWithSpan {
442 token,
443 span: Span::empty(),
444 })
445 .collect();
446 self.with_tokens_with_locations(tokens_with_locations)
447 }
448
449 pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
456 debug!("Parsing sql '{sql}'...");
457 let tokens = Tokenizer::new(self.dialect, sql)
458 .with_unescape(self.options.unescape)
459 .tokenize_with_location()?;
460 Ok(self.with_tokens_with_locations(tokens))
461 }
462
    /// Parses the token stream as a sequence of statements separated by `;`.
    ///
    /// Stops at end of input, or at an `END` keyword appearing where a
    /// statement delimiter is expected (it closes an enclosing block).
    /// Errors if two statements are not separated by a semicolon, unless
    /// `require_semicolon_stmt_delimiter` is disabled in the options.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Ignore empty statements (between successive statement delimiters).
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some dialects allow statements to simply abut each other.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // `END` after a complete statement terminates this list
                // rather than starting a new statement.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
513
514 pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
530 Parser::new(dialect).try_with_sql(sql)?.parse_statements()
531 }
532
    /// Parses a single statement (SELECT, INSERT, CREATE, ...), stopping
    /// before any statement separator.
    ///
    /// The dialect is given the first chance to parse the statement; only if
    /// it declines does the generic keyword dispatch below run.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Bound nesting depth (statements can recurse, e.g. EXPLAIN <stmt>).
        let _guard = self.recursion_counter.try_decrease()?;

        // Allow the dialect to fully override statement parsing.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                // The following sub-parsers expect to consume their own
                // leading keyword, so rewind one token before delegating.
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt()
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt()
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while()
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt()
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                // These take the already-consumed keyword token so its span
                // can be attached to the resulting AST node.
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token),
                // LISTEN/UNLISTEN/NOTIFY are gated on dialect support.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset(),
                _ => self.expected("an SQL statement", next_token),
            },
            // A statement may also begin with a parenthesized query,
            // e.g. `(SELECT 1)`.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
670
    /// Parses a `CASE ... WHEN ... [ELSE ...] END [CASE]` statement.
    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // If WHEN does not follow immediately, an operand expression to
        // match against comes first.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; record the final token.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(Statement::Case(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        }))
    }
707
    /// Parses an `IF ... [ELSEIF ...]* [ELSE ...] END IF` statement.
    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF arms.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The statement is closed by `END IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(Statement::If(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        }))
    }
747
748 fn parse_while(&mut self) -> Result<Statement, ParserError> {
752 self.expect_keyword_is(Keyword::WHILE)?;
753 let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
754
755 Ok(Statement::While(WhileStatement { while_block }))
756 }
757
758 fn parse_conditional_statement_block(
766 &mut self,
767 terminal_keywords: &[Keyword],
768 ) -> Result<ConditionalStatementBlock, ParserError> {
769 let start_token = self.get_current_token().clone(); let mut then_token = None;
771
772 let condition = match &start_token.token {
773 Token::Word(w) if w.keyword == Keyword::ELSE => None,
774 Token::Word(w) if w.keyword == Keyword::WHILE => {
775 let expr = self.parse_expr()?;
776 Some(expr)
777 }
778 _ => {
779 let expr = self.parse_expr()?;
780 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
781 Some(expr)
782 }
783 };
784
785 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
786
787 Ok(ConditionalStatementBlock {
788 start_token: AttachedToken(start_token),
789 condition,
790 then_token,
791 conditional_statements,
792 })
793 }
794
795 pub(crate) fn parse_conditional_statements(
798 &mut self,
799 terminal_keywords: &[Keyword],
800 ) -> Result<ConditionalStatements, ParserError> {
801 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
802 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
803 let statements = self.parse_statement_list(terminal_keywords)?;
804 let end_token = self.expect_keyword(Keyword::END)?;
805
806 ConditionalStatements::BeginEnd(BeginEndStatements {
807 begin_token: AttachedToken(begin_token),
808 statements,
809 end_token: AttachedToken(end_token),
810 })
811 } else {
812 ConditionalStatements::Sequence {
813 statements: self.parse_statement_list(terminal_keywords)?,
814 }
815 };
816 Ok(conditional_statements)
817 }
818
819 pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
823 self.expect_keyword_is(Keyword::RAISE)?;
824
825 let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
826 self.expect_token(&Token::Eq)?;
827 Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
828 } else {
829 self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
830 };
831
832 Ok(Statement::Raise(RaiseStatement { value }))
833 }
834
835 pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
836 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
837
838 self.expect_keyword_is(Keyword::ON)?;
839 let token = self.next_token();
840
841 let (object_type, object_name) = match token.token {
842 Token::Word(w) if w.keyword == Keyword::COLUMN => {
843 (CommentObject::Column, self.parse_object_name(false)?)
844 }
845 Token::Word(w) if w.keyword == Keyword::TABLE => {
846 (CommentObject::Table, self.parse_object_name(false)?)
847 }
848 Token::Word(w) if w.keyword == Keyword::EXTENSION => {
849 (CommentObject::Extension, self.parse_object_name(false)?)
850 }
851 Token::Word(w) if w.keyword == Keyword::SCHEMA => {
852 (CommentObject::Schema, self.parse_object_name(false)?)
853 }
854 Token::Word(w) if w.keyword == Keyword::DATABASE => {
855 (CommentObject::Database, self.parse_object_name(false)?)
856 }
857 Token::Word(w) if w.keyword == Keyword::USER => {
858 (CommentObject::User, self.parse_object_name(false)?)
859 }
860 Token::Word(w) if w.keyword == Keyword::ROLE => {
861 (CommentObject::Role, self.parse_object_name(false)?)
862 }
863 _ => self.expected("comment object_type", token)?,
864 };
865
866 self.expect_keyword_is(Keyword::IS)?;
867 let comment = if self.parse_keyword(Keyword::NULL) {
868 None
869 } else {
870 Some(self.parse_literal_string()?)
871 };
872 Ok(Statement::Comment {
873 object_type,
874 object_name,
875 comment,
876 if_exists,
877 })
878 }
879
    /// Parses a MySQL `FLUSH` statement; errors for non-MySQL dialects.
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
        }

        // Optional binlog-propagation modifier.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // Determine what is being flushed; multi-keyword forms are tried
        // before their single-keyword prefixes (e.g. `BINARY LOGS` vs `LOGS`).
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // RELAY LOGS may name a replication channel.
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // FLUSH TABLES [tbl[, ...]] [WITH READ LOCK] [FOR EXPORT]
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A plain identifier starts the table list.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    // Any non-word token ends the TABLES clause.
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
966
967 pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
968 let repair = self.parse_keyword(Keyword::REPAIR);
969 self.expect_keyword_is(Keyword::TABLE)?;
970 let table_name = self.parse_object_name(false)?;
971 let partition_action = self
972 .maybe_parse(|parser| {
973 let pa = match parser.parse_one_of_keywords(&[
974 Keyword::ADD,
975 Keyword::DROP,
976 Keyword::SYNC,
977 ]) {
978 Some(Keyword::ADD) => Some(AddDropSync::ADD),
979 Some(Keyword::DROP) => Some(AddDropSync::DROP),
980 Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
981 _ => None,
982 };
983 parser.expect_keyword_is(Keyword::PARTITIONS)?;
984 Ok(pa)
985 })?
986 .unwrap_or_default();
987 Ok(Msck {
988 repair,
989 table_name,
990 partition_action,
991 }
992 .into())
993 }
994
    /// Parses `TRUNCATE [TABLE] [ONLY] <name>[, ...]` with optional
    /// PARTITION list, Postgres identity/cascade options, and ON CLUSTER.
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);

        // Each target may carry its own `ONLY` prefix.
        let table_names = self
            .parse_comma_separated(|p| {
                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
            })?
            .into_iter()
            .map(|(only, name)| TruncateTableTarget { name, only })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // RESTART/CONTINUE IDENTITY and CASCADE/RESTRICT are only parsed
        // for Postgres-compatible dialects.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            identity,
            cascade,
            on_cluster,
        }
        .into())
    }
1040
1041 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1042 if self.parse_keyword(Keyword::CASCADE) {
1043 Some(CascadeOption::Cascade)
1044 } else if self.parse_keyword(Keyword::RESTRICT) {
1045 Some(CascadeOption::Restrict)
1046 } else {
1047 None
1048 }
1049 }
1050
    /// Parses the parenthesized option list of DuckDB `ATTACH`, e.g.
    /// `(READ_ONLY TRUE, TYPE ident)`. Returns an empty list when no
    /// opening parenthesis follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean value is optional; a bare READ_ONLY is allowed.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            // Options are comma-separated and closed by `)`.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }
1085
1086 pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1087 let database = self.parse_keyword(Keyword::DATABASE);
1088 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1089 let database_path = self.parse_identifier()?;
1090 let database_alias = if self.parse_keyword(Keyword::AS) {
1091 Some(self.parse_identifier()?)
1092 } else {
1093 None
1094 };
1095
1096 let attach_options = self.parse_attach_duckdb_database_options()?;
1097 Ok(Statement::AttachDuckDBDatabase {
1098 if_not_exists,
1099 database,
1100 database_path,
1101 database_alias,
1102 attach_options,
1103 })
1104 }
1105
1106 pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1107 let database = self.parse_keyword(Keyword::DATABASE);
1108 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1109 let database_alias = self.parse_identifier()?;
1110 Ok(Statement::DetachDuckDBDatabase {
1111 if_exists,
1112 database,
1113 database_alias,
1114 })
1115 }
1116
1117 pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1118 let database = self.parse_keyword(Keyword::DATABASE);
1119 let database_file_name = self.parse_expr()?;
1120 self.expect_keyword_is(Keyword::AS)?;
1121 let schema_name = self.parse_identifier()?;
1122 Ok(Statement::AttachDatabase {
1123 database,
1124 schema_name,
1125 database_file_name,
1126 })
1127 }
1128
    /// Parses `ANALYZE [TABLE] <name>` followed by any combination of
    /// PARTITION (...), FOR COLUMNS [...], CACHE METADATA, NOSCAN and
    /// COMPUTE STATISTICS clauses, in any order.
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list itself is optional after FOR COLUMNS.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                // No further recognized clause: stop.
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        }
        .into())
    }
1186
    /// Parses an expression that may be a `*` or `qualifier.*` wildcard;
    /// anything else is rewound and parsed as an ordinary expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the position so we can rewind if no wildcard matches.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: the outer pattern admits only the two
                        // variants handled above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    // Accumulate `ident . ident . ... . *`.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                // Treat 'string' in dotted paths as a quoted identifier.
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        // Not a wildcard: rewind and parse as an ordinary expression.
        self.index = index;
        self.parse_expr()
    }
1236
    /// Parses a new expression, starting from the dialect's lowest
    /// ("unknown") precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1241
    /// Parses `<expr> [alias] [ASC | DESC]`.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject implicit (non-AS) aliases that collide with keywords which
        // may legitimately follow the expression here.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1260
    /// Parses a subexpression with precedence climbing: consumes infix
    /// operators while their precedence exceeds `precedence`.
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Expressions recurse freely; spend one unit of the depth budget.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // `.` access was already handled by `parse_compound_expr` above;
            // do not treat it as an infix operator here.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1288
1289 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1290 let condition = self.parse_expr()?;
1291 let message = if self.parse_keyword(Keyword::AS) {
1292 Some(self.parse_expr()?)
1293 } else {
1294 None
1295 };
1296
1297 Ok(Statement::Assert { condition, message })
1298 }
1299
1300 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1301 let name = self.parse_identifier()?;
1302 Ok(Statement::Savepoint { name })
1303 }
1304
1305 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1306 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1307 let name = self.parse_identifier()?;
1308
1309 Ok(Statement::ReleaseSavepoint { name })
1310 }
1311
1312 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1313 let channel = self.parse_identifier()?;
1314 Ok(Statement::LISTEN { channel })
1315 }
1316
    /// Parses `UNLISTEN { * | <channel> }`; the wildcard form is stored as
    /// an identifier rendered from the wildcard expression.
    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    // Rewind so the error message points at the bad token.
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }
1331
1332 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1333 let channel = self.parse_identifier()?;
1334 let payload = if self.consume_token(&Token::Comma) {
1335 Some(self.parse_literal_string()?)
1336 } else {
1337 None
1338 };
1339 Ok(Statement::NOTIFY { channel, payload })
1340 }
1341
1342 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1344 if self.peek_keyword(Keyword::TABLE) {
1345 self.expect_keyword(Keyword::TABLE)?;
1346 let rename_tables = self.parse_comma_separated(|parser| {
1347 let old_name = parser.parse_object_name(false)?;
1348 parser.expect_keyword(Keyword::TO)?;
1349 let new_name = parser.parse_object_name(false)?;
1350
1351 Ok(RenameTable { old_name, new_name })
1352 })?;
1353 Ok(Statement::RenameTable(rename_tables))
1354 } else {
1355 self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1356 }
1357 }
1358
    /// Try to parse an expression whose prefix is the reserved word `w`
    /// (already consumed; `w_span` is its span).
    ///
    /// Returns `Ok(Some(expr))` when the keyword introduces an expression,
    /// `Ok(None)` when it does not (the caller then retries the word as an
    /// ordinary identifier or function name), and `Err` on a malformed
    /// construct.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean/NULL literals: rewind so parse_value sees the token.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Postgres parameterless "functions" callable without parens.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Datetime functions that allow optional parentheses.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS(...) subquery — except Databricks, which only treats it
            // as the predicate when a SELECT/WITH follows the paren.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            // POSITION only acts as a function when directly followed by `(`.
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(subquery)` — but not for dialects where `ARRAY(...)` is
            // an ordinary function call.
            Keyword::ARRAY
                if self.peek_token() == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR expr` — only meaningful inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // DuckDB-style `MAP {…}` literal.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            // Geometric type literals (CIRCLE '...' etc.) where supported.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1482
1483 fn parse_expr_prefix_by_unreserved_word(
1485 &mut self,
1486 w: &Word,
1487 w_span: Span,
1488 ) -> Result<Expr, ParserError> {
1489 match self.peek_token().token {
1490 Token::LParen if !self.peek_outer_join_operator() => {
1491 let id_parts = vec![w.clone().into_ident(w_span)];
1492 self.parse_function(ObjectName::from(id_parts))
1493 }
1494 Token::SingleQuotedString(_)
1496 | Token::DoubleQuotedString(_)
1497 | Token::HexStringLiteral(_)
1498 if w.value.starts_with('_') =>
1499 {
1500 Ok(Expr::Prefixed {
1501 prefix: w.clone().into_ident(w_span),
1502 value: self.parse_introduced_string_expr()?.into(),
1503 })
1504 }
1505 Token::SingleQuotedString(_)
1507 | Token::DoubleQuotedString(_)
1508 | Token::HexStringLiteral(_)
1509 if w.value.starts_with('_') =>
1510 {
1511 Ok(Expr::Prefixed {
1512 prefix: w.clone().into_ident(w_span),
1513 value: self.parse_introduced_string_expr()?.into(),
1514 })
1515 }
1516 Token::Arrow if self.dialect.supports_lambda_functions() => {
1517 self.expect_token(&Token::Arrow)?;
1518 Ok(Expr::Lambda(LambdaFunction {
1519 params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1520 body: Box::new(self.parse_expr()?),
1521 }))
1522 }
1523 _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1524 }
1525 }
1526
    /// Parse an expression prefix — the leading operand or unary-operator
    /// construct before any binary operators are considered.
    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
        // Allow the dialect to fully take over prefix parsing.
        if let Some(prefix) = self.dialect.parse_prefix(self) {
            return prefix;
        }

        let loc = self.peek_token_ref().span.start;
        // Speculatively parse `<data_type> '<literal>'` as a typed string
        // (e.g. DATE '2020-01-01'). INTERVAL routes to the interval parser;
        // a custom type is rejected here (via a dummy error) so the word is
        // re-parsed below as an identifier/function instead.
        let opt_expr = self.maybe_parse(|parser| {
            match parser.parse_data_type()? {
                DataType::Interval { .. } => parser.parse_interval(),
                DataType::Custom(..) => parser_err!("dummy", loc),
                data_type => Ok(Expr::TypedString(TypedString {
                    data_type,
                    value: parser.parse_value()?,
                    uses_odbc_syntax: false,
                })),
            }
        })?;

        if let Some(expr) = opt_expr {
            return Ok(expr);
        }

        let dialect = self.dialect;

        self.advance_token();
        let next_token_index = self.get_current_index();
        let next_token = self.get_current_token();
        let span = next_token.span;
        let expr = match &next_token.token {
            Token::Word(w) => {
                // Words are ambiguous: try the reserved-word interpretation
                // first, then fall back to identifier/function parsing.
                let w = w.clone();
                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
                    Ok(Some(expr)) => Ok(expr),

                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),

                    Err(e) => {
                        // The reserved-word parse failed; if the dialect
                        // still allows this keyword as an identifier, retry
                        // it as one before surfacing the original error.
                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
                            }) {
                                return Ok(expr);
                            }
                        }
                        return Err(e);
                    }
                }
            }
            Token::LBracket => self.parse_array_expr(false),
            // Unary plus/minus bind tighter than +/- as binary operators.
            tok @ Token::Minus | tok @ Token::Plus => {
                let op = if *tok == Token::Plus {
                    UnaryOperator::Plus
                } else {
                    UnaryOperator::Minus
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
                    ),
                })
            }
            // `!expr` as logical NOT, where the dialect supports it.
            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
                op: UnaryOperator::BangNot,
                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
            }),
            // Postgres-specific prefix operators:
            // `!!` (factorial), `|/` (square root), `||/` (cube root),
            // `@` (absolute value).
            tok @ Token::DoubleExclamationMark
            | tok @ Token::PGSquareRoot
            | tok @ Token::PGCubeRoot
            | tok @ Token::AtSign
                if dialect_is!(dialect is PostgreSqlDialect) =>
            {
                let op = match tok {
                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
                    Token::AtSign => UnaryOperator::PGAbs,
                    _ => {
                        return Err(ParserError::ParserError(
                            "Internal parser error: unexpected unary operator token".to_string(),
                        ))
                    }
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            Token::Tilde => Ok(Expr::UnaryOp {
                op: UnaryOperator::BitwiseNot,
                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
            }),
            // Geometric prefix operators (#, @-@, @@, ?-, ?|) where supported.
            tok @ Token::Sharp
            | tok @ Token::AtDashAt
            | tok @ Token::AtAt
            | tok @ Token::QuestionMarkDash
            | tok @ Token::QuestionPipe
                if self.dialect.supports_geometric_types() =>
            {
                let op = match tok {
                    Token::Sharp => UnaryOperator::Hash,
                    Token::AtDashAt => UnaryOperator::AtDashAt,
                    Token::AtAt => UnaryOperator::DoubleAt,
                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
                    _ => {
                        return Err(ParserError::ParserError(format!(
                            "Unexpected token in unary operator parsing: {tok:?}"
                        )))
                    }
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
            {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::UnicodeStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // All plain literal tokens: rewind and let parse_value handle them.
            Token::Number(_, _)
            | Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::TripleSingleQuotedString(_)
            | Token::TripleDoubleQuotedString(_)
            | Token::DollarQuotedString(_)
            | Token::SingleQuotedByteStringLiteral(_)
            | Token::DoubleQuotedByteStringLiteral(_)
            | Token::TripleSingleQuotedByteStringLiteral(_)
            | Token::TripleDoubleQuotedByteStringLiteral(_)
            | Token::SingleQuotedRawStringLiteral(_)
            | Token::DoubleQuotedRawStringLiteral(_)
            | Token::TripleSingleQuotedRawStringLiteral(_)
            | Token::TripleDoubleQuotedRawStringLiteral(_)
            | Token::NationalStringLiteral(_)
            | Token::HexStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // `(` — a subquery, a lambda parameter list, a nested expression,
            // or a tuple, tried in that order.
            Token::LParen => {
                let expr =
                    if let Some(expr) = self.try_parse_expr_sub_query()? {
                        expr
                    } else if let Some(lambda) = self.try_parse_lambda()? {
                        return Ok(lambda);
                    } else {
                        let exprs = self.parse_comma_separated(Parser::parse_expr)?;
                        match exprs.len() {
                            0 => return Err(ParserError::ParserError(
                                "Internal parser error: parse_comma_separated returned empty list"
                                    .to_string(),
                            )),
                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
                            _ => Expr::Tuple(exprs),
                        }
                    };
                self.expect_token(&Token::RParen)?;
                Ok(expr)
            }
            // Placeholders / bind parameters.
            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LBrace => {
                self.prev_token();
                self.parse_lbrace_expr()
            }
            _ => self.expected_at("an expression", next_token_index),
        }?;

        // A trailing COLLATE applies to the parsed expression — except while
        // parsing a column definition, where COLLATE belongs to the column.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            Ok(Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            })
        } else {
            Ok(expr)
        }
    }
1760
1761 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1762 Ok(Expr::TypedString(TypedString {
1763 data_type: DataType::GeometricType(kind),
1764 value: self.parse_value()?,
1765 uses_odbc_syntax: false,
1766 }))
1767 }
1768
    /// Parse the remainder of a compound expression rooted at `root`,
    /// accumulating `.field` and `[subscript]` accesses into `chain`.
    ///
    /// Also recognizes a trailing `.*` qualified wildcard and the Oracle
    /// outer-join operator `(+)` after an identifier chain.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `.*`: Postgres treats it as a qualified wildcard
                        // and consumes the `*`; other dialects rewind past
                        // the `.` and leave both tokens for the caller.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    // Quoted member access, e.g. `a.'b'`.
                    Token::SingleQuotedString(s) => {
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    // Any other token: parse a subexpression at `.`
                    // precedence and flatten nested compound results into
                    // the running chain.
                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
                        Expr::CompoundFieldAccess { root, access_chain } => {
                            chain.push(AccessExpr::Dot(*root));
                            chain.extend(access_chain);
                        }
                        Expr::CompoundIdentifier(parts) => chain
                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
                        expr => {
                            chain.push(AccessExpr::Dot(expr));
                        }
                    },
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `[...]` subscript (PartiQL handles brackets elsewhere).
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `schema.table.*` — every component must be a plain identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected("an identifier or a '*' after '.'", self.peek_token());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `col (+)` — Oracle outer-join marker; only valid after a pure
            // identifier chain.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
1860
    /// Combine `root` and its access chain into a single expression, using
    /// the most specific representation available: `CompoundIdentifier` for
    /// pure identifier chains, a qualified `Function` for `a.b.f(...)`,
    /// `OuterJoin` when the chain ends with a `(+)` marker, and generic
    /// `CompoundFieldAccess` otherwise.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `ident.ident....func(...)` — fold the leading identifiers into the
        // function's (qualified) name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) // skip the function itself
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // `ident.suffix (+)` — re-root an outer-join marker that was parsed
        // as the single chain element onto the full identifier chain.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
1953
1954 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1955 match k {
1956 Keyword::LOCAL => Some(ContextModifier::Local),
1957 Keyword::GLOBAL => Some(ContextModifier::Global),
1958 Keyword::SESSION => Some(ContextModifier::Session),
1959 _ => None,
1960 }
1961 }
1962
1963 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1965 if !matches!(root, Expr::Identifier(_)) {
1966 return false;
1967 }
1968 fields
1969 .iter()
1970 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1971 }
1972
1973 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1975 let mut idents = vec![];
1976 if let Expr::Identifier(root) = root {
1977 idents.push(root);
1978 for x in fields {
1979 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1980 idents.push(ident);
1981 } else {
1982 return parser_err!(
1983 format!("Expected identifier, found: {}", x),
1984 x.span().start
1985 );
1986 }
1987 }
1988 Ok(idents)
1989 } else {
1990 parser_err!(
1991 format!("Expected identifier, found: {}", root),
1992 root.span().start
1993 )
1994 }
1995 }
1996
1997 fn peek_outer_join_operator(&mut self) -> bool {
1999 if !self.dialect.supports_outer_join_operator() {
2000 return false;
2001 }
2002
2003 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2004 Token::LParen == maybe_lparen.token
2005 && Token::Plus == maybe_plus.token
2006 && Token::RParen == maybe_rparen.token
2007 }
2008
2009 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2012 self.dialect.supports_outer_join_operator()
2013 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2014 }
2015
2016 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2017 self.expect_token(&Token::LParen)?;
2018 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2019 self.expect_token(&Token::RParen)?;
2020
2021 Ok(options)
2022 }
2023
2024 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2025 let name = self.parse_identifier()?;
2026
2027 let next_token = self.peek_token();
2028 if next_token == Token::Comma || next_token == Token::RParen {
2029 return Ok(UtilityOption { name, arg: None });
2030 }
2031 let arg = self.parse_expr()?;
2032
2033 Ok(UtilityOption {
2034 name,
2035 arg: Some(arg),
2036 })
2037 }
2038
2039 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2040 if !self.peek_sub_query() {
2041 return Ok(None);
2042 }
2043
2044 Ok(Some(Expr::Subquery(self.parse_query()?)))
2045 }
2046
2047 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2048 if !self.dialect.supports_lambda_functions() {
2049 return Ok(None);
2050 }
2051 self.maybe_parse(|p| {
2052 let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2053 p.expect_token(&Token::RParen)?;
2054 p.expect_token(&Token::Arrow)?;
2055 let expr = p.parse_expr()?;
2056 Ok(Expr::Lambda(LambdaFunction {
2057 params: OneOrManyWithParens::Many(params),
2058 body: Box::new(expr),
2059 }))
2060 })
2061 }
2062
2063 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2070 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2072 return Ok(Some(expr));
2073 }
2074 self.maybe_parse_odbc_body_datetime()
2076 }
2077
2078 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2089 self.maybe_parse(|p| {
2090 let token = p.next_token().clone();
2091 let word_string = token.token.to_string();
2092 let data_type = match word_string.as_str() {
2093 "t" => DataType::Time(None, TimezoneInfo::None),
2094 "d" => DataType::Date,
2095 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2096 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2097 };
2098 let value = p.parse_value()?;
2099 Ok(Expr::TypedString(TypedString {
2100 data_type,
2101 value,
2102 uses_odbc_syntax: true,
2103 }))
2104 })
2105 }
2106
2107 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2116 self.maybe_parse(|p| {
2117 p.expect_keyword(Keyword::FN)?;
2118 let fn_name = p.parse_object_name(false)?;
2119 let mut fn_call = p.parse_function_call(fn_name)?;
2120 fn_call.uses_odbc_syntax = true;
2121 Ok(Expr::Function(fn_call))
2122 })
2123 }
2124
2125 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2126 self.parse_function_call(name).map(Expr::Function)
2127 }
2128
    /// Parse a function call to `name`, starting at the opening `(`,
    /// including optional trailing WITHIN GROUP / FILTER / null-treatment /
    /// OVER clauses. The clauses are attempted in that fixed order.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Snowflake permits a subquery directly as the sole "argument",
        // e.g. `fn(SELECT ...)`.
        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse parameterized syntax `f(params)(args)`: the first list
        // becomes `parameters` and the second the real arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // WITHIN GROUP (ORDER BY ...) for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // FILTER (WHERE ...) aggregate filtering, where supported.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for a trailing IGNORE/RESPECT NULLS when it was not
        // already supplied inside the argument list.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // OVER (window spec) or OVER named_window.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2216
2217 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2219 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2220 Some(keyword) => {
2221 self.expect_keyword_is(Keyword::NULLS)?;
2222
2223 Ok(match keyword {
2224 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2225 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2226 _ => None,
2227 })
2228 }
2229 None => Ok(None),
2230 }
2231 }
2232
2233 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2234 let args = if self.consume_token(&Token::LParen) {
2235 FunctionArguments::List(self.parse_function_argument_list()?)
2236 } else {
2237 FunctionArguments::None
2238 };
2239 Ok(Expr::Function(Function {
2240 name,
2241 uses_odbc_syntax: false,
2242 parameters: FunctionArguments::None,
2243 args,
2244 filter: None,
2245 over: None,
2246 null_treatment: None,
2247 within_group: vec![],
2248 }))
2249 }
2250
2251 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2252 let next_token = self.next_token();
2253 match &next_token.token {
2254 Token::Word(w) => match w.keyword {
2255 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2256 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2257 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2258 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2259 },
2260 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2261 }
2262 }
2263
2264 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2265 let units = self.parse_window_frame_units()?;
2266 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2267 let start_bound = self.parse_window_frame_bound()?;
2268 self.expect_keyword_is(Keyword::AND)?;
2269 let end_bound = Some(self.parse_window_frame_bound()?);
2270 (start_bound, end_bound)
2271 } else {
2272 (self.parse_window_frame_bound()?, None)
2273 };
2274 Ok(WindowFrame {
2275 units,
2276 start_bound,
2277 end_bound,
2278 })
2279 }
2280
2281 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2283 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2284 Ok(WindowFrameBound::CurrentRow)
2285 } else {
2286 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2287 None
2288 } else {
2289 Some(Box::new(match self.peek_token().token {
2290 Token::SingleQuotedString(_) => self.parse_interval()?,
2291 _ => self.parse_expr()?,
2292 }))
2293 };
2294 if self.parse_keyword(Keyword::PRECEDING) {
2295 Ok(WindowFrameBound::Preceding(rows))
2296 } else if self.parse_keyword(Keyword::FOLLOWING) {
2297 Ok(WindowFrameBound::Following(rows))
2298 } else {
2299 self.expected("PRECEDING or FOLLOWING", self.peek_token())
2300 }
2301 }
2302 }
2303
2304 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2306 if self.dialect.supports_group_by_expr() {
2307 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2308 self.expect_token(&Token::LParen)?;
2309 let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2310 self.expect_token(&Token::RParen)?;
2311 Ok(Expr::GroupingSets(result))
2312 } else if self.parse_keyword(Keyword::CUBE) {
2313 self.expect_token(&Token::LParen)?;
2314 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2315 self.expect_token(&Token::RParen)?;
2316 Ok(Expr::Cube(result))
2317 } else if self.parse_keyword(Keyword::ROLLUP) {
2318 self.expect_token(&Token::LParen)?;
2319 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2320 self.expect_token(&Token::RParen)?;
2321 Ok(Expr::Rollup(result))
2322 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2323 Ok(Expr::Tuple(vec![]))
2327 } else {
2328 self.parse_expr()
2329 }
2330 } else {
2331 self.parse_expr()
2333 }
2334 }
2335
2336 fn parse_tuple(
2340 &mut self,
2341 lift_singleton: bool,
2342 allow_empty: bool,
2343 ) -> Result<Vec<Expr>, ParserError> {
2344 if lift_singleton {
2345 if self.consume_token(&Token::LParen) {
2346 let result = if allow_empty && self.consume_token(&Token::RParen) {
2347 vec![]
2348 } else {
2349 let result = self.parse_comma_separated(Parser::parse_expr)?;
2350 self.expect_token(&Token::RParen)?;
2351 result
2352 };
2353 Ok(result)
2354 } else {
2355 Ok(vec![self.parse_expr()?])
2356 }
2357 } else {
2358 self.expect_token(&Token::LParen)?;
2359 let result = if allow_empty && self.consume_token(&Token::RParen) {
2360 vec![]
2361 } else {
2362 let result = self.parse_comma_separated(Parser::parse_expr)?;
2363 self.expect_token(&Token::RParen)?;
2364 result
2365 };
2366 Ok(result)
2367 }
2368 }
2369
2370 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2371 let case_token = AttachedToken(self.get_current_token().clone());
2372 let mut operand = None;
2373 if !self.parse_keyword(Keyword::WHEN) {
2374 operand = Some(Box::new(self.parse_expr()?));
2375 self.expect_keyword_is(Keyword::WHEN)?;
2376 }
2377 let mut conditions = vec![];
2378 loop {
2379 let condition = self.parse_expr()?;
2380 self.expect_keyword_is(Keyword::THEN)?;
2381 let result = self.parse_expr()?;
2382 conditions.push(CaseWhen { condition, result });
2383 if !self.parse_keyword(Keyword::WHEN) {
2384 break;
2385 }
2386 }
2387 let else_result = if self.parse_keyword(Keyword::ELSE) {
2388 Some(Box::new(self.parse_expr()?))
2389 } else {
2390 None
2391 };
2392 let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2393 Ok(Expr::Case {
2394 case_token,
2395 end_token,
2396 operand,
2397 conditions,
2398 else_result,
2399 })
2400 }
2401
2402 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2403 if self.parse_keyword(Keyword::FORMAT) {
2404 let value = self.parse_value()?.value;
2405 match self.parse_optional_time_zone()? {
2406 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2407 None => Ok(Some(CastFormat::Value(value))),
2408 }
2409 } else {
2410 Ok(None)
2411 }
2412 }
2413
2414 pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2415 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2416 self.parse_value().map(|v| Some(v.value))
2417 } else {
2418 Ok(None)
2419 }
2420 }
2421
2422 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2424 self.expect_token(&Token::LParen)?;
2425 let data_type = self.parse_data_type()?;
2426 self.expect_token(&Token::Comma)?;
2427 let expr = self.parse_expr()?;
2428 let styles = if self.consume_token(&Token::Comma) {
2429 self.parse_comma_separated(Parser::parse_expr)?
2430 } else {
2431 Default::default()
2432 };
2433 self.expect_token(&Token::RParen)?;
2434 Ok(Expr::Convert {
2435 is_try,
2436 expr: Box::new(expr),
2437 data_type: Some(data_type),
2438 charset: None,
2439 target_before_value: true,
2440 styles,
2441 })
2442 }
2443
    /// Parse a CONVERT expression (after the CONVERT keyword).
    ///
    /// Dispatches to the MSSQL form (`CONVERT(type, expr)`) when the dialect
    /// puts the target type first; otherwise handles
    /// `CONVERT(expr USING charset)` and
    /// `CONVERT(expr, type [CHARACTER SET cs])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // `CONVERT(expr USING charset)` — the transcoding form.
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        // `CONVERT(expr, type [CHARACTER SET cs])` — the type-cast form.
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2483
2484 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2486 self.expect_token(&Token::LParen)?;
2487 let expr = self.parse_expr()?;
2488 self.expect_keyword_is(Keyword::AS)?;
2489 let data_type = self.parse_data_type()?;
2490 let format = self.parse_optional_cast_format()?;
2491 self.expect_token(&Token::RParen)?;
2492 Ok(Expr::Cast {
2493 kind,
2494 expr: Box::new(expr),
2495 data_type,
2496 format,
2497 })
2498 }
2499
2500 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2502 self.expect_token(&Token::LParen)?;
2503 let exists_node = Expr::Exists {
2504 negated,
2505 subquery: self.parse_query()?,
2506 };
2507 self.expect_token(&Token::RParen)?;
2508 Ok(exists_node)
2509 }
2510
    /// Parse the body of `EXTRACT`: `( field FROM expr )`, or the
    /// Snowflake/generic comma form `( field, expr )`.
    ///
    /// NOTE(review): for a dialect that is not Snowflake/generic, a `,`
    /// after the field is still consumed before the error is returned —
    /// verify this is intentional before restructuring.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | GenericDialect)
        {
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2535
2536 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2537 self.expect_token(&Token::LParen)?;
2538 let expr = self.parse_expr()?;
2539 let field = if self.parse_keyword(Keyword::TO) {
2541 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2543 } else if self.consume_token(&Token::Comma) {
2544 match self.parse_value()?.value {
2546 Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2547 _ => {
2548 return Err(ParserError::ParserError(
2549 "Scale field can only be of number type".to_string(),
2550 ))
2551 }
2552 }
2553 } else {
2554 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2555 };
2556 self.expect_token(&Token::RParen)?;
2557 if is_ceil {
2558 Ok(Expr::Ceil {
2559 expr: Box::new(expr),
2560 field,
2561 })
2562 } else {
2563 Ok(Expr::Floor {
2564 expr: Box::new(expr),
2565 field,
2566 })
2567 }
2568 }
2569
2570 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2571 let between_prec = self.dialect.prec_value(Precedence::Between);
2572 let position_expr = self.maybe_parse(|p| {
2573 p.expect_token(&Token::LParen)?;
2575
2576 let expr = p.parse_subexpr(between_prec)?;
2578 p.expect_keyword_is(Keyword::IN)?;
2579 let from = p.parse_expr()?;
2580 p.expect_token(&Token::RParen)?;
2581 Ok(Expr::Position {
2582 expr: Box::new(expr),
2583 r#in: Box::new(from),
2584 })
2585 })?;
2586 match position_expr {
2587 Some(expr) => Ok(expr),
2588 None => self.parse_function(ObjectName::from(vec![ident])),
2591 }
2592 }
2593
2594 pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2596 let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2597 Keyword::SUBSTR => true,
2598 Keyword::SUBSTRING => false,
2599 _ => {
2600 self.prev_token();
2601 return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2602 }
2603 };
2604 self.expect_token(&Token::LParen)?;
2605 let expr = self.parse_expr()?;
2606 let mut from_expr = None;
2607 let special = self.consume_token(&Token::Comma);
2608 if special || self.parse_keyword(Keyword::FROM) {
2609 from_expr = Some(self.parse_expr()?);
2610 }
2611
2612 let mut to_expr = None;
2613 if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2614 to_expr = Some(self.parse_expr()?);
2615 }
2616 self.expect_token(&Token::RParen)?;
2617
2618 Ok(Expr::Substring {
2619 expr: Box::new(expr),
2620 substring_from: from_expr.map(Box::new),
2621 substring_for: to_expr.map(Box::new),
2622 special,
2623 shorthand,
2624 })
2625 }
2626
2627 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2628 self.expect_token(&Token::LParen)?;
2630 let expr = self.parse_expr()?;
2631 self.expect_keyword_is(Keyword::PLACING)?;
2632 let what_expr = self.parse_expr()?;
2633 self.expect_keyword_is(Keyword::FROM)?;
2634 let from_expr = self.parse_expr()?;
2635 let mut for_expr = None;
2636 if self.parse_keyword(Keyword::FOR) {
2637 for_expr = Some(self.parse_expr()?);
2638 }
2639 self.expect_token(&Token::RParen)?;
2640
2641 Ok(Expr::Overlay {
2642 expr: Box::new(expr),
2643 overlay_what: Box::new(what_expr),
2644 overlay_from: Box::new(from_expr),
2645 overlay_for: for_expr.map(Box::new),
2646 })
2647 }
2648
    /// Parses the body of a `TRIM` call, supporting three forms:
    /// - `TRIM([BOTH | LEADING | TRAILING] expr)`
    /// - `TRIM([BOTH | LEADING | TRAILING] trim_what FROM expr)`
    /// - `TRIM(expr, characters...)` (DuckDB/Snowflake/BigQuery/generic only)
    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        // Optional leading BOTH/LEADING/TRAILING qualifier.
        let mut trim_where = None;
        if let Token::Word(word) = self.peek_token().token {
            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
                trim_where = Some(self.parse_trim_where()?);
            }
        }
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::FROM) {
            // In the FROM form, the first expression is the set of characters
            // to trim, and the expression after FROM is the trim target.
            let trim_what = Box::new(expr);
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: Some(trim_what),
                trim_characters: None,
            })
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
        {
            // Comma form: a comma-separated list of character expressions.
            // NOTE: if a comma is present in an unsupported dialect, the comma
            // is still consumed here and the RParen expectation below fails.
            let characters = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where: None,
                trim_what: None,
                trim_characters: Some(characters),
            })
        } else {
            // Plain `TRIM(expr)` (possibly with a leading qualifier).
            self.expect_token(&Token::RParen)?;
            Ok(Expr::Trim {
                expr: Box::new(expr),
                trim_where,
                trim_what: None,
                trim_characters: None,
            })
        }
    }
2694
2695 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2696 let next_token = self.next_token();
2697 match &next_token.token {
2698 Token::Word(w) => match w.keyword {
2699 Keyword::BOTH => Ok(TrimWhereField::Both),
2700 Keyword::LEADING => Ok(TrimWhereField::Leading),
2701 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2702 _ => self.expected("trim_where field", next_token)?,
2703 },
2704 _ => self.expected("trim_where field", next_token),
2705 }
2706 }
2707
2708 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2711 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2712 self.expect_token(&Token::RBracket)?;
2713 Ok(Expr::Array(Array { elem: exprs, named }))
2714 }
2715
2716 pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2717 if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2718 if self.parse_keyword(Keyword::ERROR) {
2719 Ok(Some(ListAggOnOverflow::Error))
2720 } else {
2721 self.expect_keyword_is(Keyword::TRUNCATE)?;
2722 let filler = match self.peek_token().token {
2723 Token::Word(w)
2724 if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2725 {
2726 None
2727 }
2728 Token::SingleQuotedString(_)
2729 | Token::EscapedStringLiteral(_)
2730 | Token::UnicodeStringLiteral(_)
2731 | Token::NationalStringLiteral(_)
2732 | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2733 _ => self.expected(
2734 "either filler, WITH, or WITHOUT in LISTAGG",
2735 self.peek_token(),
2736 )?,
2737 };
2738 let with_count = self.parse_keyword(Keyword::WITH);
2739 if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2740 self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2741 }
2742 self.expect_keyword_is(Keyword::COUNT)?;
2743 Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2744 }
2745 } else {
2746 Ok(None)
2747 }
2748 }
2749
    /// Parses a date/time unit keyword (e.g. `YEAR`, `HOUR`, `EPOCH`) as used
    /// in `EXTRACT`, `CEIL ... TO`, and `INTERVAL` qualifiers. Depending on
    /// the dialect, arbitrary identifiers or single-quoted strings may also be
    /// accepted as custom units.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery (and the generic dialect) allow an optional
                    // weekday argument, e.g. `WEEK(MONDAY)`.
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the common misspelling and the correct spelling are
                // distinct keywords and map to distinct variants.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                _ if self.dialect.allow_extract_custom() => {
                    // Unknown word: rewind and re-read it as a custom unit.
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                // Some dialects accept a quoted unit, e.g. EXTRACT('year' ...).
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
2826
2827 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2828 match self.peek_token().token {
2829 Token::Word(w) => match w.keyword {
2830 Keyword::EXISTS => {
2831 let negated = true;
2832 let _ = self.parse_keyword(Keyword::EXISTS);
2833 self.parse_exists_expr(negated)
2834 }
2835 _ => Ok(Expr::UnaryOp {
2836 op: UnaryOperator::Not,
2837 expr: Box::new(
2838 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2839 ),
2840 }),
2841 },
2842 _ => Ok(Expr::UnaryOp {
2843 op: UnaryOperator::Not,
2844 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2845 }),
2846 }
2847 }
2848
2849 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2859 let token = self.expect_token(&Token::LBrace)?;
2860
2861 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2862 self.expect_token(&Token::RBrace)?;
2863 return Ok(fn_expr);
2864 }
2865
2866 if self.dialect.supports_dictionary_syntax() {
2867 self.prev_token(); return self.parse_dictionary();
2869 }
2870
2871 self.expected("an expression", token)
2872 }
2873
2874 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2880 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2881
2882 self.expect_keyword_is(Keyword::AGAINST)?;
2883
2884 self.expect_token(&Token::LParen)?;
2885
2886 let match_value = self.parse_value()?.value;
2888
2889 let in_natural_language_mode_keywords = &[
2890 Keyword::IN,
2891 Keyword::NATURAL,
2892 Keyword::LANGUAGE,
2893 Keyword::MODE,
2894 ];
2895
2896 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2897
2898 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2899
2900 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2901 if self.parse_keywords(with_query_expansion_keywords) {
2902 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2903 } else {
2904 Some(SearchModifier::InNaturalLanguageMode)
2905 }
2906 } else if self.parse_keywords(in_boolean_mode_keywords) {
2907 Some(SearchModifier::InBooleanMode)
2908 } else if self.parse_keywords(with_query_expansion_keywords) {
2909 Some(SearchModifier::WithQueryExpansion)
2910 } else {
2911 None
2912 };
2913
2914 self.expect_token(&Token::RParen)?;
2915
2916 Ok(Expr::MatchAgainst {
2917 columns,
2918 match_value,
2919 opt_search_modifier,
2920 })
2921 }
2922
    /// Parses the body of an `INTERVAL` expression (the keyword has already
    /// been consumed), e.g. `INTERVAL '1' DAY` or `INTERVAL '1-2' YEAR TO
    /// MONTH`, including the optional precision specifications.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a unit qualifier can safely parse a full
        // expression for the value; otherwise only a prefix expression is
        // parsed so that a following unit keyword is not swallowed as part of
        // the value expression.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // The leading unit (e.g. DAY) is optional unless the dialect
        // mandates it.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token().span.start
            );
        } else {
            None
        };

        // `SECOND` may carry a combined (precision, fractional-seconds) pair,
        // e.g. `SECOND(2, 6)`; any other unit takes at most a single
        // precision and an optional `TO <unit>` range whose end unit may in
        // turn carry a fractional-seconds precision when it is `SECOND`.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3005
    /// Returns true if the next token (peeked, not consumed) is a keyword
    /// usable as an `INTERVAL` unit qualifier. The accepted set mirrors a
    /// subset of the units handled by [`Self::parse_date_time_field`].
    pub fn next_token_is_temporal_unit(&mut self) -> bool {
        if let Token::Word(word) = self.peek_token().token {
            matches!(
                word.keyword,
                Keyword::YEAR
                    | Keyword::YEARS
                    | Keyword::MONTH
                    | Keyword::MONTHS
                    | Keyword::WEEK
                    | Keyword::WEEKS
                    | Keyword::DAY
                    | Keyword::DAYS
                    | Keyword::HOUR
                    | Keyword::HOURS
                    | Keyword::MINUTE
                    | Keyword::MINUTES
                    | Keyword::SECOND
                    | Keyword::SECONDS
                    | Keyword::CENTURY
                    | Keyword::DECADE
                    | Keyword::DOW
                    | Keyword::DOY
                    | Keyword::EPOCH
                    | Keyword::ISODOW
                    | Keyword::ISOYEAR
                    | Keyword::JULIAN
                    | Keyword::MICROSECOND
                    | Keyword::MICROSECONDS
                    | Keyword::MILLENIUM
                    | Keyword::MILLENNIUM
                    | Keyword::MILLISECOND
                    | Keyword::MILLISECONDS
                    | Keyword::NANOSECOND
                    | Keyword::NANOSECONDS
                    | Keyword::QUARTER
                    | Keyword::TIMEZONE
                    | Keyword::TIMEZONE_HOUR
                    | Keyword::TIMEZONE_MINUTE
            )
        } else {
            false
        }
    }
3051
    /// Parses a BigQuery-style `STRUCT<...>(...)` literal. Called with the
    /// parser positioned just after the `STRUCT` keyword, which is pushed
    /// back so the type-definition parser can re-consume it.
    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        self.prev_token();
        let (fields, trailing_bracket) =
            self.parse_struct_type_def(Self::parse_struct_field_def)?;
        // A leftover half of a `>>` token means the angle brackets were
        // unbalanced for a top-level literal.
        if trailing_bracket.0 {
            return parser_err!(
                "unmatched > in STRUCT literal",
                self.peek_token().span.start
            );
        }

        self.expect_token(&Token::LParen)?;
        // Field values may only use the `expr AS name` form when the struct
        // type carried no explicit field list (untyped syntax).
        let values = self
            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
        self.expect_token(&Token::RParen)?;

        Ok(Expr::Struct { values, fields })
    }
3079
    /// Parses a single value inside a struct literal, optionally renamed via
    /// `expr AS name`. `typed_syntax` is true when the struct carried an
    /// explicit field list, in which case `AS` is rejected.
    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
        let expr = self.parse_expr()?;
        if self.parse_keyword(Keyword::AS) {
            if typed_syntax {
                // The block argument rewinds the parser first so the reported
                // error location points at the offending `AS` keyword.
                return parser_err!("Typed syntax does not allow AS", {
                    self.prev_token();
                    self.peek_token().span.start
                });
            }
            let field_name = self.parse_identifier()?;
            Ok(Expr::Named {
                expr: expr.into(),
                name: field_name,
            })
        } else {
            Ok(expr)
        }
    }
3111
    /// Parses a `STRUCT<field, ...>` type definition, delegating each field to
    /// `elem_parser`. Returns the fields plus a flag indicating whether the
    /// list was closed by a `>>` token (meaning the second `>` belongs to an
    /// enclosing nested type and has already been consumed).
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // Bare `STRUCT` with no `<...>` field list is allowed.
        if Token::Lt != self.peek_token() {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            // Stop when a nested field already consumed our closing `>` (via
            // a `>>` token) or there is no further comma-separated field.
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3154
3155 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3157 self.expect_keyword_is(Keyword::STRUCT)?;
3158 self.expect_token(&Token::LParen)?;
3159 let struct_body = self.parse_comma_separated(|parser| {
3160 let field_name = parser.parse_identifier()?;
3161 let field_type = parser.parse_data_type()?;
3162
3163 Ok(StructField {
3164 field_name: Some(field_name),
3165 field_type,
3166 options: None,
3167 })
3168 });
3169 self.expect_token(&Token::RParen)?;
3170 struct_body
3171 }
3172
3173 fn parse_struct_field_def(
3183 &mut self,
3184 ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3185 let is_anonymous_field = !matches!(
3188 (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3189 (Token::Word(_), Token::Word(_))
3190 );
3191
3192 let field_name = if is_anonymous_field {
3193 None
3194 } else {
3195 Some(self.parse_identifier()?)
3196 };
3197
3198 let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3199
3200 let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3201 Ok((
3202 StructField {
3203 field_name,
3204 field_type,
3205 options,
3206 },
3207 trailing_bracket,
3208 ))
3209 }
3210
3211 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3221 self.expect_keyword_is(Keyword::UNION)?;
3222
3223 self.expect_token(&Token::LParen)?;
3224
3225 let fields = self.parse_comma_separated(|p| {
3226 Ok(UnionField {
3227 field_name: p.parse_identifier()?,
3228 field_type: p.parse_data_type()?,
3229 })
3230 })?;
3231
3232 self.expect_token(&Token::RParen)?;
3233
3234 Ok(fields)
3235 }
3236
3237 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3248 self.expect_token(&Token::LBrace)?;
3249
3250 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3251
3252 self.expect_token(&Token::RBrace)?;
3253
3254 Ok(Expr::Dictionary(fields))
3255 }
3256
3257 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3268 let key = self.parse_identifier()?;
3269
3270 self.expect_token(&Token::Colon)?;
3271
3272 let expr = self.parse_expr()?;
3273
3274 Ok(DictionaryField {
3275 key,
3276 value: Box::new(expr),
3277 })
3278 }
3279
3280 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3290 self.expect_token(&Token::LBrace)?;
3291 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3292 self.expect_token(&Token::RBrace)?;
3293 Ok(Expr::Map(Map { entries: fields }))
3294 }
3295
3296 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3306 let key = self.parse_expr()?;
3307
3308 self.expect_token(&Token::Colon)?;
3309
3310 let value = self.parse_expr()?;
3311
3312 Ok(MapEntry {
3313 key: Box::new(key),
3314 value: Box::new(value),
3315 })
3316 }
3317
3318 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3328 self.expect_keyword_is(Keyword::MAP)?;
3329 self.expect_token(&Token::LParen)?;
3330 let key_data_type = self.parse_data_type()?;
3331 self.expect_token(&Token::Comma)?;
3332 let value_data_type = self.parse_data_type()?;
3333 self.expect_token(&Token::RParen)?;
3334
3335 Ok((key_data_type, value_data_type))
3336 }
3337
    /// Parses a ClickHouse `Tuple(field, ...)` type definition. At least one
    /// field is required; any trailing-bracket information from nested struct
    /// fields is irrelevant here (parentheses, not angle brackets) and is
    /// discarded.
    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
        self.expect_keyword_is(Keyword::TUPLE)?;
        self.expect_token(&Token::LParen)?;
        let mut field_defs = vec![];
        // do-while: parse one field, continue while commas follow.
        loop {
            let (def, _) = self.parse_struct_field_def()?;
            field_defs.push(def);
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }
        self.expect_token(&Token::RParen)?;

        Ok(field_defs)
    }
3362
3363 fn expect_closing_angle_bracket(
3368 &mut self,
3369 trailing_bracket: MatchedTrailingBracket,
3370 ) -> Result<MatchedTrailingBracket, ParserError> {
3371 let trailing_bracket = if !trailing_bracket.0 {
3372 match self.peek_token().token {
3373 Token::Gt => {
3374 self.next_token();
3375 false.into()
3376 }
3377 Token::ShiftRight => {
3378 self.next_token();
3379 true.into()
3380 }
3381 _ => return self.expected(">", self.peek_token()),
3382 }
3383 } else {
3384 false.into()
3385 };
3386
3387 Ok(trailing_bracket)
3388 }
3389
3390 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3392 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3394 return infix;
3395 }
3396
3397 let dialect = self.dialect;
3398
3399 self.advance_token();
3400 let tok = self.get_current_token();
3401 debug!("infix: {tok:?}");
3402 let tok_index = self.get_current_index();
3403 let span = tok.span;
3404 let regular_binary_operator = match &tok.token {
3405 Token::Spaceship => Some(BinaryOperator::Spaceship),
3406 Token::DoubleEq => Some(BinaryOperator::Eq),
3407 Token::Assignment => Some(BinaryOperator::Assignment),
3408 Token::Eq => Some(BinaryOperator::Eq),
3409 Token::Neq => Some(BinaryOperator::NotEq),
3410 Token::Gt => Some(BinaryOperator::Gt),
3411 Token::GtEq => Some(BinaryOperator::GtEq),
3412 Token::Lt => Some(BinaryOperator::Lt),
3413 Token::LtEq => Some(BinaryOperator::LtEq),
3414 Token::Plus => Some(BinaryOperator::Plus),
3415 Token::Minus => Some(BinaryOperator::Minus),
3416 Token::Mul => Some(BinaryOperator::Multiply),
3417 Token::Mod => Some(BinaryOperator::Modulo),
3418 Token::StringConcat => Some(BinaryOperator::StringConcat),
3419 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3420 Token::Caret => {
3421 if dialect_is!(dialect is PostgreSqlDialect) {
3424 Some(BinaryOperator::PGExp)
3425 } else {
3426 Some(BinaryOperator::BitwiseXor)
3427 }
3428 }
3429 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3430 Token::Div => Some(BinaryOperator::Divide),
3431 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3432 Some(BinaryOperator::DuckIntegerDivide)
3433 }
3434 Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3435 Some(BinaryOperator::PGBitwiseShiftLeft)
3436 }
3437 Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3438 Some(BinaryOperator::PGBitwiseShiftRight)
3439 }
3440 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3441 Some(BinaryOperator::PGBitwiseXor)
3442 }
3443 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3444 Some(BinaryOperator::PGOverlap)
3445 }
3446 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3447 Some(BinaryOperator::PGOverlap)
3448 }
3449 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3450 Some(BinaryOperator::PGStartsWith)
3451 }
3452 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3453 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3454 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3455 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3456 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3457 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3458 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3459 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3460 Token::Arrow => Some(BinaryOperator::Arrow),
3461 Token::LongArrow => Some(BinaryOperator::LongArrow),
3462 Token::HashArrow => Some(BinaryOperator::HashArrow),
3463 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3464 Token::AtArrow => Some(BinaryOperator::AtArrow),
3465 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3466 Token::HashMinus => Some(BinaryOperator::HashMinus),
3467 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3468 Token::AtAt => Some(BinaryOperator::AtAt),
3469 Token::Question => Some(BinaryOperator::Question),
3470 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3471 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3472 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3473 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3474 Some(BinaryOperator::DoubleHash)
3475 }
3476
3477 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3478 Some(BinaryOperator::AndLt)
3479 }
3480 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3481 Some(BinaryOperator::AndGt)
3482 }
3483 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3484 Some(BinaryOperator::QuestionDash)
3485 }
3486 Token::AmpersandLeftAngleBracketVerticalBar
3487 if self.dialect.supports_geometric_types() =>
3488 {
3489 Some(BinaryOperator::AndLtPipe)
3490 }
3491 Token::VerticalBarAmpersandRightAngleBracket
3492 if self.dialect.supports_geometric_types() =>
3493 {
3494 Some(BinaryOperator::PipeAndGt)
3495 }
3496 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3497 Some(BinaryOperator::LtDashGt)
3498 }
3499 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3500 Some(BinaryOperator::LtCaret)
3501 }
3502 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3503 Some(BinaryOperator::GtCaret)
3504 }
3505 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3506 Some(BinaryOperator::QuestionHash)
3507 }
3508 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3509 Some(BinaryOperator::QuestionDoublePipe)
3510 }
3511 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3512 Some(BinaryOperator::QuestionDashPipe)
3513 }
3514 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3515 Some(BinaryOperator::TildeEq)
3516 }
3517 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3518 Some(BinaryOperator::LtLtPipe)
3519 }
3520 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3521 Some(BinaryOperator::PipeGtGt)
3522 }
3523 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3524
3525 Token::Word(w) => match w.keyword {
3526 Keyword::AND => Some(BinaryOperator::And),
3527 Keyword::OR => Some(BinaryOperator::Or),
3528 Keyword::XOR => Some(BinaryOperator::Xor),
3529 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3530 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3531 self.expect_token(&Token::LParen)?;
3532 let mut idents = vec![];
3537 loop {
3538 self.advance_token();
3539 idents.push(self.get_current_token().to_string());
3540 if !self.consume_token(&Token::Period) {
3541 break;
3542 }
3543 }
3544 self.expect_token(&Token::RParen)?;
3545 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3546 }
3547 _ => None,
3548 },
3549 _ => None,
3550 };
3551
3552 let tok = self.token_at(tok_index);
3553 if let Some(op) = regular_binary_operator {
3554 if let Some(keyword) =
3555 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3556 {
3557 self.expect_token(&Token::LParen)?;
3558 let right = if self.peek_sub_query() {
3559 self.prev_token(); self.parse_subexpr(precedence)?
3563 } else {
3564 let right = self.parse_subexpr(precedence)?;
3566 self.expect_token(&Token::RParen)?;
3567 right
3568 };
3569
3570 if !matches!(
3571 op,
3572 BinaryOperator::Gt
3573 | BinaryOperator::Lt
3574 | BinaryOperator::GtEq
3575 | BinaryOperator::LtEq
3576 | BinaryOperator::Eq
3577 | BinaryOperator::NotEq
3578 | BinaryOperator::PGRegexMatch
3579 | BinaryOperator::PGRegexIMatch
3580 | BinaryOperator::PGRegexNotMatch
3581 | BinaryOperator::PGRegexNotIMatch
3582 | BinaryOperator::PGLikeMatch
3583 | BinaryOperator::PGILikeMatch
3584 | BinaryOperator::PGNotLikeMatch
3585 | BinaryOperator::PGNotILikeMatch
3586 ) {
3587 return parser_err!(
3588 format!(
3589 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3590 ),
3591 span.start
3592 );
3593 };
3594
3595 Ok(match keyword {
3596 Keyword::ALL => Expr::AllOp {
3597 left: Box::new(expr),
3598 compare_op: op,
3599 right: Box::new(right),
3600 },
3601 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3602 left: Box::new(expr),
3603 compare_op: op,
3604 right: Box::new(right),
3605 is_some: keyword == Keyword::SOME,
3606 },
3607 unexpected_keyword => return Err(ParserError::ParserError(
3608 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3609 )),
3610 })
3611 } else {
3612 Ok(Expr::BinaryOp {
3613 left: Box::new(expr),
3614 op,
3615 right: Box::new(self.parse_subexpr(precedence)?),
3616 })
3617 }
3618 } else if let Token::Word(w) = &tok.token {
3619 match w.keyword {
3620 Keyword::IS => {
3621 if self.parse_keyword(Keyword::NULL) {
3622 Ok(Expr::IsNull(Box::new(expr)))
3623 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3624 Ok(Expr::IsNotNull(Box::new(expr)))
3625 } else if self.parse_keywords(&[Keyword::TRUE]) {
3626 Ok(Expr::IsTrue(Box::new(expr)))
3627 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3628 Ok(Expr::IsNotTrue(Box::new(expr)))
3629 } else if self.parse_keywords(&[Keyword::FALSE]) {
3630 Ok(Expr::IsFalse(Box::new(expr)))
3631 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3632 Ok(Expr::IsNotFalse(Box::new(expr)))
3633 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3634 Ok(Expr::IsUnknown(Box::new(expr)))
3635 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3636 Ok(Expr::IsNotUnknown(Box::new(expr)))
3637 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3638 let expr2 = self.parse_expr()?;
3639 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3640 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3641 {
3642 let expr2 = self.parse_expr()?;
3643 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3644 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3645 Ok(is_normalized)
3646 } else {
3647 self.expected(
3648 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3649 self.peek_token(),
3650 )
3651 }
3652 }
3653 Keyword::AT => {
3654 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3655 Ok(Expr::AtTimeZone {
3656 timestamp: Box::new(expr),
3657 time_zone: Box::new(self.parse_subexpr(precedence)?),
3658 })
3659 }
3660 Keyword::NOT
3661 | Keyword::IN
3662 | Keyword::BETWEEN
3663 | Keyword::LIKE
3664 | Keyword::ILIKE
3665 | Keyword::SIMILAR
3666 | Keyword::REGEXP
3667 | Keyword::RLIKE => {
3668 self.prev_token();
3669 let negated = self.parse_keyword(Keyword::NOT);
3670 let regexp = self.parse_keyword(Keyword::REGEXP);
3671 let rlike = self.parse_keyword(Keyword::RLIKE);
3672 let null = if !self.in_column_definition_state() {
3673 self.parse_keyword(Keyword::NULL)
3674 } else {
3675 false
3676 };
3677 if regexp || rlike {
3678 Ok(Expr::RLike {
3679 negated,
3680 expr: Box::new(expr),
3681 pattern: Box::new(
3682 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3683 ),
3684 regexp,
3685 })
3686 } else if negated && null {
3687 Ok(Expr::IsNotNull(Box::new(expr)))
3688 } else if self.parse_keyword(Keyword::IN) {
3689 self.parse_in(expr, negated)
3690 } else if self.parse_keyword(Keyword::BETWEEN) {
3691 self.parse_between(expr, negated)
3692 } else if self.parse_keyword(Keyword::LIKE) {
3693 Ok(Expr::Like {
3694 negated,
3695 any: self.parse_keyword(Keyword::ANY),
3696 expr: Box::new(expr),
3697 pattern: Box::new(
3698 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3699 ),
3700 escape_char: self.parse_escape_char()?,
3701 })
3702 } else if self.parse_keyword(Keyword::ILIKE) {
3703 Ok(Expr::ILike {
3704 negated,
3705 any: self.parse_keyword(Keyword::ANY),
3706 expr: Box::new(expr),
3707 pattern: Box::new(
3708 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3709 ),
3710 escape_char: self.parse_escape_char()?,
3711 })
3712 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3713 Ok(Expr::SimilarTo {
3714 negated,
3715 expr: Box::new(expr),
3716 pattern: Box::new(
3717 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3718 ),
3719 escape_char: self.parse_escape_char()?,
3720 })
3721 } else {
3722 self.expected("IN or BETWEEN after NOT", self.peek_token())
3723 }
3724 }
3725 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3726 Ok(Expr::IsNotNull(Box::new(expr)))
3727 }
3728 Keyword::MEMBER => {
3729 if self.parse_keyword(Keyword::OF) {
3730 self.expect_token(&Token::LParen)?;
3731 let array = self.parse_expr()?;
3732 self.expect_token(&Token::RParen)?;
3733 Ok(Expr::MemberOf(MemberOf {
3734 value: Box::new(expr),
3735 array: Box::new(array),
3736 }))
3737 } else {
3738 self.expected("OF after MEMBER", self.peek_token())
3739 }
3740 }
3741 _ => parser_err!(
3743 format!("No infix parser for token {:?}", tok.token),
3744 tok.span.start
3745 ),
3746 }
3747 } else if Token::DoubleColon == *tok {
3748 Ok(Expr::Cast {
3749 kind: CastKind::DoubleColon,
3750 expr: Box::new(expr),
3751 data_type: self.parse_data_type()?,
3752 format: None,
3753 })
3754 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3755 Ok(Expr::UnaryOp {
3756 op: UnaryOperator::PGPostfixFactorial,
3757 expr: Box::new(expr),
3758 })
3759 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3760 || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3761 {
3762 self.prev_token();
3763 self.parse_json_access(expr)
3764 } else {
3765 parser_err!(
3767 format!("No infix parser for token {:?}", tok.token),
3768 tok.span.start
3769 )
3770 }
3771 }
3772
3773 pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3775 if self.parse_keyword(Keyword::ESCAPE) {
3776 Ok(Some(self.parse_value()?.into()))
3777 } else {
3778 Ok(None)
3779 }
3780 }
3781
    /// Parses the interior of a `[ ... ]` subscript after the opening bracket
    /// has been consumed, consuming through the closing `]`.
    ///
    /// Accepted forms: a plain index `[i]`, and slices `[l:u:s]` where any of
    /// the lower bound, upper bound, and stride may be omitted.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // `[expr]` is a plain index; `[:]` (bare colon) is an unbounded slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // If a lower bound was parsed, the separating `:` has not been
        // consumed yet; require it before the upper bound.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[l:]` — upper bound omitted; otherwise parse it.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_expr()?)
        };

        // `[l:u]` — no stride component.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // `[l:u:]` or `[l:u:s]` — optional stride after the second `:`.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // If a stride expression was parsed, the closing `]` is still pending.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
3854
3855 pub fn parse_multi_dim_subscript(
3857 &mut self,
3858 chain: &mut Vec<AccessExpr>,
3859 ) -> Result<(), ParserError> {
3860 while self.consume_token(&Token::LBracket) {
3861 self.parse_subscript(chain)?;
3862 }
3863 Ok(())
3864 }
3865
3866 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3870 let subscript = self.parse_subscript_inner()?;
3871 chain.push(AccessExpr::Subscript(subscript));
3872 Ok(())
3873 }
3874
    /// Parses one object-key segment of a JSON path: either a (possibly
    /// double-quoted) identifier word or a double-quoted string literal.
    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
        let token = self.next_token();
        match token.token {
            // Bare identifiers and `"..."`-quoted identifiers are accepted;
            // other quote styles fall through to the error arm.
            Token::Word(Word {
                value,
                quote_style: quote_style @ (Some('"') | None),
                keyword: _,
            }) => Ok(JsonPathElem::Dot {
                key: value,
                quoted: quote_style.is_some(),
            }),

            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),

            _ => self.expected("variant object key name", token),
        }
    }
3898
3899 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3900 let path = self.parse_json_path()?;
3901 Ok(Expr::JsonAccess {
3902 value: Box::new(expr),
3903 path,
3904 })
3905 }
3906
    /// Parses a JSON path such as `:a.b[0]`: a `:`-introduced first key,
    /// then `.key` segments and `[expr]` brackets in any order, stopping at
    /// the first token that fits neither form.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:` may only introduce the first element.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.` separates subsequent object keys.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                Token::LBracket => {
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; put the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers only invoke this after peeking `:` or `[`, so at least one
        // element must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
3933
    /// Parses the right-hand side of `[NOT] IN`, after the `IN` keyword has
    /// been consumed: `IN UNNEST(expr)`, `IN (subquery)`, or `IN (list)`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array_expression)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure `maybe_parse` rewinds so the same
        // tokens can be re-parsed as an expression list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects (e.g. Snowflake) allow `IN ()`.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
3968
3969 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3971 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3974 self.expect_keyword_is(Keyword::AND)?;
3975 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3976 Ok(Expr::Between {
3977 expr: Box::new(expr),
3978 negated,
3979 low: Box::new(low),
3980 high: Box::new(high),
3981 })
3982 }
3983
3984 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3986 Ok(Expr::Cast {
3987 kind: CastKind::DoubleColon,
3988 expr: Box::new(expr),
3989 data_type: self.parse_data_type()?,
3990 format: None,
3991 })
3992 }
3993
    /// Returns the precedence the next token would bind with, delegating to
    /// the dialect's default precedence table.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
3998
    /// Returns the token at `index`, or the shared EOF token when `index` is
    /// out of range.
    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
    }
4004
    /// Returns (a clone of) the next non-whitespace token without advancing
    /// the parser.
    pub fn peek_token(&self) -> TokenWithSpan {
        self.peek_nth_token(0)
    }
4012
    /// Returns a reference to the next non-whitespace token without advancing
    /// the parser (avoids the clone done by [`Self::peek_token`]).
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4018
    /// Returns the next `N` non-whitespace tokens (spans stripped) without
    /// advancing the parser; missing tokens are reported as `Token::EOF`.
    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
        self.peek_tokens_with_location()
            .map(|with_loc| with_loc.token)
    }
4045
4046 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4051 let mut index = self.index;
4052 core::array::from_fn(|_| loop {
4053 let token = self.tokens.get(index);
4054 index += 1;
4055 if let Some(TokenWithSpan {
4056 token: Token::Whitespace(_),
4057 span: _,
4058 }) = token
4059 {
4060 continue;
4061 }
4062 break token.cloned().unwrap_or(TokenWithSpan {
4063 token: Token::EOF,
4064 span: Span::empty(),
4065 });
4066 })
4067 }
4068
4069 pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4074 let mut index = self.index;
4075 core::array::from_fn(|_| loop {
4076 let token = self.tokens.get(index);
4077 index += 1;
4078 if let Some(TokenWithSpan {
4079 token: Token::Whitespace(_),
4080 span: _,
4081 }) = token
4082 {
4083 continue;
4084 }
4085 break token.unwrap_or(&EOF_TOKEN);
4086 })
4087 }
4088
    /// Returns (a clone of) the `n`th non-whitespace token ahead of the
    /// current position, without advancing the parser.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4093
4094 pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4096 let mut index = self.index;
4097 loop {
4098 index += 1;
4099 match self.tokens.get(index - 1) {
4100 Some(TokenWithSpan {
4101 token: Token::Whitespace(_),
4102 span: _,
4103 }) => continue,
4104 non_whitespace => {
4105 if n == 0 {
4106 return non_whitespace.unwrap_or(&EOF_TOKEN);
4107 }
4108 n -= 1;
4109 }
4110 }
4111 }
4112 }
4113
    /// Returns the next token *including whitespace*, without advancing.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4119
4120 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4122 self.tokens
4123 .get(self.index + n)
4124 .cloned()
4125 .unwrap_or(TokenWithSpan {
4126 token: Token::EOF,
4127 span: Span::empty(),
4128 })
4129 }
4130
4131 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4135 let index = self.index;
4136 let matched = self.parse_keywords(expected);
4137 self.index = index;
4138 matched
4139 }
4140
    /// Consumes and returns (a clone of) the next non-whitespace token.
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4149
    /// Returns the index of the current (most recently consumed) token;
    /// saturates at 0 before any token has been consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4157
4158 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4160 self.index += 1;
4161 self.tokens.get(self.index - 1)
4162 }
4163
4164 pub fn advance_token(&mut self) {
4168 loop {
4169 self.index += 1;
4170 match self.tokens.get(self.index - 1) {
4171 Some(TokenWithSpan {
4172 token: Token::Whitespace(_),
4173 span: _,
4174 }) => continue,
4175 _ => break,
4176 }
4177 }
4178 }
4179
    /// Returns the current (most recently consumed) token, or EOF if nothing
    /// has been consumed yet.
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4186
    /// Returns the token consumed immediately before the current one, or EOF
    /// when no such token exists.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4193
    /// Returns the next, not-yet-consumed token (whitespace included), or EOF
    /// at the end of the stream.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4200
4201 pub fn prev_token(&mut self) {
4208 loop {
4209 assert!(self.index > 0);
4210 self.index -= 1;
4211 if let Some(TokenWithSpan {
4212 token: Token::Whitespace(_),
4213 span: _,
4214 }) = self.tokens.get(self.index)
4215 {
4216 continue;
4217 }
4218 return;
4219 }
4220 }
4221
4222 pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4224 parser_err!(
4225 format!("Expected: {expected}, found: {found}"),
4226 found.span.start
4227 )
4228 }
4229
4230 pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4232 parser_err!(
4233 format!("Expected: {expected}, found: {found}"),
4234 found.span.start
4235 )
4236 }
4237
4238 pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4240 let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4241 parser_err!(
4242 format!("Expected: {expected}, found: {found}"),
4243 found.span.start
4244 )
4245 }
4246
4247 #[must_use]
4250 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4251 if self.peek_keyword(expected) {
4252 self.advance_token();
4253 true
4254 } else {
4255 false
4256 }
4257 }
4258
4259 #[must_use]
4260 pub fn peek_keyword(&self, expected: Keyword) -> bool {
4261 matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4262 }
4263
    /// If the next keyword is `expected` and it is immediately followed by
    /// the given token sequence, consumes all of them and returns true;
    /// otherwise consumes nothing and returns false.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4274
    /// Returns true if the next keyword is `expected` followed immediately by
    /// the given token sequence, without consuming anything.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4282
4283 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4284 match &self.peek_token_ref().token {
4285 Token::Word(w) if expected == w.keyword => {
4286 for (idx, token) in tokens.iter().enumerate() {
4287 if self.peek_nth_token_ref(idx + 1).token != *token {
4288 return false;
4289 }
4290 }
4291
4292 if consume {
4293 for _ in 0..(tokens.len() + 1) {
4294 self.advance_token();
4295 }
4296 }
4297
4298 true
4299 }
4300 _ => false,
4301 }
4302 }
4303
4304 #[must_use]
4308 pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4309 let index = self.index;
4310 for &keyword in keywords {
4311 if !self.parse_keyword(keyword) {
4312 self.index = index;
4315 return false;
4316 }
4317 }
4318 true
4319 }
4320
4321 #[must_use]
4324 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4325 for keyword in keywords {
4326 if self.peek_keyword(*keyword) {
4327 return Some(*keyword);
4328 }
4329 }
4330 None
4331 }
4332
4333 #[must_use]
4337 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4338 match &self.peek_token_ref().token {
4339 Token::Word(w) => {
4340 keywords
4341 .iter()
4342 .find(|keyword| **keyword == w.keyword)
4343 .map(|keyword| {
4344 self.advance_token();
4345 *keyword
4346 })
4347 }
4348 _ => None,
4349 }
4350 }
4351
4352 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4355 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4356 Ok(keyword)
4357 } else {
4358 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4359 self.expected_ref(
4360 &format!("one of {}", keywords.join(" or ")),
4361 self.peek_token_ref(),
4362 )
4363 }
4364 }
4365
4366 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4371 if self.parse_keyword(expected) {
4372 Ok(self.get_current_token().clone())
4373 } else {
4374 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4375 }
4376 }
4377
4378 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4384 if self.parse_keyword(expected) {
4385 Ok(())
4386 } else {
4387 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4388 }
4389 }
4390
4391 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4394 for &kw in expected {
4395 self.expect_keyword_is(kw)?;
4396 }
4397 Ok(())
4398 }
4399
4400 #[must_use]
4404 pub fn consume_token(&mut self, expected: &Token) -> bool {
4405 if self.peek_token_ref() == expected {
4406 self.advance_token();
4407 true
4408 } else {
4409 false
4410 }
4411 }
4412
4413 #[must_use]
4417 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4418 let index = self.index;
4419 for token in tokens {
4420 if !self.consume_token(token) {
4421 self.index = index;
4422 return false;
4423 }
4424 }
4425 true
4426 }
4427
4428 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4430 if self.peek_token_ref() == expected {
4431 Ok(self.next_token())
4432 } else {
4433 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4434 }
4435 }
4436
4437 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4438 where
4439 <T as FromStr>::Err: Display,
4440 {
4441 s.parse::<T>().map_err(|e| {
4442 ParserError::ParserError(format!(
4443 "Could not parse '{s}' as {}: {e}{loc}",
4444 core::any::type_name::<T>()
4445 ))
4446 })
4447 }
4448
4449 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4451 let trailing_commas =
4457 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4458
4459 self.parse_comma_separated_with_trailing_commas(
4460 |p| p.parse_select_item(),
4461 trailing_commas,
4462 Self::is_reserved_for_column_alias,
4463 )
4464 }
4465
    /// Parses the comma-separated list of privileges in a GRANT/REVOKE
    /// statement, optionally tolerating a trailing comma.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // After a comma, a terminator (ON, closing delimiter, `;`, or
                // EOF) means the comma was trailing and the list is complete.
                match self.peek_token().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4488
4489 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4491 let trailing_commas = self.dialect.supports_from_trailing_commas();
4492
4493 self.parse_comma_separated_with_trailing_commas(
4494 Parser::parse_table_and_joins,
4495 trailing_commas,
4496 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4497 )
4498 }
4499
    /// Decides whether a comma-separated list has ended.
    ///
    /// Without a following comma the list is over. With a comma and
    /// `trailing_commas` enabled, the comma is treated as trailing (and the
    /// list as over) when the next token is a reserved keyword per
    /// `is_reserved_keyword` or a closing delimiter/`;`/EOF. The peeked
    /// token is pushed back either way; only the comma stays consumed.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            // Un-consume the peeked token; the comma remains consumed.
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4532
    /// True if the parser sits at the end of a comma-separated list, using
    /// the parser options' trailing-comma setting and the default
    /// column-alias reserved-word check.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4541
    /// Parses one or more items produced by `f`, separated by commas, using
    /// the parser options' trailing-comma setting.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4553
4554 fn parse_comma_separated_with_trailing_commas<T, F, R>(
4559 &mut self,
4560 mut f: F,
4561 trailing_commas: bool,
4562 is_reserved_keyword: R,
4563 ) -> Result<Vec<T>, ParserError>
4564 where
4565 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4566 R: Fn(&Keyword, &mut Parser) -> bool,
4567 {
4568 let mut values = vec![];
4569 loop {
4570 values.push(f(self)?);
4571 if self.is_parse_comma_separated_end_with_trailing_commas(
4572 trailing_commas,
4573 &is_reserved_keyword,
4574 ) {
4575 break;
4576 }
4577 }
4578 Ok(values)
4579 }
4580
4581 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4583 where
4584 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4585 {
4586 let mut values = vec![];
4587 loop {
4588 values.push(f(self)?);
4589 if !self.consume_token(&Token::Period) {
4590 break;
4591 }
4592 }
4593 Ok(values)
4594 }
4595
4596 pub fn parse_keyword_separated<T, F>(
4598 &mut self,
4599 keyword: Keyword,
4600 mut f: F,
4601 ) -> Result<Vec<T>, ParserError>
4602 where
4603 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4604 {
4605 let mut values = vec![];
4606 loop {
4607 values.push(f(self)?);
4608 if !self.parse_keyword(keyword) {
4609 break;
4610 }
4611 }
4612 Ok(values)
4613 }
4614
4615 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4616 where
4617 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4618 {
4619 self.expect_token(&Token::LParen)?;
4620 let res = f(self)?;
4621 self.expect_token(&Token::RParen)?;
4622 Ok(res)
4623 }
4624
    /// Like `parse_comma_separated`, but allows the list to be empty: if the
    /// next token is already `end_token` (or a lone trailing comma precedes
    /// it, when trailing commas are enabled), returns an empty `Vec` without
    /// consuming `end_token`.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        if self.peek_token().token == end_token {
            return Ok(vec![]);
        }

        // `(, )` with trailing commas enabled: swallow the comma only.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
4646
4647 pub(crate) fn parse_statement_list(
4651 &mut self,
4652 terminal_keywords: &[Keyword],
4653 ) -> Result<Vec<Statement>, ParserError> {
4654 let mut values = vec![];
4655 loop {
4656 match &self.peek_nth_token_ref(0).token {
4657 Token::EOF => break,
4658 Token::Word(w) => {
4659 if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4660 break;
4661 }
4662 }
4663 _ => {}
4664 }
4665
4666 values.push(self.parse_statement()?);
4667 self.expect_token(&Token::SemiColon)?;
4668 }
4669 Ok(values)
4670 }
4671
    /// Default reserved-keyword check for list termination: a keyword ends a
    /// list when the dialect does not accept it as a column alias.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
4678
4679 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4683 where
4684 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4685 {
4686 match self.try_parse(f) {
4687 Ok(t) => Ok(Some(t)),
4688 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4689 _ => Ok(None),
4690 }
4691 }
4692
4693 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4695 where
4696 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4697 {
4698 let index = self.index;
4699 match f(self) {
4700 Ok(t) => Ok(t),
4701 Err(e) => {
4702 self.index = index;
4704 Err(e)
4705 }
4706 }
4707 }
4708
    /// Parses `ALL`, `DISTINCT`, or `DISTINCT ON (...)` after `SELECT`:
    /// returns `None` for `ALL` or neither keyword, and errors when both
    /// `ALL` and `DISTINCT` are present.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Remember where we are for the ALL+DISTINCT conflict error below.
        let loc = self.peek_token().span.start;
        let all = self.parse_keyword(Keyword::ALL);
        let distinct = self.parse_keyword(Keyword::DISTINCT);
        if !distinct {
            return Ok(None);
        }
        if all {
            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
        }
        let on = self.parse_keyword(Keyword::ON);
        if !on {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // `DISTINCT ON ()` — detect the empty list by consuming the RParen,
        // then push it back so the final expect_token below still matches it.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
4736
    /// Dispatches a `CREATE ...` statement to the object-specific sub-parser
    /// based on the keywords that follow `CREATE`.
    ///
    /// Leading modifiers (`OR REPLACE`, `OR ALTER`, `LOCAL`/`GLOBAL`,
    /// `TRANSIENT`, `TEMP`/`TEMPORARY`, DuckDB `PERSISTENT`, and view
    /// parameters) are consumed first and forwarded to the chosen sub-parser.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // GLOBAL/LOCAL fold into one tri-state flag: Some(true)/Some(false)/None.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy()
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain()
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            self.parse_create_user(or_replace)
        } else if or_replace {
            // After OR REPLACE, only the object types above are valid.
            self.expected(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension()
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role()
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector()
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR has three sub-forms: FAMILY, CLASS, or a bare operator.
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class()
            } else {
                self.parse_create_operator()
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else {
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
4825
    /// Parses `CREATE [OR REPLACE] USER [IF NOT EXISTS] <name> [options]
    /// [WITH TAG (...)]` (Snowflake-style): space-delimited options run up
    /// to the `WITH`/`TAG` boundary; tags are a comma-delimited list.
    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        let options = self
            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
            .options;
        // NOTE(review): a `WITH` not followed by `TAG` is consumed here and
        // yields an empty tag list — presumably intentional; confirm against
        // the dialect's tests.
        let with_tags = self.parse_keyword(Keyword::WITH);
        let tags = if self.parse_keyword(Keyword::TAG) {
            self.parse_key_value_options(true, &[])?.options
        } else {
            vec![]
        };
        Ok(Statement::CreateUser(CreateUser {
            or_replace,
            if_not_exists,
            name,
            options: KeyValueOptions {
                options,
                delimiter: KeyValueOptionsDelimiter::Space,
            },
            with_tags,
            tags: KeyValueOptions {
                options: tags,
                delimiter: KeyValueOptionsDelimiter::Comma,
            },
        }))
    }
4853
    /// Parses a DuckDB `CREATE [OR REPLACE] [TEMPORARY|PERSISTENT] SECRET
    /// [IF NOT EXISTS] [name] [IN storage] ( TYPE <type> [, key value ...] )`
    /// statement.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        // Both the secret name and the `IN <storage>` clause are optional and
        // may appear in either order before the option list's `(`.
        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token() != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN <storage>`.
            if storage_specifier.is_none()
                && self.peek_token() != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        // Remaining entries are `key value` pairs separated by commas.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
4912
4913 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4915 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4916 if self.parse_keyword(Keyword::TABLE) {
4917 let table_name = self.parse_object_name(false)?;
4918 if self.peek_token().token != Token::EOF {
4919 if let Token::Word(word) = self.peek_token().token {
4920 if word.keyword == Keyword::OPTIONS {
4921 options = self.parse_options(Keyword::OPTIONS)?
4922 }
4923 };
4924
4925 if self.peek_token().token != Token::EOF {
4926 let (a, q) = self.parse_as_query()?;
4927 has_as = a;
4928 query = Some(q);
4929 }
4930
4931 Ok(Statement::Cache {
4932 table_flag,
4933 table_name,
4934 has_as,
4935 options,
4936 query,
4937 })
4938 } else {
4939 Ok(Statement::Cache {
4940 table_flag,
4941 table_name,
4942 has_as,
4943 options,
4944 query,
4945 })
4946 }
4947 } else {
4948 table_flag = Some(self.parse_object_name(false)?);
4949 if self.parse_keyword(Keyword::TABLE) {
4950 let table_name = self.parse_object_name(false)?;
4951 if self.peek_token() != Token::EOF {
4952 if let Token::Word(word) = self.peek_token().token {
4953 if word.keyword == Keyword::OPTIONS {
4954 options = self.parse_options(Keyword::OPTIONS)?
4955 }
4956 };
4957
4958 if self.peek_token() != Token::EOF {
4959 let (a, q) = self.parse_as_query()?;
4960 has_as = a;
4961 query = Some(q);
4962 }
4963
4964 Ok(Statement::Cache {
4965 table_flag,
4966 table_name,
4967 has_as,
4968 options,
4969 query,
4970 })
4971 } else {
4972 Ok(Statement::Cache {
4973 table_flag,
4974 table_name,
4975 has_as,
4976 options,
4977 query,
4978 })
4979 }
4980 } else {
4981 if self.peek_token() == Token::EOF {
4982 self.prev_token();
4983 }
4984 self.expected("a `TABLE` keyword", self.peek_token())
4985 }
4986 }
4987 }
4988
4989 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4991 match self.peek_token().token {
4992 Token::Word(word) => match word.keyword {
4993 Keyword::AS => {
4994 self.next_token();
4995 Ok((true, self.parse_query()?))
4996 }
4997 _ => Ok((false, self.parse_query()?)),
4998 },
4999 _ => self.expected("a QUERY statement", self.peek_token()),
5000 }
5001 }
5002
5003 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5005 self.expect_keyword_is(Keyword::TABLE)?;
5006 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5007 let table_name = self.parse_object_name(false)?;
5008 Ok(Statement::UNCache {
5009 table_name,
5010 if_exists,
5011 })
5012 }
5013
5014 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5016 self.expect_keyword_is(Keyword::TABLE)?;
5017 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5018 let table_name = self.parse_object_name(false)?;
5019 self.expect_keyword_is(Keyword::USING)?;
5020 let module_name = self.parse_identifier()?;
5021 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5026 Ok(Statement::CreateVirtualTable {
5027 name: table_name,
5028 if_not_exists,
5029 module_name,
5030 module_args,
5031 })
5032 }
5033
5034 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5035 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5036
5037 let schema_name = self.parse_schema_name()?;
5038
5039 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5040 Some(self.parse_expr()?)
5041 } else {
5042 None
5043 };
5044
5045 let with = if self.peek_keyword(Keyword::WITH) {
5046 Some(self.parse_options(Keyword::WITH)?)
5047 } else {
5048 None
5049 };
5050
5051 let options = if self.peek_keyword(Keyword::OPTIONS) {
5052 Some(self.parse_options(Keyword::OPTIONS)?)
5053 } else {
5054 None
5055 };
5056
5057 let clone = if self.parse_keyword(Keyword::CLONE) {
5058 Some(self.parse_object_name(false)?)
5059 } else {
5060 None
5061 };
5062
5063 Ok(Statement::CreateSchema {
5064 schema_name,
5065 if_not_exists,
5066 with,
5067 options,
5068 default_collate_spec,
5069 clone,
5070 })
5071 }
5072
5073 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5074 if self.parse_keyword(Keyword::AUTHORIZATION) {
5075 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5076 } else {
5077 let name = self.parse_object_name(false)?;
5078
5079 if self.parse_keyword(Keyword::AUTHORIZATION) {
5080 Ok(SchemaName::NamedAuthorization(
5081 name,
5082 self.parse_identifier()?,
5083 ))
5084 } else {
5085 Ok(SchemaName::Simple(name))
5086 }
5087 }
5088 }
5089
5090 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5091 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5092 let db_name = self.parse_object_name(false)?;
5093 let mut location = None;
5094 let mut managed_location = None;
5095 loop {
5096 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5097 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5098 Some(Keyword::MANAGEDLOCATION) => {
5099 managed_location = Some(self.parse_literal_string()?)
5100 }
5101 _ => break,
5102 }
5103 }
5104 let clone = if self.parse_keyword(Keyword::CLONE) {
5105 Some(self.parse_object_name(false)?)
5106 } else {
5107 None
5108 };
5109
5110 Ok(Statement::CreateDatabase {
5111 db_name,
5112 if_not_exists: ine,
5113 location,
5114 managed_location,
5115 or_replace: false,
5116 transient: false,
5117 clone,
5118 data_retention_time_in_days: None,
5119 max_data_extension_time_in_days: None,
5120 external_volume: None,
5121 catalog: None,
5122 replace_invalid_characters: None,
5123 default_ddl_collation: None,
5124 storage_serialization_policy: None,
5125 comment: None,
5126 catalog_sync: None,
5127 catalog_sync_namespace_mode: None,
5128 catalog_sync_namespace_flatten_delimiter: None,
5129 with_tags: None,
5130 with_contacts: None,
5131 })
5132 }
5133
5134 pub fn parse_optional_create_function_using(
5135 &mut self,
5136 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5137 if !self.parse_keyword(Keyword::USING) {
5138 return Ok(None);
5139 };
5140 let keyword =
5141 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5142
5143 let uri = self.parse_literal_string()?;
5144
5145 match keyword {
5146 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5147 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5148 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5149 _ => self.expected(
5150 "JAR, FILE or ARCHIVE, got {:?}",
5151 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5152 ),
5153 }
5154 }
5155
5156 pub fn parse_create_function(
5157 &mut self,
5158 or_alter: bool,
5159 or_replace: bool,
5160 temporary: bool,
5161 ) -> Result<Statement, ParserError> {
5162 if dialect_of!(self is HiveDialect) {
5163 self.parse_hive_create_function(or_replace, temporary)
5164 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5165 self.parse_postgres_create_function(or_replace, temporary)
5166 } else if dialect_of!(self is DuckDbDialect) {
5167 self.parse_create_macro(or_replace, temporary)
5168 } else if dialect_of!(self is BigQueryDialect) {
5169 self.parse_bigquery_create_function(or_replace, temporary)
5170 } else if dialect_of!(self is MsSqlDialect) {
5171 self.parse_mssql_create_function(or_alter, or_replace, temporary)
5172 } else {
5173 self.prev_token();
5174 self.expected("an object type after CREATE", self.peek_token())
5175 }
5176 }
5177
    /// Parses a PostgreSQL-style `CREATE FUNCTION`: name, parenthesized
    /// argument list, optional `RETURNS <type>`, then a sequence of function
    /// attributes (`AS`, `LANGUAGE`, volatility, null handling, `PARALLEL`,
    /// `SECURITY`, `SET`, `RETURN`) that may appear in any order.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // An empty argument list `()` is allowed.
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Accumulates the single-occurrence attributes while the loop below
        // consumes them in whatever order the statement lists them.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        // `SET` parameters may appear multiple times, so they are collected
        // separately rather than stored in `Body`.
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a repeated attribute with an explicit error rather than
            // silently overwriting the earlier occurrence.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                // The three volatility keywords share one slot, so any pair of
                // them is rejected as a duplicate.
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                // The three null-handling forms likewise share one slot.
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected("DEFINER or INVOKER", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // `SET <name> FROM CURRENT` or `SET <name> {= | TO} <values>`.
                let name = self.parse_identifier()?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected("= or TO", self.peek_token());
                    }
                    let values = self.parse_comma_separated(Parser::parse_expr)?;
                    FunctionSetValue::Values(values)
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // `RETURN <expr>` is an alternative body form; it conflicts
                // with an earlier `AS` body since both fill `function_body`.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No recognized attribute keyword: the attribute list is done.
                break;
            }
        }

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        }))
    }
5327
5328 fn parse_hive_create_function(
5332 &mut self,
5333 or_replace: bool,
5334 temporary: bool,
5335 ) -> Result<Statement, ParserError> {
5336 let name = self.parse_object_name(false)?;
5337 self.expect_keyword_is(Keyword::AS)?;
5338
5339 let body = self.parse_create_function_body_string()?;
5340 let using = self.parse_optional_create_function_using()?;
5341
5342 Ok(Statement::CreateFunction(CreateFunction {
5343 or_alter: false,
5344 or_replace,
5345 temporary,
5346 name,
5347 function_body: Some(body),
5348 using,
5349 if_not_exists: false,
5350 args: None,
5351 return_type: None,
5352 behavior: None,
5353 called_on_null: None,
5354 parallel: None,
5355 security: None,
5356 set_params: vec![],
5357 language: None,
5358 determinism_specifier: None,
5359 options: None,
5360 remote_connection: None,
5361 }))
5362 }
5363
    /// Parses a BigQuery `CREATE FUNCTION`:
    /// `[IF NOT EXISTS] <name>(<params>) [RETURNS <type>]
    /// [[NOT] DETERMINISTIC] [LANGUAGE <lang>]
    /// [REMOTE WITH CONNECTION <conn>] [OPTIONS(...)] [AS <expr> [OPTIONS(...)]]`.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // `OPTIONS(...)` may come either before or after the `AS` body; try
        // the "before" position here and the "after" position below.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // A remote function has no local `AS` body; otherwise one is required.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS appeared before AS, so check for it after the
                // body. The body variant records on which side of OPTIONS the
                // `AS` clause appeared so the statement can round-trip.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        }))
    }
5443
    /// Parses a T-SQL (MsSql) `CREATE FUNCTION`: parameters, a `RETURNS`
    /// clause (scalar type or `@name TABLE (...)`), and a body that is either
    /// `BEGIN ... END` or `RETURN <subquery | SELECT ...>`.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Try the table-valued form `RETURNS <name> TABLE (...)` first;
        // `maybe_parse` rewinds completely if this shape does not match.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            p.expect_keyword_is(Keyword::TABLE)?;
            // Step back so `parse_data_type` sees the `TABLE` keyword itself.
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                // `parser_err!(...)?` always propagates the error here.
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        // Fall back to a plain scalar return type when the table form did not
        // match.
        let return_type = if return_table.is_some() {
            return_table
        } else {
            Some(self.parse_data_type()?)
        };

        // `AS` before the body is optional.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            // Multi-statement body: `BEGIN <statements> END`, keeping the
            // delimiter tokens for exact round-tripping.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token() == Token::LParen {
                // `RETURN (<expr>)` — parsed as a parenthesized expression.
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                // Inline table-valued form: `RETURN SELECT ...`.
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token().span.start)?
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        }))
    }
5534
5535 fn parse_create_function_name_and_params(
5536 &mut self,
5537 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5538 let name = self.parse_object_name(false)?;
5539 let parse_function_param =
5540 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5541 let name = parser.parse_identifier()?;
5542 let data_type = parser.parse_data_type()?;
5543 let default_expr = if parser.consume_token(&Token::Eq) {
5544 Some(parser.parse_expr()?)
5545 } else {
5546 None
5547 };
5548
5549 Ok(OperateFunctionArg {
5550 mode: None,
5551 name: Some(name),
5552 data_type,
5553 default_expr,
5554 })
5555 };
5556 self.expect_token(&Token::LParen)?;
5557 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5558 self.expect_token(&Token::RParen)?;
5559 Ok((name, args))
5560 }
5561
    /// Parses one function argument declaration:
    /// `[IN | OUT | INOUT] [<name>] <type> [{DEFAULT | =} <expr>]`.
    ///
    /// Because the argument name is optional, the first word is parsed as a
    /// data type; if a second data type follows, the first token is
    /// reinterpreted as the argument name.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else {
            None
        };

        let mut name = None;
        // Tentatively parse what may be either the argument name or its type.
        let mut data_type = self.parse_data_type()?;

        // Snapshot the token position so the already-consumed token can be
        // re-read as a name if a real data type follows.
        let data_type_idx = self.get_current_index();

        // Guard against treating `DEFAULT` (which starts the default-value
        // clause) as the beginning of a second data type.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // A second data type parsed successfully, so the first one was
        // actually the argument name.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // The name must be a bare word; anything else cannot be a valid
            // argument name.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Optional default value introduced by `DEFAULT` or `=`.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
5620
5621 pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5627 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5628 {
5629 self.prev_token();
5630 return self.expected("an object type after DROP", self.peek_token());
5631 }
5632 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5633 let trigger_name = self.parse_object_name(false)?;
5634 let table_name = if self.parse_keyword(Keyword::ON) {
5635 Some(self.parse_object_name(false)?)
5636 } else {
5637 None
5638 };
5639 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5640 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5641 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5642 Some(unexpected_keyword) => return Err(ParserError::ParserError(
5643 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
5644 )),
5645 None => None,
5646 };
5647 Ok(Statement::DropTrigger(DropTrigger {
5648 if_exists,
5649 trigger_name,
5650 table_name,
5651 option,
5652 }))
5653 }
5654
5655 pub fn parse_create_trigger(
5656 &mut self,
5657 temporary: bool,
5658 or_alter: bool,
5659 or_replace: bool,
5660 is_constraint: bool,
5661 ) -> Result<Statement, ParserError> {
5662 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5663 {
5664 self.prev_token();
5665 return self.expected("an object type after CREATE", self.peek_token());
5666 }
5667
5668 let name = self.parse_object_name(false)?;
5669 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
5670
5671 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5672 self.expect_keyword_is(Keyword::ON)?;
5673 let table_name = self.parse_object_name(false)?;
5674
5675 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5676 self.parse_object_name(true).ok()
5677 } else {
5678 None
5679 };
5680
5681 let characteristics = self.parse_constraint_characteristics()?;
5682
5683 let mut referencing = vec![];
5684 if self.parse_keyword(Keyword::REFERENCING) {
5685 while let Some(refer) = self.parse_trigger_referencing()? {
5686 referencing.push(refer);
5687 }
5688 }
5689
5690 let trigger_object = if self.parse_keyword(Keyword::FOR) {
5691 let include_each = self.parse_keyword(Keyword::EACH);
5692 let trigger_object =
5693 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5694 Keyword::ROW => TriggerObject::Row,
5695 Keyword::STATEMENT => TriggerObject::Statement,
5696 unexpected_keyword => return Err(ParserError::ParserError(
5697 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
5698 )),
5699 };
5700
5701 Some(if include_each {
5702 TriggerObjectKind::ForEach(trigger_object)
5703 } else {
5704 TriggerObjectKind::For(trigger_object)
5705 })
5706 } else {
5707 let _ = self.parse_keyword(Keyword::FOR);
5708
5709 None
5710 };
5711
5712 let condition = self
5713 .parse_keyword(Keyword::WHEN)
5714 .then(|| self.parse_expr())
5715 .transpose()?;
5716
5717 let mut exec_body = None;
5718 let mut statements = None;
5719 if self.parse_keyword(Keyword::EXECUTE) {
5720 exec_body = Some(self.parse_trigger_exec_body()?);
5721 } else {
5722 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5723 }
5724
5725 Ok(CreateTrigger {
5726 or_alter,
5727 temporary,
5728 or_replace,
5729 is_constraint,
5730 name,
5731 period,
5732 period_before_table: true,
5733 events,
5734 table_name,
5735 referenced_table_name,
5736 referencing,
5737 trigger_object,
5738 condition,
5739 exec_body,
5740 statements_as: false,
5741 statements,
5742 characteristics,
5743 }
5744 .into())
5745 }
5746
5747 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5748 Ok(
5749 match self.expect_one_of_keywords(&[
5750 Keyword::FOR,
5751 Keyword::BEFORE,
5752 Keyword::AFTER,
5753 Keyword::INSTEAD,
5754 ])? {
5755 Keyword::FOR => TriggerPeriod::For,
5756 Keyword::BEFORE => TriggerPeriod::Before,
5757 Keyword::AFTER => TriggerPeriod::After,
5758 Keyword::INSTEAD => self
5759 .expect_keyword_is(Keyword::OF)
5760 .map(|_| TriggerPeriod::InsteadOf)?,
5761 unexpected_keyword => return Err(ParserError::ParserError(
5762 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
5763 )),
5764 },
5765 )
5766 }
5767
5768 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5769 Ok(
5770 match self.expect_one_of_keywords(&[
5771 Keyword::INSERT,
5772 Keyword::UPDATE,
5773 Keyword::DELETE,
5774 Keyword::TRUNCATE,
5775 ])? {
5776 Keyword::INSERT => TriggerEvent::Insert,
5777 Keyword::UPDATE => {
5778 if self.parse_keyword(Keyword::OF) {
5779 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5780 TriggerEvent::Update(cols)
5781 } else {
5782 TriggerEvent::Update(vec![])
5783 }
5784 }
5785 Keyword::DELETE => TriggerEvent::Delete,
5786 Keyword::TRUNCATE => TriggerEvent::Truncate,
5787 unexpected_keyword => return Err(ParserError::ParserError(
5788 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
5789 )),
5790 },
5791 )
5792 }
5793
5794 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5795 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5796 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5797 TriggerReferencingType::OldTable
5798 }
5799 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5800 TriggerReferencingType::NewTable
5801 }
5802 _ => {
5803 return Ok(None);
5804 }
5805 };
5806
5807 let is_as = self.parse_keyword(Keyword::AS);
5808 let transition_relation_name = self.parse_object_name(false)?;
5809 Ok(Some(TriggerReferencing {
5810 refer_type,
5811 is_as,
5812 transition_relation_name,
5813 }))
5814 }
5815
5816 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5817 Ok(TriggerExecBody {
5818 exec_type: match self
5819 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5820 {
5821 Keyword::FUNCTION => TriggerExecBodyType::Function,
5822 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5823 unexpected_keyword => return Err(ParserError::ParserError(
5824 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
5825 )),
5826 },
5827 func_desc: self.parse_function_desc()?,
5828 })
5829 }
5830
5831 pub fn parse_create_macro(
5832 &mut self,
5833 or_replace: bool,
5834 temporary: bool,
5835 ) -> Result<Statement, ParserError> {
5836 if dialect_of!(self is DuckDbDialect | GenericDialect) {
5837 let name = self.parse_object_name(false)?;
5838 self.expect_token(&Token::LParen)?;
5839 let args = if self.consume_token(&Token::RParen) {
5840 self.prev_token();
5841 None
5842 } else {
5843 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5844 };
5845
5846 self.expect_token(&Token::RParen)?;
5847 self.expect_keyword_is(Keyword::AS)?;
5848
5849 Ok(Statement::CreateMacro {
5850 or_replace,
5851 temporary,
5852 name,
5853 args,
5854 definition: if self.parse_keyword(Keyword::TABLE) {
5855 MacroDefinition::Table(self.parse_query()?)
5856 } else {
5857 MacroDefinition::Expr(self.parse_expr()?)
5858 },
5859 })
5860 } else {
5861 self.prev_token();
5862 self.expected("an object type after CREATE", self.peek_token())
5863 }
5864 }
5865
5866 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5867 let name = self.parse_identifier()?;
5868
5869 let default_expr =
5870 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5871 Some(self.parse_expr()?)
5872 } else {
5873 None
5874 };
5875 Ok(MacroArg { name, default_expr })
5876 }
5877
5878 pub fn parse_create_external_table(
5879 &mut self,
5880 or_replace: bool,
5881 ) -> Result<Statement, ParserError> {
5882 self.expect_keyword_is(Keyword::TABLE)?;
5883 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5884 let table_name = self.parse_object_name(false)?;
5885 let (columns, constraints) = self.parse_columns()?;
5886
5887 let hive_distribution = self.parse_hive_distribution()?;
5888 let hive_formats = self.parse_hive_formats()?;
5889
5890 let file_format = if let Some(ref hf) = hive_formats {
5891 if let Some(ref ff) = hf.storage {
5892 match ff {
5893 HiveIOFormat::FileFormat { format } => Some(*format),
5894 _ => None,
5895 }
5896 } else {
5897 None
5898 }
5899 } else {
5900 None
5901 };
5902 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
5903 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5904 let table_options = if !table_properties.is_empty() {
5905 CreateTableOptions::TableProperties(table_properties)
5906 } else {
5907 CreateTableOptions::None
5908 };
5909 Ok(CreateTableBuilder::new(table_name)
5910 .columns(columns)
5911 .constraints(constraints)
5912 .hive_distribution(hive_distribution)
5913 .hive_formats(hive_formats)
5914 .table_options(table_options)
5915 .or_replace(or_replace)
5916 .if_not_exists(if_not_exists)
5917 .external(true)
5918 .file_format(file_format)
5919 .location(location)
5920 .build())
5921 }
5922
5923 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5924 let next_token = self.next_token();
5925 match &next_token.token {
5926 Token::Word(w) => match w.keyword {
5927 Keyword::AVRO => Ok(FileFormat::AVRO),
5928 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5929 Keyword::ORC => Ok(FileFormat::ORC),
5930 Keyword::PARQUET => Ok(FileFormat::PARQUET),
5931 Keyword::RCFILE => Ok(FileFormat::RCFILE),
5932 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5933 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5934 _ => self.expected("fileformat", next_token),
5935 },
5936 _ => self.expected("fileformat", next_token),
5937 }
5938 }
5939
5940 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
5941 if self.consume_token(&Token::Eq) {
5942 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
5943 } else {
5944 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
5945 }
5946 }
5947
5948 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5949 let next_token = self.next_token();
5950 match &next_token.token {
5951 Token::Word(w) => match w.keyword {
5952 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5953 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5954 Keyword::JSON => Ok(AnalyzeFormat::JSON),
5955 _ => self.expected("fileformat", next_token),
5956 },
5957 _ => self.expected("fileformat", next_token),
5958 }
5959 }
5960
    /// Parses a `CREATE [OR REPLACE] [SECURE] [MATERIALIZED] VIEW` statement,
    /// starting at the optional `SECURE`/`MATERIALIZED` keywords. Several
    /// clauses are gated on specific dialects (see inline comments).
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<Statement, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery allows unquoted hyphens inside view names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` is accepted both before and after the view name;
        // record which position was used so the statement can round-trip.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let columns = self.parse_view_columns()?;
        // Options may arrive as `WITH (...)` here or as `OPTIONS (...)` below;
        // a non-empty `OPTIONS` list overwrites any `WITH` options.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        // Optional `CLUSTER BY (<cols>)`.
        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style `OPTIONS (...)`.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse-style `TO <table>` target.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // Snowflake-style `COMMENT = '<text>'`.
        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift-style trailing `WITH NO SCHEMA BINDING`.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            to,
            params: create_view_params,
            name_before_not_exists,
        }
        .into())
    }
6053
    /// Parses the optional view parameters that may precede `VIEW`:
    /// `[ALGORITHM = {UNDEFINED | MERGE | TEMPTABLE}] [DEFINER = <grantee>]
    /// [SQL SECURITY {DEFINER | INVOKER}]`.
    /// Returns `Ok(None)` when none of the three clauses is present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // Unreachable given the keyword list above; step back and
                    // re-consume so the error reports the offending token.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Unreachable given the keyword list above; step back and
                    // re-consume so the error reports the offending token.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only build a params struct when at least one clause was present.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6111
    /// Parses the remainder of a `CREATE ROLE` statement (after the `ROLE`
    /// keyword): one or more role names followed by dialect-specific options.
    ///
    /// MsSql accepts only `AUTHORIZATION <owner>`; PostgreSQL accepts the role
    /// attribute keywords listed below; other dialects accept no options.
    /// Each option may be given at most once — a repeat is a parse error.
    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // `WITH` is an optional noise word before the option list.
        let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // One slot per option; a `Some` / non-empty value means the option was
        // already seen, which is how duplicates are detected below.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        // Options may appear in any order. `loc` is the source position of the
        // option keyword just consumed, used in duplicate-option errors.
        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN ROLE ...` and `IN GROUP ...` are tracked separately.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected("ROLE or GROUP after IN", self.peek_token())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Defensive: parse_one_of_keywords only yields listed keywords.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        }
        .into())
    }
6336
6337 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6338 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6339 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6340 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6341 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6342 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6343 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6344 )),
6345 None => {
6346 match self.parse_identifier() {
6347 Ok(ident) => Owner::Ident(ident),
6348 Err(e) => {
6349 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6350 }
6351 }
6352 }
6353 };
6354 Ok(owner)
6355 }
6356
6357 fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6359 let name = self.parse_object_name(false)?;
6360 self.expect_keyword_is(Keyword::AS)?;
6361 let data_type = self.parse_data_type()?;
6362 let collation = if self.parse_keyword(Keyword::COLLATE) {
6363 Some(self.parse_identifier()?)
6364 } else {
6365 None
6366 };
6367 let default = if self.parse_keyword(Keyword::DEFAULT) {
6368 Some(self.parse_expr()?)
6369 } else {
6370 None
6371 };
6372 let mut constraints = Vec::new();
6373 while let Some(constraint) = self.parse_optional_table_constraint()? {
6374 constraints.push(constraint);
6375 }
6376
6377 Ok(Statement::CreateDomain(CreateDomain {
6378 name,
6379 data_type,
6380 collation,
6381 default,
6382 constraints,
6383 }))
6384 }
6385
    /// Parses a `CREATE POLICY` statement (everything after the `POLICY`
    /// keyword):
    ///
    /// `name ON table [AS {PERMISSIVE | RESTRICTIVE}]
    ///  [FOR {ALL | SELECT | INSERT | UPDATE | DELETE}]
    ///  [TO role [, ...]] [USING (expr)] [WITH CHECK (expr)]`
    ///
    /// Every clause after `ON table` is optional.
    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `AS { PERMISSIVE | RESTRICTIVE }`.
        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                // Defensive: expect_one_of_keywords only returns listed keywords.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
                )),
            })
        } else {
            None
        };

        // Optional `FOR <command>`.
        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                // Defensive: expect_one_of_keywords only returns listed keywords.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
                )),
            })
        } else {
            None
        };

        // Optional `TO role [, ...]` — each role parsed via parse_owner.
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // Optional `USING ( expr )`.
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // Optional `WITH CHECK ( expr )`.
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
6470
6471 pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6481 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6482 let name = self.parse_identifier()?;
6483
6484 let connector_type = if self.parse_keyword(Keyword::TYPE) {
6485 Some(self.parse_literal_string()?)
6486 } else {
6487 None
6488 };
6489
6490 let url = if self.parse_keyword(Keyword::URL) {
6491 Some(self.parse_literal_string()?)
6492 } else {
6493 None
6494 };
6495
6496 let comment = self.parse_optional_inline_comment()?;
6497
6498 let with_dcproperties =
6499 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6500 properties if !properties.is_empty() => Some(properties),
6501 _ => None,
6502 };
6503
6504 Ok(Statement::CreateConnector(CreateConnector {
6505 name,
6506 if_not_exists,
6507 connector_type,
6508 url,
6509 comment,
6510 with_dcproperties,
6511 }))
6512 }
6513
6514 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6520 let mut parts = vec![];
6521 loop {
6522 parts.push(ObjectNamePart::Identifier(Ident::new(
6523 self.next_token().to_string(),
6524 )));
6525 if !self.consume_token(&Token::Period) {
6526 break;
6527 }
6528 }
6529 Ok(ObjectName(parts))
6530 }
6531
    /// Parses `CREATE OPERATOR name ( FUNCTION = f [, option ...] )` —
    /// everything after the `OPERATOR` keyword.
    ///
    /// Each option may appear at most once; the match guards below enforce
    /// that, so a duplicate (or unlisted) keyword falls through to the error
    /// arm. `FUNCTION` (or its synonym `PROCEDURE`) is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // HASHES / MERGES are bare flags without an `=` value.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    // Remember which spelling was used so it round-trips.
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR / NEGATOR accept either a bare operator name or
                // the wrapped `OPERATOR(op)` form.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a keyword repeats (its guard above failed).
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION is the only mandatory parameter.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(Statement::CreateOperator(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        }))
    }
6653
6654 pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
6658 let name = self.parse_object_name(false)?;
6659 self.expect_keyword(Keyword::USING)?;
6660 let using = self.parse_identifier()?;
6661
6662 Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
6663 name,
6664 using,
6665 }))
6666 }
6667
6668 pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
6672 let name = self.parse_object_name(false)?;
6673 let default = self.parse_keyword(Keyword::DEFAULT);
6674 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6675 let for_type = self.parse_data_type()?;
6676 self.expect_keyword(Keyword::USING)?;
6677 let using = self.parse_identifier()?;
6678
6679 let family = if self.parse_keyword(Keyword::FAMILY) {
6680 Some(self.parse_object_name(false)?)
6681 } else {
6682 None
6683 };
6684
6685 self.expect_keyword(Keyword::AS)?;
6686
6687 let mut items = vec![];
6688 loop {
6689 if self.parse_keyword(Keyword::OPERATOR) {
6690 let strategy_number = self.parse_literal_uint()? as u32;
6691 let operator_name = self.parse_operator_name()?;
6692
6693 let op_types = if self.consume_token(&Token::LParen) {
6695 let left = self.parse_data_type()?;
6696 self.expect_token(&Token::Comma)?;
6697 let right = self.parse_data_type()?;
6698 self.expect_token(&Token::RParen)?;
6699 Some(OperatorArgTypes { left, right })
6700 } else {
6701 None
6702 };
6703
6704 let purpose = if self.parse_keyword(Keyword::FOR) {
6706 if self.parse_keyword(Keyword::SEARCH) {
6707 Some(OperatorPurpose::ForSearch)
6708 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6709 let sort_family = self.parse_object_name(false)?;
6710 Some(OperatorPurpose::ForOrderBy { sort_family })
6711 } else {
6712 return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
6713 }
6714 } else {
6715 None
6716 };
6717
6718 items.push(OperatorClassItem::Operator {
6719 strategy_number,
6720 operator_name,
6721 op_types,
6722 purpose,
6723 });
6724 } else if self.parse_keyword(Keyword::FUNCTION) {
6725 let support_number = self.parse_literal_uint()? as u32;
6726
6727 let op_types =
6729 if self.consume_token(&Token::LParen) && self.peek_token() != Token::RParen {
6730 let mut types = vec![];
6731 loop {
6732 types.push(self.parse_data_type()?);
6733 if !self.consume_token(&Token::Comma) {
6734 break;
6735 }
6736 }
6737 self.expect_token(&Token::RParen)?;
6738 Some(types)
6739 } else if self.consume_token(&Token::LParen) {
6740 self.expect_token(&Token::RParen)?;
6741 Some(vec![])
6742 } else {
6743 None
6744 };
6745
6746 let function_name = self.parse_object_name(false)?;
6747
6748 let argument_types = if self.consume_token(&Token::LParen) {
6750 let mut types = vec![];
6751 loop {
6752 if self.peek_token() == Token::RParen {
6753 break;
6754 }
6755 types.push(self.parse_data_type()?);
6756 if !self.consume_token(&Token::Comma) {
6757 break;
6758 }
6759 }
6760 self.expect_token(&Token::RParen)?;
6761 types
6762 } else {
6763 vec![]
6764 };
6765
6766 items.push(OperatorClassItem::Function {
6767 support_number,
6768 op_types,
6769 function_name,
6770 argument_types,
6771 });
6772 } else if self.parse_keyword(Keyword::STORAGE) {
6773 let storage_type = self.parse_data_type()?;
6774 items.push(OperatorClassItem::Storage { storage_type });
6775 } else {
6776 break;
6777 }
6778
6779 if !self.consume_token(&Token::Comma) {
6781 break;
6782 }
6783 }
6784
6785 Ok(Statement::CreateOperatorClass(CreateOperatorClass {
6786 name,
6787 default,
6788 for_type,
6789 using,
6790 family,
6791 items,
6792 }))
6793 }
6794
    /// Parses a `DROP ...` statement (everything after the `DROP` keyword).
    ///
    /// Object kinds with bespoke grammar (FUNCTION, POLICY, CONNECTOR,
    /// DOMAIN, PROCEDURE, SECRET, TRIGGER, EXTENSION and the OPERATOR
    /// variants) are delegated to their own `parse_drop_*` helpers; every
    /// other kind goes through the generic `Statement::Drop` tail below.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // Dialect-gated modifiers that may precede the object kind.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            return self.parse_drop_function();
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy();
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain();
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger();
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR FAMILY / OPERATOR CLASS / bare OPERATOR.
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected(
                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token(),
            );
        };
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // `loc` marks where the trailing modifiers start, for error reporting.
        let loc = self.peek_token().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // Optional trailing `ON <table>`.
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
6891
6892 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6893 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6894 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6895 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6896 _ => None,
6897 }
6898 }
6899
6900 fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6905 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6906 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6907 let drop_behavior = self.parse_optional_drop_behavior();
6908 Ok(Statement::DropFunction(DropFunction {
6909 if_exists,
6910 func_desc,
6911 drop_behavior,
6912 }))
6913 }
6914
6915 fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6921 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6922 let name = self.parse_identifier()?;
6923 self.expect_keyword_is(Keyword::ON)?;
6924 let table_name = self.parse_object_name(false)?;
6925 let drop_behavior = self.parse_optional_drop_behavior();
6926 Ok(Statement::DropPolicy {
6927 if_exists,
6928 name,
6929 table_name,
6930 drop_behavior,
6931 })
6932 }
6933 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6939 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6940 let name = self.parse_identifier()?;
6941 Ok(Statement::DropConnector { if_exists, name })
6942 }
6943
6944 fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6948 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6949 let name = self.parse_object_name(false)?;
6950 let drop_behavior = self.parse_optional_drop_behavior();
6951 Ok(Statement::DropDomain(DropDomain {
6952 if_exists,
6953 name,
6954 drop_behavior,
6955 }))
6956 }
6957
6958 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6963 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6964 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6965 let drop_behavior = self.parse_optional_drop_behavior();
6966 Ok(Statement::DropProcedure {
6967 if_exists,
6968 proc_desc,
6969 drop_behavior,
6970 })
6971 }
6972
6973 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6974 let name = self.parse_object_name(false)?;
6975
6976 let args = if self.consume_token(&Token::LParen) {
6977 if self.consume_token(&Token::RParen) {
6978 Some(vec![])
6979 } else {
6980 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6981 self.expect_token(&Token::RParen)?;
6982 Some(args)
6983 }
6984 } else {
6985 None
6986 };
6987
6988 Ok(FunctionDesc { name, args })
6989 }
6990
6991 fn parse_drop_secret(
6993 &mut self,
6994 temporary: bool,
6995 persistent: bool,
6996 ) -> Result<Statement, ParserError> {
6997 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6998 let name = self.parse_identifier()?;
6999 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7000 self.parse_identifier().ok()
7001 } else {
7002 None
7003 };
7004 let temp = match (temporary, persistent) {
7005 (true, false) => Some(true),
7006 (false, true) => Some(false),
7007 (false, false) => None,
7008 _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
7009 };
7010
7011 Ok(Statement::DropSecret {
7012 if_exists,
7013 temporary: temp,
7014 name,
7015 storage_specifier,
7016 })
7017 }
7018
    /// Parses a `DECLARE` statement (everything after the `DECLARE` keyword).
    ///
    /// BigQuery, Snowflake and MsSql have their own grammars and are handed
    /// off to dedicated helpers; the fallback below parses the cursor form
    /// `name [BINARY] [INSENSITIVE | ASENSITIVE] [[NO] SCROLL]
    ///  CURSOR [{WITH | WITHOUT} HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // Always Some for this form; records whether BINARY was present.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // Defensive: parse_one_of_keywords only returns listed keywords.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7093
    /// Parses a BigQuery-style `DECLARE` statement:
    /// `name [, ...] [data_type] [DEFAULT expr]`.
    ///
    /// When the data type is omitted the `DEFAULT` clause is mandatory —
    /// the declaration must pin down either a type or a value.
    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
        let names = self.parse_comma_separated(Parser::parse_identifier)?;

        // A leading `DEFAULT` keyword means the data type was omitted.
        let data_type = match self.peek_token().token {
            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
            _ => Some(self.parse_data_type()?),
        };

        let expr = if data_type.is_some() {
            // With an explicit type, the DEFAULT clause is optional.
            if self.parse_keyword(Keyword::DEFAULT) {
                Some(self.parse_expr()?)
            } else {
                None
            }
        } else {
            // Without a type, DEFAULT is required.
            self.expect_keyword_is(Keyword::DEFAULT)?;
            Some(self.parse_expr()?)
        };

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names,
                data_type,
                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
                declare_type: None,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query: None,
            }],
        })
    }
7136
    /// Parses a Snowflake-style `DECLARE` block: one or more declarations,
    /// each one of `name CURSOR FOR ...`, `name RESULTSET [init]`,
    /// `name EXCEPTION [(...)]`, or a plain variable with an optional data
    /// type and/or initializer.
    ///
    /// Declarations are separated by semicolons. The list ends when the word
    /// after a semicolon is a known keyword — i.e. the start of the next
    /// statement rather than another declaration name — in which case the
    /// semicolon is pushed back for the caller.
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` carries a query; otherwise the
                    // cursor is bound to an expression (e.g. a variable).
                    match self.peek_token().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        // Bare `name RESULTSET;` — no initializer.
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    let assigned_expr = if self.peek_token().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        // Bare `name EXCEPTION;` — no payload.
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: initializer first; failing that, a data
                    // type optionally followed by an initializer.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = self.peek_token().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match self.peek_token().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        // A non-keyword word: another declaration follows.
                        continue;
                    }
                    _ => {
                        // A keyword (or non-word): the declare list is over;
                        // put the semicolon back for the caller.
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
7252
7253 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7265 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7266
7267 Ok(Statement::Declare { stmts })
7268 }
7269
    /// Parses a single MsSql `DECLARE` item:
    /// `@name [AS] data_type [= expr]` or `name CURSOR [FOR query]`.
    ///
    /// A name that neither starts with `@` nor is followed by `CURSOR` is
    /// rejected as an invalid declaration.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            if !ident.value.starts_with('@')
                && !matches!(
                    self.peek_token().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // `CURSOR` marks a cursor declaration; otherwise a data type follows,
        // optionally introduced by a noise-word `AS`.
        let (declare_type, data_type) = match self.peek_token().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // Cursors take `FOR query`; variables take an optional `= expr`.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
7333
7334 pub fn parse_snowflake_variable_declaration_expression(
7342 &mut self,
7343 ) -> Result<Option<DeclareAssignment>, ParserError> {
7344 Ok(match self.peek_token().token {
7345 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7346 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7348 }
7349 Token::Assignment => {
7350 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
7352 self.parse_expr()?,
7353 )))
7354 }
7355 _ => None,
7356 })
7357 }
7358
7359 pub fn parse_mssql_variable_declaration_expression(
7366 &mut self,
7367 ) -> Result<Option<DeclareAssignment>, ParserError> {
7368 Ok(match self.peek_token().token {
7369 Token::Eq => {
7370 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
7372 self.parse_expr()?,
7373 )))
7374 }
7375 _ => None,
7376 })
7377 }
7378
7379 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7381 let direction = if self.parse_keyword(Keyword::NEXT) {
7382 FetchDirection::Next
7383 } else if self.parse_keyword(Keyword::PRIOR) {
7384 FetchDirection::Prior
7385 } else if self.parse_keyword(Keyword::FIRST) {
7386 FetchDirection::First
7387 } else if self.parse_keyword(Keyword::LAST) {
7388 FetchDirection::Last
7389 } else if self.parse_keyword(Keyword::ABSOLUTE) {
7390 FetchDirection::Absolute {
7391 limit: self.parse_number_value()?.value,
7392 }
7393 } else if self.parse_keyword(Keyword::RELATIVE) {
7394 FetchDirection::Relative {
7395 limit: self.parse_number_value()?.value,
7396 }
7397 } else if self.parse_keyword(Keyword::FORWARD) {
7398 if self.parse_keyword(Keyword::ALL) {
7399 FetchDirection::ForwardAll
7400 } else {
7401 FetchDirection::Forward {
7402 limit: Some(self.parse_number_value()?.value),
7404 }
7405 }
7406 } else if self.parse_keyword(Keyword::BACKWARD) {
7407 if self.parse_keyword(Keyword::ALL) {
7408 FetchDirection::BackwardAll
7409 } else {
7410 FetchDirection::Backward {
7411 limit: Some(self.parse_number_value()?.value),
7413 }
7414 }
7415 } else if self.parse_keyword(Keyword::ALL) {
7416 FetchDirection::All
7417 } else {
7418 FetchDirection::Count {
7419 limit: self.parse_number_value()?.value,
7420 }
7421 };
7422
7423 let position = if self.peek_keyword(Keyword::FROM) {
7424 self.expect_keyword(Keyword::FROM)?;
7425 FetchPosition::From
7426 } else if self.peek_keyword(Keyword::IN) {
7427 self.expect_keyword(Keyword::IN)?;
7428 FetchPosition::In
7429 } else {
7430 return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7431 };
7432
7433 let name = self.parse_identifier()?;
7434
7435 let into = if self.parse_keyword(Keyword::INTO) {
7436 Some(self.parse_object_name(false)?)
7437 } else {
7438 None
7439 };
7440
7441 Ok(Statement::Fetch {
7442 name,
7443 direction,
7444 position,
7445 into,
7446 })
7447 }
7448
7449 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7450 let object_type = if self.parse_keyword(Keyword::ALL) {
7451 DiscardObject::ALL
7452 } else if self.parse_keyword(Keyword::PLANS) {
7453 DiscardObject::PLANS
7454 } else if self.parse_keyword(Keyword::SEQUENCES) {
7455 DiscardObject::SEQUENCES
7456 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7457 DiscardObject::TEMP
7458 } else {
7459 return self.expected(
7460 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7461 self.peek_token(),
7462 );
7463 };
7464 Ok(Statement::Discard { object_type })
7465 }
7466
    /// Parse a `CREATE [UNIQUE] INDEX` statement; the `CREATE [UNIQUE]` prefix
    /// has already been consumed and `unique` records whether UNIQUE was seen.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional (`CREATE INDEX ON tbl (...)` is valid).
        // With IF NOT EXISTS a name is required, so in that case — or whenever
        // the next keyword is not ON — parse a name first.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // `USING <method>` may appear between the index name and ON.
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // `USING` may also appear after the table name; a later occurrence
        // takes precedence over one parsed before ON.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Optional `INCLUDE (col, ...)` covering columns.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS [NOT] DISTINCT` — Some(true) for DISTINCT, Some(false) for
        // NOT DISTINCT, None when the clause is absent.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // `WITH (param, ...)` storage parameters, only where the dialect
        // supports them.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate: `WHERE <expr>`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing ALGORITHM/LOCK options (MySQL style), parsed with the
        // ALTER TABLE operation parser.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(Statement::CreateIndex(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        }))
    }
7557
7558 pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7559 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7560 let name = self.parse_identifier()?;
7561
7562 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7563 let schema = if self.parse_keyword(Keyword::SCHEMA) {
7564 Some(self.parse_identifier()?)
7565 } else {
7566 None
7567 };
7568
7569 let version = if self.parse_keyword(Keyword::VERSION) {
7570 Some(self.parse_identifier()?)
7571 } else {
7572 None
7573 };
7574
7575 let cascade = self.parse_keyword(Keyword::CASCADE);
7576
7577 (schema, version, cascade)
7578 } else {
7579 (None, None, false)
7580 };
7581
7582 Ok(CreateExtension {
7583 name,
7584 if_not_exists,
7585 schema,
7586 version,
7587 cascade,
7588 }
7589 .into())
7590 }
7591
7592 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7594 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7595 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7596 let cascade_or_restrict =
7597 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7598 Ok(Statement::DropExtension(DropExtension {
7599 names,
7600 if_exists,
7601 cascade_or_restrict: cascade_or_restrict
7602 .map(|k| match k {
7603 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7604 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7605 _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7606 })
7607 .transpose()?,
7608 }))
7609 }
7610
7611 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7614 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7615 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7616 let drop_behavior = self.parse_optional_drop_behavior();
7617 Ok(Statement::DropOperator(DropOperator {
7618 if_exists,
7619 operators,
7620 drop_behavior,
7621 }))
7622 }
7623
    /// Parse one operator signature for `DROP OPERATOR`:
    /// `name (left_type, right_type)`, where the left type may be `NONE`
    /// for a prefix operator.
    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        // `NONE` marks a prefix operator (no left operand).
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;

        // NOTE(review): the right type does not accept NONE here — confirm
        // whether that case (historical postfix operators) needs support.
        let right_type = self.parse_data_type()?;

        self.expect_token(&Token::RParen)?;

        Ok(DropOperatorSignature {
            name,
            left_type,
            right_type,
        })
    }
7650
7651 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7655 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7656 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7657 self.expect_keyword(Keyword::USING)?;
7658 let using = self.parse_identifier()?;
7659 let drop_behavior = self.parse_optional_drop_behavior();
7660 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7661 if_exists,
7662 names,
7663 using,
7664 drop_behavior,
7665 }))
7666 }
7667
7668 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7672 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7673 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7674 self.expect_keyword(Keyword::USING)?;
7675 let using = self.parse_identifier()?;
7676 let drop_behavior = self.parse_optional_drop_behavior();
7677 Ok(Statement::DropOperatorClass(DropOperatorClass {
7678 if_exists,
7679 names,
7680 using,
7681 drop_behavior,
7682 }))
7683 }
7684
7685 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7687 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7688 self.expect_token(&Token::LParen)?;
7689 let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7690 self.expect_token(&Token::RParen)?;
7691 Ok(HiveDistributionStyle::PARTITIONED { columns })
7692 } else {
7693 Ok(HiveDistributionStyle::NONE)
7694 }
7695 }
7696
    /// Parse the optional Hive storage clauses that may follow a column list —
    /// `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'`, and
    /// `WITH SERDEPROPERTIES (...)` — in any order. Returns `None` when no
    /// such clause is present.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    // `ROW FORMAT SERDE ...` or `ROW FORMAT DELIMITED ...`.
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // Explicit INPUTFORMAT/OUTPUTFORMAT class pair.
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // Named file format, e.g. `STORED AS PARQUET`.
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Push WITH back so the options parser can match the full
                    // `WITH SERDEPROPERTIES (...)` phrase itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // WITH was not followed by SERDEPROPERTIES: stop and
                        // leave the token for the caller.
                        break;
                    }
                }
                None => break,
                _ => break,
            }
        }

        Ok(hive_format)
    }
7751
    /// Parse a Hive `ROW FORMAT` clause (the `ROW` keyword has already been
    /// consumed): either `FORMAT SERDE '<class>'` or `FORMAT DELIMITED`
    /// followed by any number of delimiter sub-clauses.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED: collect delimiter specifications in the order
                // they appear; each sub-clause is optional.
                // NOTE(review): when a keyword below matches but its required
                // continuation does not, the keyword has already been consumed
                // before the `break` — confirm that is intended.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            // FIELDS TERMINATED BY 'c' [ESCAPED BY 'c']
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            // COLLECTION ITEMS TERMINATED BY 'c'
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            // MAP KEYS TERMINATED BY 'c'
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            // LINES TERMINATED BY 'c'
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            // NULL DEFINED AS 'c'
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
7847
7848 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7849 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7850 Ok(Some(self.parse_identifier()?))
7851 } else {
7852 Ok(None)
7853 }
7854 }
7855
    /// Parse the body of a `CREATE TABLE` statement after the
    /// `CREATE [OR REPLACE] [TEMPORARY|GLOBAL|TRANSIENT] TABLE` prefix has
    /// been consumed; the flags record which prefix keywords were seen.
    ///
    /// Clause order below matters: each optional clause is attempted in the
    /// sequence the supported dialects expect.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<Statement, ParserError> {
        // BigQuery allows unquoted hyphenated names like `my-project.dataset.table`.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // PostgreSQL `CREATE TABLE ... PARTITION OF parent`.
        let partition_of = if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::OF])
        {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        // ClickHouse `ON CLUSTER` clause.
        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // Snowflake `CLONE src`; the name parse is best-effort (`.ok()`), so a
        // bare `CLONE` keyword yields no clone source rather than an error.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Parenthesized column definitions and table constraints (may be empty).
        let (columns, constraints) = self.parse_columns()?;

        // Hive allows a table COMMENT directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // A PARTITION OF table must specify its bounds (FOR VALUES ... / DEFAULT).
        let for_values = if partition_of.is_some() {
            Some(self.parse_partition_for_values()?)
        } else {
            None
        };

        // SQLite `WITHOUT ROWID`.
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive-specific storage clauses.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        // PARTITION BY / CLUSTER BY / INHERITS / table options.
        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse standalone `PRIMARY KEY <expr>` clause.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // `ORDER BY`: either a parenthesized (possibly empty) expression list
        // or a single unparenthesized expression.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token() != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite `STRICT` table flag.
        let strict = self.parse_keyword(Keyword::STRICT);

        // `AS <query>`; some dialects also allow a bare SELECT, in which case
        // the SELECT token is pushed back so parse_query sees a full query.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .build())
    }
7991
    /// Parse an optional `LIKE`/`ILIKE` clause of `CREATE TABLE`: either the
    /// parenthesized form `(LIKE src [INCLUDING|EXCLUDING DEFAULTS])` for
    /// dialects that support it, or the plain `LIKE src` / `ILIKE src` form.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The `(` we consumed starts something else (e.g. the column
                // list); push it back and report no LIKE clause.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8029
8030 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8031 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8032 Ok(OnCommit::DeleteRows)
8033 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8034 Ok(OnCommit::PreserveRows)
8035 } else if self.parse_keywords(&[Keyword::DROP]) {
8036 Ok(OnCommit::Drop)
8037 } else {
8038 parser_err!(
8039 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8040 self.peek_token()
8041 )
8042 }
8043 }
8044
    /// Parse the partition bound of `CREATE TABLE ... PARTITION OF`:
    /// `DEFAULT`, `FOR VALUES IN (...)`, `FOR VALUES FROM (...) TO (...)`,
    /// or `FOR VALUES WITH (MODULUS n, REMAINDER r)`.
    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
        // The default partition has no FOR VALUES clause at all.
        if self.parse_keyword(Keyword::DEFAULT) {
            return Ok(ForValues::Default);
        }

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;

        if self.parse_keyword(Keyword::IN) {
            // List partitioning: IN (v1, v2, ...).
            self.expect_token(&Token::LParen)?;
            let values = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::In(values))
        } else if self.parse_keyword(Keyword::FROM) {
            // Range partitioning: FROM (...) TO (...); bounds may be
            // MINVALUE/MAXVALUE (see parse_partition_bound_value).
            self.expect_token(&Token::LParen)?;
            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword(Keyword::TO)?;
            self.expect_token(&Token::LParen)?;
            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::From { from, to })
        } else if self.parse_keyword(Keyword::WITH) {
            // Hash partitioning: WITH (MODULUS n, REMAINDER r).
            self.expect_token(&Token::LParen)?;
            self.expect_keyword(Keyword::MODULUS)?;
            let modulus = self.parse_literal_uint()?;
            self.expect_token(&Token::Comma)?;
            self.expect_keyword(Keyword::REMAINDER)?;
            let remainder = self.parse_literal_uint()?;
            self.expect_token(&Token::RParen)?;
            Ok(ForValues::With { modulus, remainder })
        } else {
            self.expected("IN, FROM, or WITH after FOR VALUES", self.peek_token())
        }
    }
8087
8088 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8090 if self.parse_keyword(Keyword::MINVALUE) {
8091 Ok(PartitionBoundValue::MinValue)
8092 } else if self.parse_keyword(Keyword::MAXVALUE) {
8093 Ok(PartitionBoundValue::MaxValue)
8094 } else {
8095 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8096 }
8097 }
8098
    /// Parse the optional clauses that may follow a `CREATE TABLE` column
    /// list: `INHERITS (...)`, `WITH (...)`, `TBLPROPERTIES (...)`,
    /// `PARTITION BY`, `CLUSTER BY`, BigQuery `OPTIONS (...)`, and plain
    /// MySQL-style key/value options. A later option clause overwrites an
    /// earlier one in `table_options`.
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // PostgreSQL `INHERITS (parent, ...)`.
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // `WITH (...)` storage options.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive `TBLPROPERTIES (...)` — replaces any WITH options parsed above.
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // BigQuery `OPTIONS (...)` — again overwrites earlier options.
            if let Token::Word(word) = self.peek_token().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to MySQL-style plain options (ENGINE=..., CHARSET=..., ...)
        // only when nothing above matched and the dialect is not Hive.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
8163
    /// Parse one table-level option from a MySQL-style plain option list
    /// (e.g. `ENGINE=InnoDB`, `COMMENT='x'`, `DEFAULT CHARSET=utf8mb4`).
    /// Returns `Ok(None)` when the next tokens do not start a known option.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // `START TRANSACTION` appears as a pseudo-option in some dumps.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // `COMMENT [=] '<text>'` — whether `=` was present is preserved.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // `ENGINE [=] name[(ident, ...)]`.
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token() == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // `TABLESPACE [=] name [STORAGE [=] DISK|MEMORY]`.
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // `UNION = (tbl, ...)` (MySQL MERGE tables).
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Remaining options are simple `KEY [=] value` pairs; map the keyword
        // (or multi-keyword phrase) to its canonical key name. Longer phrases
        // are tried before their prefixes (e.g. DEFAULT CHARSET before CHARSET).
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            return Ok(None);
        };

        // The `=` between key and value is optional.
        let _ = self.consume_token(&Token::Eq);

        // The value is either a literal value or a bare identifier
        // (e.g. `ROW_FORMAT=DYNAMIC`).
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
8356
8357 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8358 let mut options = Vec::new();
8359
8360 while let Some(option) = self.parse_plain_option()? {
8361 options.push(option);
8362 let _ = self.consume_token(&Token::Comma);
8365 }
8366
8367 Ok(options)
8368 }
8369
8370 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8371 let comment = if self.parse_keyword(Keyword::COMMENT) {
8372 let has_eq = self.consume_token(&Token::Eq);
8373 let comment = self.parse_comment_value()?;
8374 Some(if has_eq {
8375 CommentDef::WithEq(comment)
8376 } else {
8377 CommentDef::WithoutEq(comment)
8378 })
8379 } else {
8380 None
8381 };
8382 Ok(comment)
8383 }
8384
8385 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8386 let next_token = self.next_token();
8387 let value = match next_token.token {
8388 Token::SingleQuotedString(str) => str,
8389 Token::DollarQuotedString(str) => str.value,
8390 _ => self.expected("string literal", next_token)?,
8391 };
8392 Ok(value)
8393 }
8394
    /// Parse an optional parenthesized list of procedure parameters.
    /// Returns `Some(vec![])` both when there is no `(` at all and when the
    /// list is empty (`()`).
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            if let Token::Word(_) = self.peek_token().token {
                params.push(self.parse_procedure_param()?)
            }
            // Consuming the comma before checking `)` makes this loop tolerate
            // a trailing comma before the closing paren.
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected("',' or ')' after parameter definition", self.peek_token());
            }
        }
        Ok(Some(params))
    }
8416
    /// Parse a parenthesized list of column definitions and table constraints,
    /// as used by `CREATE TABLE`. Returns empty vectors when there is no `(`
    /// or when the list is immediately closed (`()`).
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Constraints are tried first (they begin with keywords); any
            // other leading word is assumed to start a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = self.peek_token().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected("column name or constraint definition", self.peek_token());
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token().token == Token::RParen;

            if !comma && !rparen {
                return self.expected("',' or ')' after column definition", self.peek_token());
            };

            // Stop at `)`; a trailing comma before it is accepted only when
            // the dialect or parser options allow trailing commas.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
8452
8453 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8454 let mode = if self.parse_keyword(Keyword::IN) {
8455 Some(ArgMode::In)
8456 } else if self.parse_keyword(Keyword::OUT) {
8457 Some(ArgMode::Out)
8458 } else if self.parse_keyword(Keyword::INOUT) {
8459 Some(ArgMode::InOut)
8460 } else {
8461 None
8462 };
8463 let name = self.parse_identifier()?;
8464 let data_type = self.parse_data_type()?;
8465 let default = if self.consume_token(&Token::Eq) {
8466 Some(self.parse_expr()?)
8467 } else {
8468 None
8469 };
8470
8471 Ok(ProcedureParam {
8472 name,
8473 data_type,
8474 mode,
8475 default,
8476 })
8477 }
8478
8479 pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8480 let col_name = self.parse_identifier()?;
8481 let data_type = if self.is_column_type_sqlite_unspecified() {
8482 DataType::Unspecified
8483 } else {
8484 self.parse_data_type()?
8485 };
8486 let mut options = vec![];
8487 loop {
8488 if self.parse_keyword(Keyword::CONSTRAINT) {
8489 let name = Some(self.parse_identifier()?);
8490 if let Some(option) = self.parse_optional_column_option()? {
8491 options.push(ColumnOptionDef { name, option });
8492 } else {
8493 return self.expected(
8494 "constraint details after CONSTRAINT <name>",
8495 self.peek_token(),
8496 );
8497 }
8498 } else if let Some(option) = self.parse_optional_column_option()? {
8499 options.push(ColumnOptionDef { name: None, option });
8500 } else {
8501 break;
8502 };
8503 }
8504 Ok(ColumnDef {
8505 name: col_name,
8506 data_type,
8507 options,
8508 })
8509 }
8510
8511 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8512 if dialect_of!(self is SQLiteDialect) {
8513 match self.peek_token().token {
8514 Token::Word(word) => matches!(
8515 word.keyword,
8516 Keyword::CONSTRAINT
8517 | Keyword::PRIMARY
8518 | Keyword::NOT
8519 | Keyword::UNIQUE
8520 | Keyword::CHECK
8521 | Keyword::DEFAULT
8522 | Keyword::COLLATE
8523 | Keyword::REFERENCES
8524 | Keyword::GENERATED
8525 | Keyword::AS
8526 ),
8527 _ => true, }
8529 } else {
8530 false
8531 }
8532 }
8533
8534 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8535 if let Some(option) = self.dialect.parse_column_option(self)? {
8536 return option;
8537 }
8538
8539 self.with_state(
8540 ColumnDefinition,
8541 |parser| -> Result<Option<ColumnOption>, ParserError> {
8542 parser.parse_optional_column_option_inner()
8543 },
8544 )
8545 }
8546
    /// Parses a single built-in column option, trying each recognized
    /// option in turn; returns `Ok(None)` when the next tokens start none
    /// of them.
    ///
    /// NOTE(review): the branch order matters — multi-word options such as
    /// `NOT NULL` must be tried before single-word options that share a
    /// prefix keyword (e.g. bare `NULL`).
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(
                self.parse_column_option_expr()?,
            )))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            Ok(Some(ColumnOption::Materialized(
                self.parse_column_option_expr()?,
            )))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // ClickHouse EPHEMERAL may appear with or without an expression;
            // a following ',' or ')' means there is none.
            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(
                    self.parse_column_option_expr()?,
                ))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display: KeyOrIndexDisplay::None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            // MATCH / ON DELETE / ON UPDATE may appear in any order, each
            // at most once.
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None, index_name: None, columns: vec![], foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The check expression is parsed in the normal state so the full
            // expression grammar applies inside the parentheses.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Ok(Some(
                CheckConstraint {
                    name: None, expr: Box::new(expr),
                    enforced: None, }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            let expr = self.parse_column_option_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // Put OPTIONS back so parse_options can consume it itself.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(
                self.parse_column_option_expr()?,
            ))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // Optional `IDENTITY(seed, increment)` arguments.
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
8760
8761 fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8778 if self.peek_token_ref().token == Token::LParen {
8779 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8780 Ok(expr)
8781 } else {
8782 Ok(self.parse_expr()?)
8783 }
8784 }
8785
8786 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8787 let name = self.parse_object_name(false)?;
8788 self.expect_token(&Token::Eq)?;
8789 let value = self.parse_literal_string()?;
8790
8791 Ok(Tag::new(name, value))
8792 }
8793
    /// Parses the tail of a `GENERATED ...` column option; the `GENERATED`
    /// keyword itself has already been consumed by the caller.
    ///
    /// Handles `GENERATED {ALWAYS | BY DEFAULT} AS IDENTITY [(seq opts)]`
    /// and `GENERATED ALWAYS AS (expr) [STORED | VIRTUAL]`; anything else
    /// yields `Ok(None)`.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            let mut sequence_options = vec![];
            // Optional parenthesized sequence options, e.g. (START WITH 1).
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // The generation expression is parsed in the normal state so
                // the full expression grammar applies inside the parentheses.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // Postgres demands STORED here; anything else is an error.
                    self.expected("STORED", self.peek_token())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // `GENERATED ALWAYS AS` without '(' is not a generated column.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
8860
8861 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8862 self.expect_token(&Token::LParen)?;
8864 let expr = self.parse_expr()?;
8865 self.expect_token(&Token::RParen)?;
8866
8867 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8868 (
8869 GeneratedAs::ExpStored,
8870 Some(GeneratedExpressionMode::Stored),
8871 )
8872 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8873 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8874 } else {
8875 (GeneratedAs::Always, None)
8876 };
8877
8878 Ok(Some(ColumnOption::Generated {
8879 generated_as: gen_as,
8880 sequence_options: None,
8881 generation_expr: Some(expr),
8882 generation_expr_mode: expr_mode,
8883 generated_keyword: false,
8884 }))
8885 }
8886
8887 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8888 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8889 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8890 {
8891 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8892
8893 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8894 self.expect_token(&Token::LParen)?;
8895 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8896 self.expect_token(&Token::RParen)?;
8897 Some(sorted_by_columns)
8898 } else {
8899 None
8900 };
8901
8902 self.expect_keyword_is(Keyword::INTO)?;
8903 let num_buckets = self.parse_number_value()?.value;
8904 self.expect_keyword_is(Keyword::BUCKETS)?;
8905 Some(ClusteredBy {
8906 columns,
8907 sorted_by,
8908 num_buckets,
8909 })
8910 } else {
8911 None
8912 };
8913 Ok(clustered_by)
8914 }
8915
8916 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8917 if self.parse_keyword(Keyword::RESTRICT) {
8918 Ok(ReferentialAction::Restrict)
8919 } else if self.parse_keyword(Keyword::CASCADE) {
8920 Ok(ReferentialAction::Cascade)
8921 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8922 Ok(ReferentialAction::SetNull)
8923 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8924 Ok(ReferentialAction::NoAction)
8925 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8926 Ok(ReferentialAction::SetDefault)
8927 } else {
8928 self.expected(
8929 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8930 self.peek_token(),
8931 )
8932 }
8933 }
8934
8935 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
8936 if self.parse_keyword(Keyword::FULL) {
8937 Ok(ConstraintReferenceMatchKind::Full)
8938 } else if self.parse_keyword(Keyword::PARTIAL) {
8939 Ok(ConstraintReferenceMatchKind::Partial)
8940 } else if self.parse_keyword(Keyword::SIMPLE) {
8941 Ok(ConstraintReferenceMatchKind::Simple)
8942 } else {
8943 self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
8944 }
8945 }
8946
    /// Parses optional constraint characteristics — `[NOT] DEFERRABLE`,
    /// `INITIALLY {DEFERRED | IMMEDIATE}`, `[NOT] ENFORCED` — in any order,
    /// each at most once; returns `Ok(None)` when none were present.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // `NOT DEFERRABLE` must be tried before bare `DEFERRABLE`.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        // Report characteristics only when at least one clause was parsed.
        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
8983
    /// Parses an optional table-level constraint:
    /// `[CONSTRAINT <name>] {UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK |
    /// INDEX/KEY | FULLTEXT/SPATIAL} ...`.
    ///
    /// Returns `Ok(None)` (consuming nothing) when the next tokens do not
    /// start a constraint; errors when `CONSTRAINT <name>` was seen but no
    /// constraint body follows.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional explicit constraint name.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // MySQL allows `UNIQUE KEY|INDEX`; reject the display
                // keyword for other dialects.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self
                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
                }

                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // After `PRIMARY` the `KEY` keyword is mandatory.
                self.expect_keyword_is(Keyword::KEY)?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order,
                // each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                // Optional `[NOT] ENFORCED` flag.
                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // `USING` directly after INDEX/KEY means there is no index
                // name, only an index type.
                let name = match self.peek_token().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // FULLTEXT/SPATIAL cannot carry a CONSTRAINT name here.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint: un-consume the token we looked at.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
9184
9185 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9186 Ok(if self.parse_keyword(Keyword::NULLS) {
9187 let not = self.parse_keyword(Keyword::NOT);
9188 self.expect_keyword_is(Keyword::DISTINCT)?;
9189 if not {
9190 NullsDistinctOption::NotDistinct
9191 } else {
9192 NullsDistinctOption::Distinct
9193 }
9194 } else {
9195 NullsDistinctOption::None
9196 })
9197 }
9198
9199 pub fn maybe_parse_options(
9200 &mut self,
9201 keyword: Keyword,
9202 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9203 if let Token::Word(word) = self.peek_token().token {
9204 if word.keyword == keyword {
9205 return Ok(Some(self.parse_options(keyword)?));
9206 }
9207 };
9208 Ok(None)
9209 }
9210
9211 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9212 if self.parse_keyword(keyword) {
9213 self.expect_token(&Token::LParen)?;
9214 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9215 self.expect_token(&Token::RParen)?;
9216 Ok(options)
9217 } else {
9218 Ok(vec![])
9219 }
9220 }
9221
9222 pub fn parse_options_with_keywords(
9223 &mut self,
9224 keywords: &[Keyword],
9225 ) -> Result<Vec<SqlOption>, ParserError> {
9226 if self.parse_keywords(keywords) {
9227 self.expect_token(&Token::LParen)?;
9228 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9229 self.expect_token(&Token::RParen)?;
9230 Ok(options)
9231 } else {
9232 Ok(vec![])
9233 }
9234 }
9235
9236 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9237 Ok(if self.parse_keyword(Keyword::BTREE) {
9238 IndexType::BTree
9239 } else if self.parse_keyword(Keyword::HASH) {
9240 IndexType::Hash
9241 } else if self.parse_keyword(Keyword::GIN) {
9242 IndexType::GIN
9243 } else if self.parse_keyword(Keyword::GIST) {
9244 IndexType::GiST
9245 } else if self.parse_keyword(Keyword::SPGIST) {
9246 IndexType::SPGiST
9247 } else if self.parse_keyword(Keyword::BRIN) {
9248 IndexType::BRIN
9249 } else if self.parse_keyword(Keyword::BLOOM) {
9250 IndexType::Bloom
9251 } else {
9252 IndexType::Custom(self.parse_identifier()?)
9253 })
9254 }
9255
9256 pub fn parse_optional_using_then_index_type(
9262 &mut self,
9263 ) -> Result<Option<IndexType>, ParserError> {
9264 if self.parse_keyword(Keyword::USING) {
9265 Ok(Some(self.parse_index_type()?))
9266 } else {
9267 Ok(None)
9268 }
9269 }
9270
9271 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9274 self.maybe_parse(|parser| parser.parse_identifier())
9275 }
9276
9277 #[must_use]
9278 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9279 if self.parse_keyword(Keyword::KEY) {
9280 KeyOrIndexDisplay::Key
9281 } else if self.parse_keyword(Keyword::INDEX) {
9282 KeyOrIndexDisplay::Index
9283 } else {
9284 KeyOrIndexDisplay::None
9285 }
9286 }
9287
9288 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9289 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9290 Ok(Some(IndexOption::Using(index_type)))
9291 } else if self.parse_keyword(Keyword::COMMENT) {
9292 let s = self.parse_literal_string()?;
9293 Ok(Some(IndexOption::Comment(s)))
9294 } else {
9295 Ok(None)
9296 }
9297 }
9298
9299 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9300 let mut options = Vec::new();
9301
9302 loop {
9303 match self.parse_optional_index_option()? {
9304 Some(index_option) => options.push(index_option),
9305 None => return Ok(options),
9306 }
9307 }
9308 }
9309
9310 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9311 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9312
9313 match self.peek_token().token {
9314 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9315 Ok(SqlOption::Ident(self.parse_identifier()?))
9316 }
9317 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9318 self.parse_option_partition()
9319 }
9320 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9321 self.parse_option_clustered()
9322 }
9323 _ => {
9324 let name = self.parse_identifier()?;
9325 self.expect_token(&Token::Eq)?;
9326 let value = self.parse_expr()?;
9327
9328 Ok(SqlOption::KeyValue { key: name, value })
9329 }
9330 }
9331 }
9332
    /// Parses MSSQL `CLUSTERED ...` table options:
    /// `CLUSTERED COLUMNSTORE INDEX [ORDER (cols)]` or
    /// `CLUSTERED INDEX (col [ASC|DESC], ...)`.
    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
        // The longest keyword sequence is tried first because the shorter
        // ones are prefixes of it.
        if self.parse_keywords(&[
            Keyword::CLUSTERED,
            Keyword::COLUMNSTORE,
            Keyword::INDEX,
            Keyword::ORDER,
        ]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndexOrder(
                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                ),
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndex,
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
            self.expect_token(&Token::LParen)?;

            // Each index column may carry an optional ASC/DESC marker.
            let columns = self.parse_comma_separated(|p| {
                let name = p.parse_identifier()?;
                let asc = p.parse_asc_desc();

                Ok(ClusteredIndex { name, asc })
            })?;

            self.expect_token(&Token::RParen)?;

            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
        } else {
            Err(ParserError::ParserError(
                "invalid CLUSTERED sequence".to_string(),
            ))
        }
    }
9368
    /// Parses MSSQL's `PARTITION (<column> RANGE [LEFT | RIGHT]
    /// FOR VALUES (<expr>, ...))` table option.
    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
        self.expect_keyword_is(Keyword::PARTITION)?;
        self.expect_token(&Token::LParen)?;
        let column_name = self.parse_identifier()?;

        self.expect_keyword_is(Keyword::RANGE)?;
        // The range direction is optional.
        let range_direction = if self.parse_keyword(Keyword::LEFT) {
            Some(PartitionRangeDirection::Left)
        } else if self.parse_keyword(Keyword::RIGHT) {
            Some(PartitionRangeDirection::Right)
        } else {
            None
        };

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
        self.expect_token(&Token::LParen)?;

        let for_values = self.parse_comma_separated(Parser::parse_expr)?;

        self.expect_token(&Token::RParen)?;
        // Closes the outer `PARTITION (` parenthesis.
        self.expect_token(&Token::RParen)?;

        Ok(SqlOption::Partition {
            column_name,
            range_direction,
            for_values,
        })
    }
9397
9398 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9399 self.expect_token(&Token::LParen)?;
9400 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9401 self.expect_token(&Token::RParen)?;
9402 Ok(Partition::Partitions(partitions))
9403 }
9404
    /// Parses a parenthesized projection body:
    /// `(SELECT <projection> [GROUP BY ...] [ORDER BY ...])`.
    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::SELECT)?;
        let projection = self.parse_projection()?;
        let group_by = self.parse_optional_group_by()?;
        let order_by = self.parse_optional_order_by()?;
        self.expect_token(&Token::RParen)?;
        Ok(ProjectionSelect {
            projection,
            group_by,
            order_by,
        })
    }
    /// Parses the tail of `ALTER TABLE ... ADD PROJECTION [IF NOT EXISTS]
    /// <name> (SELECT ...)`; the `PROJECTION` keyword has already been
    /// consumed by the caller.
    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        let query = self.parse_projection_select()?;
        Ok(AlterTableOperation::AddProjection {
            if_not_exists,
            name,
            select: query,
        })
    }
9428
    /// Parses one operation of an `ALTER TABLE` statement — the part after
    /// `ALTER TABLE <name>` up to the next comma (operations are
    /// comma-separated by the caller).
    ///
    /// Handles ANSI operations (`ADD`/`DROP`/`RENAME`/`ALTER COLUMN`,
    /// constraints) as well as dialect-specific forms: ClickHouse projections
    /// and partition maintenance, PostgreSQL row-level security and replica
    /// identity, MySQL `ALGORITHM`/`LOCK`/`AUTO_INCREMENT`, Snowflake
    /// clustering/reclustering, and Hive-style `SET TBLPROPERTIES`.
    ///
    /// NOTE: branch order matters throughout — each `parse_keyword(s)` call
    /// consumes tokens on success, so earlier branches shadow later ones.
    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <table constraint> | ADD PROJECTION | ADD PARTITION ... | ADD [COLUMN] ...
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                // PostgreSQL allows `ADD CONSTRAINT ... NOT VALID`.
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterTableOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
                && self.parse_keyword(Keyword::PROJECTION)
            {
                // ClickHouse `ADD PROJECTION ...` is parsed by a dedicated helper.
                return self.parse_alter_table_add_projection();
            } else {
                // `IF NOT EXISTS` may precede either partitions or a column.
                let if_not_exists =
                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
                // Hive-style `ADD PARTITION (...) PARTITION (...) ...` — zero or more.
                let mut new_partitions = vec![];
                loop {
                    if self.parse_keyword(Keyword::PARTITION) {
                        new_partitions.push(self.parse_partition()?);
                    } else {
                        break;
                    }
                }
                if !new_partitions.is_empty() {
                    AlterTableOperation::AddPartitions {
                        if_not_exists,
                        new_partitions,
                    }
                } else {
                    // No partitions: this is `ADD [COLUMN] <column_def>`.
                    let column_keyword = self.parse_keyword(Keyword::COLUMN);

                    // `IF NOT EXISTS` may also appear after COLUMN on these dialects;
                    // either position sets the flag.
                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
                    {
                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
                            || if_not_exists
                    } else {
                        false
                    };

                    let column_def = self.parse_column_def()?;

                    // MySQL `FIRST` / `AFTER <col>` placement, if present.
                    let column_position = self.parse_column_position()?;

                    AlterTableOperation::AddColumn {
                        column_keyword,
                        if_not_exists,
                        column_def,
                        column_position,
                    }
                }
            }
        } else if self.parse_keyword(Keyword::RENAME) {
            // RENAME CONSTRAINT | RENAME TO | RENAME AS | RENAME [COLUMN] old TO new
            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
                let old_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_name = self.parse_identifier()?;
                AlterTableOperation::RenameConstraint { old_name, new_name }
            } else if self.parse_keyword(Keyword::TO) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::To(table_name),
                }
            } else if self.parse_keyword(Keyword::AS) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::As(table_name),
                }
            } else {
                // The COLUMN keyword is optional: `RENAME [COLUMN] old TO new`.
                let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_column_name = self.parse_identifier()?;
                AlterTableOperation::RenameColumn {
                    old_column_name,
                    new_column_name,
                }
            }
        } else if self.parse_keyword(Keyword::DISABLE) {
            // PostgreSQL: DISABLE { ROW LEVEL SECURITY | RULE name | TRIGGER name }
            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::DisableRowLevelSecurity {}
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableTrigger { name }
            } else {
                return self.expected(
                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keyword(Keyword::ENABLE) {
            // PostgreSQL: ENABLE { [ALWAYS|REPLICA] RULE/TRIGGER | ROW LEVEL SECURITY }
            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysRule { name }
            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysTrigger { name }
            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::EnableRowLevelSecurity {}
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaRule { name }
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaTrigger { name }
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableTrigger { name }
            } else {
                return self.expected(
                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keywords(&[Keyword::FORCE, Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
            AlterTableOperation::ForceRowLevelSecurity
        } else if self.parse_keywords(&[Keyword::NO, Keyword::FORCE, Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
            AlterTableOperation::NoForceRowLevelSecurity
        // NOTE(review): in the next two branches parse_keywords runs BEFORE the
        // dialect check, so on a non-ClickHouse/Generic dialect the keywords are
        // consumed even when the branch is rejected — confirm this is intended
        // (compare the ADD PROJECTION branch above, which checks the dialect first).
        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse: CLEAR PROJECTION [IF EXISTS] name [IN PARTITION part]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::ClearProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            // ClickHouse: MATERIALIZE PROJECTION [IF EXISTS] name [IN PARTITION part]
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::MaterializeProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keyword(Keyword::DROP) {
            // DROP {PARTITION | CONSTRAINT | PRIMARY KEY | FOREIGN KEY | INDEX |
            //       PROJECTION | CLUSTERING KEY | [COLUMN] ...}
            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: true,
                }
            } else if self.parse_keyword(Keyword::PARTITION) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: false,
                }
            } else if self.parse_keyword(Keyword::CONSTRAINT) {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropConstraint {
                    if_exists,
                    name,
                    drop_behavior,
                }
            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropPrimaryKey { drop_behavior }
            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropForeignKey {
                    name,
                    drop_behavior,
                }
            } else if self.parse_keyword(Keyword::INDEX) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DropIndex { name }
            // NOTE(review): same keyword-before-dialect-check ordering as the
            // CLEAR/MATERIALIZE PROJECTION branches above — confirm intended.
            } else if self.parse_keyword(Keyword::PROJECTION)
                && dialect_of!(self is ClickHouseDialect|GenericDialect)
            {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                AlterTableOperation::DropProjection { if_exists, name }
            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
                AlterTableOperation::DropClusteringKey
            } else {
                // Fallback: DROP [COLUMN] [IF EXISTS] col [, col ...] [CASCADE|RESTRICT]
                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
                    self.parse_comma_separated(Parser::parse_identifier)?
                } else {
                    vec![self.parse_identifier()?]
                };
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropColumn {
                    has_column_keyword,
                    column_names,
                    if_exists,
                    drop_behavior,
                }
            }
        } else if self.parse_keyword(Keyword::PARTITION) {
            // Hive: PARTITION (old...) RENAME TO PARTITION (new...)
            self.expect_token(&Token::LParen)?;
            let before = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::RENAME)?;
            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
            self.expect_token(&Token::LParen)?;
            let renames = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::RenamePartitions {
                old_partitions: before,
                new_partitions: renames,
            }
        } else if self.parse_keyword(Keyword::CHANGE) {
            // MySQL: CHANGE [COLUMN] old new <type> [options] [FIRST|AFTER col]
            let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
            let new_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ChangeColumn {
                old_name,
                new_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::MODIFY) {
            // MySQL: MODIFY [COLUMN] col <type> [options] [FIRST|AFTER col]
            let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ModifyColumn {
                col_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::ALTER) {
            // ALTER [COLUMN] col { SET/DROP NOT NULL | SET/DROP DEFAULT |
            //                      SET DATA TYPE / TYPE | ADD GENERATED ... }
            let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
            let is_postgresql = dialect_of!(self is PostgreSqlDialect);

            let op: AlterColumnOperation = if self.parse_keywords(&[
                Keyword::SET,
                Keyword::NOT,
                Keyword::NULL,
            ]) {
                AlterColumnOperation::SetNotNull {}
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
                AlterColumnOperation::DropNotNull {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
                AlterColumnOperation::SetDefault {
                    value: self.parse_expr()?,
                }
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
                AlterColumnOperation::DropDefault {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
                // `had_set` records whether the SQL spelled `SET DATA TYPE` or bare `TYPE`.
                self.parse_set_data_type(true)?
            } else if self.parse_keyword(Keyword::TYPE) {
                self.parse_set_data_type(false)?
            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
                // PostgreSQL identity columns:
                // ADD GENERATED [ALWAYS | BY DEFAULT] AS IDENTITY [( seq opts )]
                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
                    Some(GeneratedAs::Always)
                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
                    Some(GeneratedAs::ByDefault)
                } else {
                    None
                };

                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;

                let mut sequence_options: Option<Vec<SequenceOptions>> = None;

                if self.peek_token().token == Token::LParen {
                    self.expect_token(&Token::LParen)?;
                    sequence_options = Some(self.parse_create_sequence_options()?);
                    self.expect_token(&Token::RParen)?;
                }

                AlterColumnOperation::AddGenerated {
                    generated_as,
                    sequence_options,
                }
            } else {
                // The ADD GENERATED form is only advertised for PostgreSQL.
                let message = if is_postgresql {
                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
                } else {
                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
                };

                return self.expected(message, self.peek_token());
            };
            AlterTableOperation::AlterColumn { column_name, op }
        } else if self.parse_keyword(Keyword::SWAP) {
            // Snowflake: SWAP WITH other_table
            self.expect_keyword_is(Keyword::WITH)?;
            let table_name = self.parse_object_name(false)?;
            AlterTableOperation::SwapWith { table_name }
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
        {
            let new_owner = self.parse_owner()?;
            AlterTableOperation::OwnerTo { new_owner }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::ATTACH)
        {
            AlterTableOperation::AttachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::DETACH)
        {
            AlterTableOperation::DetachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::FREEZE)
        {
            // ClickHouse: FREEZE PART|PARTITION expr [WITH NAME backup]
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::FreezePartition {
                partition,
                with_name,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::UNFREEZE)
        {
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::UnfreezePartition {
                partition,
                with_name,
            }
        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            // Snowflake: CLUSTER BY ( expr, ... )
            self.expect_token(&Token::LParen)?;
            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::ClusterBy { exprs }
        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
            AlterTableOperation::SuspendRecluster
        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
            AlterTableOperation::ResumeRecluster
        } else if self.parse_keyword(Keyword::LOCK) {
            // MySQL: LOCK [=] DEFAULT|EXCLUSIVE|NONE|SHARED
            let equals = self.consume_token(&Token::Eq);
            let lock = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::EXCLUSIVE,
                Keyword::NONE,
                Keyword::SHARED,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableLock::Default,
                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
                Some(Keyword::NONE) => AlterTableLock::None,
                Some(Keyword::SHARED) => AlterTableLock::Shared,
                _ => self.expected(
                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Lock { equals, lock }
        } else if self.parse_keyword(Keyword::ALGORITHM) {
            // MySQL: ALGORITHM [=] DEFAULT|INSTANT|INPLACE|COPY
            let equals = self.consume_token(&Token::Eq);
            let algorithm = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::INSTANT,
                Keyword::INPLACE,
                Keyword::COPY,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
                _ => self.expected(
                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Algorithm { equals, algorithm }
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            // MySQL: AUTO_INCREMENT [=] <number>
            let equals = self.consume_token(&Token::Eq);
            let value = self.parse_number_value()?;
            AlterTableOperation::AutoIncrement { equals, value }
        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
            // PostgreSQL: REPLICA IDENTITY NONE|FULL|DEFAULT|USING INDEX idx
            let identity = if self.parse_keyword(Keyword::NONE) {
                ReplicaIdentity::None
            } else if self.parse_keyword(Keyword::FULL) {
                ReplicaIdentity::Full
            } else if self.parse_keyword(Keyword::DEFAULT) {
                ReplicaIdentity::Default
            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                ReplicaIdentity::Index(self.parse_identifier()?)
            } else {
                return self.expected(
                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
                    self.peek_token(),
                );
            };

            AlterTableOperation::ReplicaIdentity { identity }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterTableOperation::ValidateConstraint { name }
        } else {
            // Last resort: Hive `SET TBLPROPERTIES (...)`, then generic `SET (...)`.
            let mut options =
                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
            if !options.is_empty() {
                AlterTableOperation::SetTblProperties {
                    table_properties: options,
                }
            } else {
                options = self.parse_options(Keyword::SET)?;
                if !options.is_empty() {
                    AlterTableOperation::SetOptionsParens { options }
                } else {
                    return self.expected(
                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
                        self.peek_token(),
                    );
                }
            }
        };
        Ok(operation)
    }
9889
9890 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9891 let data_type = self.parse_data_type()?;
9892 let using = if self.dialect.supports_alter_column_type_using()
9893 && self.parse_keyword(Keyword::USING)
9894 {
9895 Some(self.parse_expr()?)
9896 } else {
9897 None
9898 };
9899 Ok(AlterColumnOperation::SetDataType {
9900 data_type,
9901 using,
9902 had_set,
9903 })
9904 }
9905
9906 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9907 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9908 match keyword {
9909 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9910 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9911 unexpected_keyword => Err(ParserError::ParserError(
9913 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
9914 )),
9915 }
9916 }
9917
    /// Parses an `ALTER ...` statement; the `ALTER` keyword has already been
    /// consumed by the caller, so the next token selects the object type.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // parse_alter_schema expects to consume `ALTER SCHEMA` itself,
                // so rewind over both SCHEMA and the already-consumed ALTER.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::TABLE => self.parse_alter_table(false),
            // Snowflake `ALTER ICEBERG TABLE ...`.
            Keyword::ICEBERG => {
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::INDEX => {
                // Only `ALTER INDEX name RENAME TO new_name` is supported.
                let index_name = self.parse_object_name(false)?;
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected("TO after RENAME", self.peek_token());
                    }
                } else {
                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::OPERATOR => self.parse_alter_operator(),
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy(),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user(),
            // Unreachable: expect_one_of_keywords only returns listed keywords.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
            )),
        }
    }
9974
    /// Parses `ALTER TABLE ...` after the `ALTER TABLE` keywords have been
    /// consumed. `iceberg` is true for Snowflake's `ALTER ICEBERG TABLE` form.
    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        // PostgreSQL `ONLY` (no inheritance recursion), then the table name.
        let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
        let on_cluster = self.parse_optional_on_cluster()?;
        // One or more comma-separated operations.
        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;

        // Hive trailing `[SET] LOCATION 'path'`.
        let mut location = None;
        if self.parse_keyword(Keyword::LOCATION) {
            location = Some(HiveSetLocation {
                has_set: false,
                location: self.parse_identifier()?,
            });
        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
            location = Some(HiveSetLocation {
                has_set: true,
                location: self.parse_identifier()?,
            });
        }

        // Capture the statement's final token (the semicolon if present,
        // otherwise the last consumed token) for span bookkeeping.
        let end_token = if self.peek_token_ref().token == Token::SemiColon {
            self.peek_token_ref().clone()
        } else {
            self.get_current_token().clone()
        };

        Ok(AlterTable {
            name: table_name,
            if_exists,
            only,
            operations,
            location,
            on_cluster,
            table_type: if iceberg {
                Some(AlterTableType::Iceberg)
            } else {
                None
            },
            end_token: AttachedToken(end_token),
        }
        .into())
    }
10018
10019 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
10020 let name = self.parse_object_name(false)?;
10021 let columns = self.parse_parenthesized_column_list(Optional, false)?;
10022
10023 let with_options = self.parse_options(Keyword::WITH)?;
10024
10025 self.expect_keyword_is(Keyword::AS)?;
10026 let query = self.parse_query()?;
10027
10028 Ok(Statement::AlterView {
10029 name,
10030 columns,
10031 query,
10032 with_options,
10033 })
10034 }
10035
10036 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10038 let name = self.parse_object_name(false)?;
10039
10040 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10041 let new_name = self.parse_identifier()?;
10042 Ok(Statement::AlterType(AlterType {
10043 name,
10044 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10045 }))
10046 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10047 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10048 let new_enum_value = self.parse_identifier()?;
10049 let position = if self.parse_keyword(Keyword::BEFORE) {
10050 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10051 } else if self.parse_keyword(Keyword::AFTER) {
10052 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10053 } else {
10054 None
10055 };
10056
10057 Ok(Statement::AlterType(AlterType {
10058 name,
10059 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10060 if_not_exists,
10061 value: new_enum_value,
10062 position,
10063 }),
10064 }))
10065 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10066 let existing_enum_value = self.parse_identifier()?;
10067 self.expect_keyword(Keyword::TO)?;
10068 let new_enum_value = self.parse_identifier()?;
10069
10070 Ok(Statement::AlterType(AlterType {
10071 name,
10072 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10073 from: existing_enum_value,
10074 to: new_enum_value,
10075 }),
10076 }))
10077 } else {
10078 self.expected_ref(
10079 "{RENAME TO | { RENAME | ADD } VALUE}",
10080 self.peek_token_ref(),
10081 )
10082 }
10083 }
10084
    /// Parses `ALTER OPERATOR <name> ( <left_type|NONE>, <right_type> ) ...`
    /// (PostgreSQL); the `ALTER OPERATOR` keywords have already been consumed.
    ///
    /// Supported operations: `OWNER TO`, `SET SCHEMA`, and `SET ( option, ... )`.
    pub fn parse_alter_operator(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_operator_name()?;

        // The operand type signature: `( NONE | <type>, <type> )`.
        // NONE as the left type denotes a prefix operator.
        self.expect_token(&Token::LParen)?;

        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // OWNER TO { CURRENT_ROLE | CURRENT_USER | SESSION_USER | <ident> }
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            // SET ( RESTRICT = .. , JOIN = .. , COMMUTATOR = .. ,
            //       NEGATOR = .. , HASHES , MERGES )
            self.expect_token(&Token::LParen)?;

            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    // RESTRICT/JOIN take a procedure name or NONE (clears it).
                    Keyword::RESTRICT => {
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    // COMMUTATOR/NEGATOR take an operator name.
                    Keyword::COMMUTATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    // HASHES/MERGES are bare flags.
                    Keyword::HASHES => {
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // Unreachable: expect_one_of_keywords limits the keywords.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterOperator(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        }))
    }
10194
    /// Parses a complete `ALTER SCHEMA ...` statement. Unlike the other
    /// `parse_alter_*` helpers, this one expects the `ALTER SCHEMA` keywords
    /// to still be pending (the dispatcher rewinds before delegating here).
    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
            // parse_options wants to consume OPTIONS itself, so step back
            // over the OPTIONS keyword we just matched.
            self.prev_token();
            let options = self.parse_options(Keyword::OPTIONS)?;
            AlterSchemaOperation::SetOptionsParens { options }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
            let collate = self.parse_expr()?;
            AlterSchemaOperation::SetDefaultCollate { collate }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            // Replica options are optional; only parse when OPTIONS follows.
            let options = if self.peek_keyword(Keyword::OPTIONS) {
                Some(self.parse_options(Keyword::OPTIONS)?)
            } else {
                None
            };
            AlterSchemaOperation::AddReplica { replica, options }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            AlterSchemaOperation::DropReplica { replica }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterSchemaOperation::Rename { name: new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterSchemaOperation::OwnerTo { owner }
        } else {
            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
        };
        Ok(Statement::AlterSchema(AlterSchema {
            name,
            if_exists,
            operations: vec![operation],
        }))
    }
10234
10235 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10238 let object_name = self.parse_object_name(false)?;
10239 if self.peek_token().token == Token::LParen {
10240 match self.parse_function(object_name)? {
10241 Expr::Function(f) => Ok(Statement::Call(f)),
10242 other => parser_err!(
10243 format!("Expected a simple procedure call but found: {other}"),
10244 self.peek_token().span.start
10245 ),
10246 }
10247 } else {
10248 Ok(Statement::Call(Function {
10249 name: object_name,
10250 uses_odbc_syntax: false,
10251 parameters: FunctionArguments::None,
10252 args: FunctionArguments::None,
10253 over: None,
10254 filter: None,
10255 null_treatment: None,
10256 within_group: vec![],
10257 }))
10258 }
10259 }
10260
    /// Parses a PostgreSQL-style `COPY` statement; the `COPY` keyword has
    /// already been consumed.
    ///
    /// Grammar sketch:
    /// `COPY { table [(cols)] | ( query ) } { FROM | TO }
    ///  { STDIN | STDOUT | PROGRAM 'cmd' | 'file' } [WITH] [( options )]
    ///  [legacy options] [; inline tab-separated data for STDIN]`
    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
        let source;
        if self.consume_token(&Token::LParen) {
            // Parenthesized form copies the result of a query.
            source = CopySource::Query(self.parse_query()?);
            self.expect_token(&Token::RParen)?;
        } else {
            let table_name = self.parse_object_name(false)?;
            let columns = self.parse_parenthesized_column_list(Optional, false)?;
            source = CopySource::Table {
                table_name,
                columns,
            };
        }
        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
            Some(Keyword::FROM) => false,
            Some(Keyword::TO) => true,
            _ => self.expected("FROM or TO", self.peek_token())?,
        };
        if !to {
            // A query source can only be exported (`COPY (...) TO`), never
            // used as the target of `COPY ... FROM`.
            if let CopySource::Query(_) = source {
                return Err(ParserError::ParserError(
                    "COPY ... FROM does not support query as a source".to_string(),
                ));
            }
        }
        let target = if self.parse_keyword(Keyword::STDIN) {
            CopyTarget::Stdin
        } else if self.parse_keyword(Keyword::STDOUT) {
            CopyTarget::Stdout
        } else if self.parse_keyword(Keyword::PROGRAM) {
            CopyTarget::Program {
                command: self.parse_literal_string()?,
            }
        } else {
            CopyTarget::File {
                filename: self.parse_literal_string()?,
            }
        };
        // Optional WITH, then the modern parenthesized option list.
        let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
        if self.consume_token(&Token::LParen) {
            options = self.parse_comma_separated(Parser::parse_copy_option)?;
            self.expect_token(&Token::RParen)?;
        }
        // Pre-9.0 bare (unparenthesized) options may follow; maybe_parse
        // backtracks when the next tokens are not a legacy option.
        let mut legacy_options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            legacy_options.push(opt);
        }
        // `COPY ... FROM STDIN;` is followed by inline tab-separated values.
        let values = if let CopyTarget::Stdin = target {
            self.expect_token(&Token::SemiColon)?;
            self.parse_tsv()
        } else {
            vec![]
        };
        Ok(Statement::Copy {
            source,
            to,
            target,
            options,
            legacy_options,
            values,
        })
    }
10327
10328 fn parse_open(&mut self) -> Result<Statement, ParserError> {
10330 self.expect_keyword(Keyword::OPEN)?;
10331 Ok(Statement::Open(OpenStatement {
10332 cursor_name: self.parse_identifier()?,
10333 }))
10334 }
10335
10336 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10337 let cursor = if self.parse_keyword(Keyword::ALL) {
10338 CloseCursor::All
10339 } else {
10340 let name = self.parse_identifier()?;
10341
10342 CloseCursor::Specific { name }
10343 };
10344
10345 Ok(Statement::Close { cursor })
10346 }
10347
    /// Parses one option from the modern parenthesized `COPY ... WITH ( ... )`
    /// option list (PostgreSQL 9.0+ syntax).
    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::FORMAT,
            Keyword::FREEZE,
            Keyword::DELIMITER,
            Keyword::NULL,
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE_QUOTE,
            Keyword::FORCE_NOT_NULL,
            Keyword::FORCE_NULL,
            Keyword::ENCODING,
        ]) {
            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
            // Boolean options: an omitted value, TRUE, or any non-FALSE
            // keyword all mean "on"; only an explicit FALSE turns them off.
            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
                Some(Keyword::FALSE)
            )),
            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
            // The FORCE_* options each take a mandatory column list.
            Some(Keyword::FORCE_QUOTE) => {
                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NOT_NULL) => {
                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::FORCE_NULL) => {
                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
            }
            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
            _ => self.expected("option", self.peek_token())?,
        };
        Ok(ret)
    }
10389
10390 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10391 if self.parse_keyword(Keyword::FORMAT) {
10393 let _ = self.parse_keyword(Keyword::AS);
10394 }
10395
10396 let ret = match self.parse_one_of_keywords(&[
10397 Keyword::ACCEPTANYDATE,
10398 Keyword::ACCEPTINVCHARS,
10399 Keyword::ADDQUOTES,
10400 Keyword::ALLOWOVERWRITE,
10401 Keyword::BINARY,
10402 Keyword::BLANKSASNULL,
10403 Keyword::BZIP2,
10404 Keyword::CLEANPATH,
10405 Keyword::COMPUPDATE,
10406 Keyword::CSV,
10407 Keyword::DATEFORMAT,
10408 Keyword::DELIMITER,
10409 Keyword::EMPTYASNULL,
10410 Keyword::ENCRYPTED,
10411 Keyword::ESCAPE,
10412 Keyword::EXTENSION,
10413 Keyword::FIXEDWIDTH,
10414 Keyword::GZIP,
10415 Keyword::HEADER,
10416 Keyword::IAM_ROLE,
10417 Keyword::IGNOREHEADER,
10418 Keyword::JSON,
10419 Keyword::MANIFEST,
10420 Keyword::MAXFILESIZE,
10421 Keyword::NULL,
10422 Keyword::PARALLEL,
10423 Keyword::PARQUET,
10424 Keyword::PARTITION,
10425 Keyword::REGION,
10426 Keyword::REMOVEQUOTES,
10427 Keyword::ROWGROUPSIZE,
10428 Keyword::STATUPDATE,
10429 Keyword::TIMEFORMAT,
10430 Keyword::TRUNCATECOLUMNS,
10431 Keyword::ZSTD,
10432 ]) {
10433 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10434 Some(Keyword::ACCEPTINVCHARS) => {
10435 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10437 Some(self.parse_literal_string()?)
10438 } else {
10439 None
10440 };
10441 CopyLegacyOption::AcceptInvChars(ch)
10442 }
10443 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10444 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10445 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10446 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10447 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10448 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10449 Some(Keyword::COMPUPDATE) => {
10450 let preset = self.parse_keyword(Keyword::PRESET);
10451 let enabled = match self.parse_one_of_keywords(&[
10452 Keyword::TRUE,
10453 Keyword::FALSE,
10454 Keyword::ON,
10455 Keyword::OFF,
10456 ]) {
10457 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10458 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10459 _ => None,
10460 };
10461 CopyLegacyOption::CompUpdate { preset, enabled }
10462 }
10463 Some(Keyword::CSV) => CopyLegacyOption::Csv({
10464 let mut opts = vec![];
10465 while let Some(opt) =
10466 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10467 {
10468 opts.push(opt);
10469 }
10470 opts
10471 }),
10472 Some(Keyword::DATEFORMAT) => {
10473 let _ = self.parse_keyword(Keyword::AS);
10474 let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10475 Some(self.parse_literal_string()?)
10476 } else {
10477 None
10478 };
10479 CopyLegacyOption::DateFormat(fmt)
10480 }
10481 Some(Keyword::DELIMITER) => {
10482 let _ = self.parse_keyword(Keyword::AS);
10483 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10484 }
10485 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10486 Some(Keyword::ENCRYPTED) => {
10487 let auto = self.parse_keyword(Keyword::AUTO);
10488 CopyLegacyOption::Encrypted { auto }
10489 }
10490 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10491 Some(Keyword::EXTENSION) => {
10492 let ext = self.parse_literal_string()?;
10493 CopyLegacyOption::Extension(ext)
10494 }
10495 Some(Keyword::FIXEDWIDTH) => {
10496 let spec = self.parse_literal_string()?;
10497 CopyLegacyOption::FixedWidth(spec)
10498 }
10499 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10500 Some(Keyword::HEADER) => CopyLegacyOption::Header,
10501 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10502 Some(Keyword::IGNOREHEADER) => {
10503 let _ = self.parse_keyword(Keyword::AS);
10504 let num_rows = self.parse_literal_uint()?;
10505 CopyLegacyOption::IgnoreHeader(num_rows)
10506 }
10507 Some(Keyword::JSON) => CopyLegacyOption::Json,
10508 Some(Keyword::MANIFEST) => {
10509 let verbose = self.parse_keyword(Keyword::VERBOSE);
10510 CopyLegacyOption::Manifest { verbose }
10511 }
10512 Some(Keyword::MAXFILESIZE) => {
10513 let _ = self.parse_keyword(Keyword::AS);
10514 let size = self.parse_number_value()?.value;
10515 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10516 Some(Keyword::MB) => Some(FileSizeUnit::MB),
10517 Some(Keyword::GB) => Some(FileSizeUnit::GB),
10518 _ => None,
10519 };
10520 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10521 }
10522 Some(Keyword::NULL) => {
10523 let _ = self.parse_keyword(Keyword::AS);
10524 CopyLegacyOption::Null(self.parse_literal_string()?)
10525 }
10526 Some(Keyword::PARALLEL) => {
10527 let enabled = match self.parse_one_of_keywords(&[
10528 Keyword::TRUE,
10529 Keyword::FALSE,
10530 Keyword::ON,
10531 Keyword::OFF,
10532 ]) {
10533 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10534 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10535 _ => None,
10536 };
10537 CopyLegacyOption::Parallel(enabled)
10538 }
10539 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10540 Some(Keyword::PARTITION) => {
10541 self.expect_keyword(Keyword::BY)?;
10542 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10543 let include = self.parse_keyword(Keyword::INCLUDE);
10544 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10545 }
10546 Some(Keyword::REGION) => {
10547 let _ = self.parse_keyword(Keyword::AS);
10548 let region = self.parse_literal_string()?;
10549 CopyLegacyOption::Region(region)
10550 }
10551 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
10552 Some(Keyword::ROWGROUPSIZE) => {
10553 let _ = self.parse_keyword(Keyword::AS);
10554 let file_size = self.parse_file_size()?;
10555 CopyLegacyOption::RowGroupSize(file_size)
10556 }
10557 Some(Keyword::STATUPDATE) => {
10558 let enabled = match self.parse_one_of_keywords(&[
10559 Keyword::TRUE,
10560 Keyword::FALSE,
10561 Keyword::ON,
10562 Keyword::OFF,
10563 ]) {
10564 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10565 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10566 _ => None,
10567 };
10568 CopyLegacyOption::StatUpdate(enabled)
10569 }
10570 Some(Keyword::TIMEFORMAT) => {
10571 let _ = self.parse_keyword(Keyword::AS);
10572 let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10573 Some(self.parse_literal_string()?)
10574 } else {
10575 None
10576 };
10577 CopyLegacyOption::TimeFormat(fmt)
10578 }
10579 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10580 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10581 _ => self.expected("option", self.peek_token())?,
10582 };
10583 Ok(ret)
10584 }
10585
10586 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10587 let size = self.parse_number_value()?.value;
10588 let unit = self.maybe_parse_file_size_unit();
10589 Ok(FileSize { size, unit })
10590 }
10591
10592 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10593 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10594 Some(Keyword::MB) => Some(FileSizeUnit::MB),
10595 Some(Keyword::GB) => Some(FileSizeUnit::GB),
10596 _ => None,
10597 }
10598 }
10599
10600 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10601 if self.parse_keyword(Keyword::DEFAULT) {
10602 Ok(IamRoleKind::Default)
10603 } else {
10604 let arn = self.parse_literal_string()?;
10605 Ok(IamRoleKind::Arn(arn))
10606 }
10607 }
10608
    /// Parses one option of the legacy `COPY ... CSV ...` sub-clause:
    /// `HEADER`, `QUOTE [AS] 'c'`, `ESCAPE [AS] 'c'`, `FORCE NOT NULL cols`,
    /// or `FORCE QUOTE cols`.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                // Optional `AS` noise word before the quote character.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                // Optional `AS` noise word before the escape character.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            // NOTE(review): these guards consume input on success; this relies
            // on `parse_keywords` restoring the position on a partial match so
            // that `FORCE NOT NULL` failing still allows `FORCE QUOTE` —
            // confirm against `parse_keywords`' contract.
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            _ => self.expected("csv option", self.peek_token())?,
        };
        Ok(ret)
    }
10639
10640 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10641 let s = self.parse_literal_string()?;
10642 if s.len() != 1 {
10643 let loc = self
10644 .tokens
10645 .get(self.index - 1)
10646 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10647 return parser_err!(format!("Expect a char, found {s:?}"), loc);
10648 }
10649 Ok(s.chars().next().unwrap())
10650 }
10651
    /// Parses a block of tab-delimited data by delegating to
    /// [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
10657
10658 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10659 let mut values = vec![];
10660 let mut content = String::from("");
10661 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10662 match t {
10663 Token::Whitespace(Whitespace::Tab) => {
10664 values.push(Some(content.to_string()));
10665 content.clear();
10666 }
10667 Token::Whitespace(Whitespace::Newline) => {
10668 values.push(Some(content.to_string()));
10669 content.clear();
10670 }
10671 Token::Backslash => {
10672 if self.consume_token(&Token::Period) {
10673 return values;
10674 }
10675 if let Token::Word(w) = self.next_token().token {
10676 if w.value == "N" {
10677 values.push(None);
10678 }
10679 }
10680 }
10681 _ => {
10682 content.push_str(&t.to_string());
10683 }
10684 }
10685 }
10686 values
10687 }
10688
    /// Parses a literal value — boolean, NULL, number, any of the supported
    /// string-literal kinds, or a placeholder — returning it together with
    /// the span of the token(s) it came from.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Attach the consumed token's span to the parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are values only where the dialect allows it.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a string literal.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Single/double-quoted strings may absorb adjacent literals in
            // dialects that support string-literal concatenation.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the sigil must be immediately
            // followed (no whitespace is skipped here) by a word or an
            // integer, which together form the placeholder text.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The placeholder's span covers sigil through identifier.
                Ok(Value::Placeholder(tok.to_string() + &ident.value)
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
10800
10801 fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
10802 if self.dialect.supports_string_literal_concatenation() {
10803 while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
10804 self.peek_token_ref().token
10805 {
10806 str.push_str(s.clone().as_str());
10807 self.advance_token();
10808 }
10809 }
10810 str
10811 }
10812
10813 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10815 let value_wrapper = self.parse_value()?;
10816 match &value_wrapper.value {
10817 Value::Number(_, _) => Ok(value_wrapper),
10818 Value::Placeholder(_) => Ok(value_wrapper),
10819 _ => {
10820 self.prev_token();
10821 self.expected("literal number", self.peek_token())
10822 }
10823 }
10824 }
10825
10826 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
10829 let next_token = self.next_token();
10830 match next_token.token {
10831 Token::Plus => Ok(Expr::UnaryOp {
10832 op: UnaryOperator::Plus,
10833 expr: Box::new(Expr::Value(self.parse_number_value()?)),
10834 }),
10835 Token::Minus => Ok(Expr::UnaryOp {
10836 op: UnaryOperator::Minus,
10837 expr: Box::new(Expr::Value(self.parse_number_value()?)),
10838 }),
10839 _ => {
10840 self.prev_token();
10841 Ok(Expr::Value(self.parse_number_value()?))
10842 }
10843 }
10844 }
10845
10846 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
10847 let next_token = self.next_token();
10848 let span = next_token.span;
10849 match next_token.token {
10850 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
10851 Value::SingleQuotedString(s.to_string()).with_span(span),
10852 )),
10853 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
10854 Value::DoubleQuotedString(s.to_string()).with_span(span),
10855 )),
10856 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
10857 Value::HexStringLiteral(s.to_string()).with_span(span),
10858 )),
10859 unexpected => self.expected(
10860 "a string value",
10861 TokenWithSpan {
10862 token: unexpected,
10863 span,
10864 },
10865 ),
10866 }
10867 }
10868
10869 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
10871 let next_token = self.next_token();
10872 match next_token.token {
10873 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
10874 _ => self.expected("literal int", next_token),
10875 }
10876 }
10877
10878 fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
10881 let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
10882 let peek_token = parser.peek_token();
10883 let span = peek_token.span;
10884 match peek_token.token {
10885 Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
10886 {
10887 parser.next_token();
10888 Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
10889 }
10890 _ => Ok(Expr::Value(
10891 Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
10892 )),
10893 }
10894 };
10895
10896 Ok(CreateFunctionBody::AsBeforeOptions {
10897 body: parse_string_expr(self)?,
10898 link_symbol: if self.consume_token(&Token::Comma) {
10899 Some(parse_string_expr(self)?)
10900 } else {
10901 None
10902 },
10903 })
10904 }
10905
10906 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
10908 let next_token = self.next_token();
10909 match next_token.token {
10910 Token::Word(Word {
10911 value,
10912 keyword: Keyword::NoKeyword,
10913 ..
10914 }) => Ok(value),
10915 Token::SingleQuotedString(s) => Ok(s),
10916 Token::DoubleQuotedString(s) => Ok(s),
10917 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
10918 Ok(s)
10919 }
10920 Token::UnicodeStringLiteral(s) => Ok(s),
10921 _ => self.expected("literal string", next_token),
10922 }
10923 }
10924
10925 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
10927 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
10928 Some(Keyword::TRUE) => Ok(true),
10929 Some(Keyword::FALSE) => Ok(false),
10930 _ => self.expected("TRUE or FALSE", self.peek_token()),
10931 }
10932 }
10933
10934 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
10936 let neg = self.parse_keyword(Keyword::NOT);
10937 let normalized_form = self.maybe_parse(|parser| {
10938 match parser.parse_one_of_keywords(&[
10939 Keyword::NFC,
10940 Keyword::NFD,
10941 Keyword::NFKC,
10942 Keyword::NFKD,
10943 ]) {
10944 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
10945 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
10946 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
10947 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
10948 _ => parser.expected("unicode normalization form", parser.peek_token()),
10949 }
10950 })?;
10951 if self.parse_keyword(Keyword::NORMALIZED) {
10952 return Ok(Expr::IsNormalized {
10953 expr: Box::new(expr),
10954 form: normalized_form,
10955 negated: neg,
10956 });
10957 }
10958 self.expected("unicode normalization form", self.peek_token())
10959 }
10960
10961 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
10962 self.expect_token(&Token::LParen)?;
10963 let values = self.parse_comma_separated(|parser| {
10964 let name = parser.parse_literal_string()?;
10965 let e = if parser.consume_token(&Token::Eq) {
10966 let value = parser.parse_number()?;
10967 EnumMember::NamedValue(name, value)
10968 } else {
10969 EnumMember::Name(name)
10970 };
10971 Ok(e)
10972 })?;
10973 self.expect_token(&Token::RParen)?;
10974
10975 Ok(values)
10976 }
10977
10978 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
10980 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
10981 if trailing_bracket.0 {
10982 return parser_err!(
10983 format!("unmatched > after parsing data type {ty}"),
10984 self.peek_token()
10985 );
10986 }
10987
10988 Ok(ty)
10989 }
10990
10991 fn parse_data_type_helper(
10992 &mut self,
10993 ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
10994 let dialect = self.dialect;
10995 self.advance_token();
10996 let next_token = self.get_current_token();
10997 let next_token_index = self.get_current_index();
10998
10999 let mut trailing_bracket: MatchedTrailingBracket = false.into();
11000 let mut data = match &next_token.token {
11001 Token::Word(w) => match w.keyword {
11002 Keyword::BOOLEAN => Ok(DataType::Boolean),
11003 Keyword::BOOL => Ok(DataType::Bool),
11004 Keyword::FLOAT => {
11005 let precision = self.parse_exact_number_optional_precision_scale()?;
11006
11007 if self.parse_keyword(Keyword::UNSIGNED) {
11008 Ok(DataType::FloatUnsigned(precision))
11009 } else {
11010 Ok(DataType::Float(precision))
11011 }
11012 }
11013 Keyword::REAL => {
11014 if self.parse_keyword(Keyword::UNSIGNED) {
11015 Ok(DataType::RealUnsigned)
11016 } else {
11017 Ok(DataType::Real)
11018 }
11019 }
11020 Keyword::FLOAT4 => Ok(DataType::Float4),
11021 Keyword::FLOAT32 => Ok(DataType::Float32),
11022 Keyword::FLOAT64 => Ok(DataType::Float64),
11023 Keyword::FLOAT8 => Ok(DataType::Float8),
11024 Keyword::DOUBLE => {
11025 if self.parse_keyword(Keyword::PRECISION) {
11026 if self.parse_keyword(Keyword::UNSIGNED) {
11027 Ok(DataType::DoublePrecisionUnsigned)
11028 } else {
11029 Ok(DataType::DoublePrecision)
11030 }
11031 } else {
11032 let precision = self.parse_exact_number_optional_precision_scale()?;
11033
11034 if self.parse_keyword(Keyword::UNSIGNED) {
11035 Ok(DataType::DoubleUnsigned(precision))
11036 } else {
11037 Ok(DataType::Double(precision))
11038 }
11039 }
11040 }
11041 Keyword::TINYINT => {
11042 let optional_precision = self.parse_optional_precision();
11043 if self.parse_keyword(Keyword::UNSIGNED) {
11044 Ok(DataType::TinyIntUnsigned(optional_precision?))
11045 } else {
11046 if dialect.supports_data_type_signed_suffix() {
11047 let _ = self.parse_keyword(Keyword::SIGNED);
11048 }
11049 Ok(DataType::TinyInt(optional_precision?))
11050 }
11051 }
11052 Keyword::INT2 => {
11053 let optional_precision = self.parse_optional_precision();
11054 if self.parse_keyword(Keyword::UNSIGNED) {
11055 Ok(DataType::Int2Unsigned(optional_precision?))
11056 } else {
11057 Ok(DataType::Int2(optional_precision?))
11058 }
11059 }
11060 Keyword::SMALLINT => {
11061 let optional_precision = self.parse_optional_precision();
11062 if self.parse_keyword(Keyword::UNSIGNED) {
11063 Ok(DataType::SmallIntUnsigned(optional_precision?))
11064 } else {
11065 if dialect.supports_data_type_signed_suffix() {
11066 let _ = self.parse_keyword(Keyword::SIGNED);
11067 }
11068 Ok(DataType::SmallInt(optional_precision?))
11069 }
11070 }
11071 Keyword::MEDIUMINT => {
11072 let optional_precision = self.parse_optional_precision();
11073 if self.parse_keyword(Keyword::UNSIGNED) {
11074 Ok(DataType::MediumIntUnsigned(optional_precision?))
11075 } else {
11076 if dialect.supports_data_type_signed_suffix() {
11077 let _ = self.parse_keyword(Keyword::SIGNED);
11078 }
11079 Ok(DataType::MediumInt(optional_precision?))
11080 }
11081 }
11082 Keyword::INT => {
11083 let optional_precision = self.parse_optional_precision();
11084 if self.parse_keyword(Keyword::UNSIGNED) {
11085 Ok(DataType::IntUnsigned(optional_precision?))
11086 } else {
11087 if dialect.supports_data_type_signed_suffix() {
11088 let _ = self.parse_keyword(Keyword::SIGNED);
11089 }
11090 Ok(DataType::Int(optional_precision?))
11091 }
11092 }
11093 Keyword::INT4 => {
11094 let optional_precision = self.parse_optional_precision();
11095 if self.parse_keyword(Keyword::UNSIGNED) {
11096 Ok(DataType::Int4Unsigned(optional_precision?))
11097 } else {
11098 Ok(DataType::Int4(optional_precision?))
11099 }
11100 }
11101 Keyword::INT8 => {
11102 let optional_precision = self.parse_optional_precision();
11103 if self.parse_keyword(Keyword::UNSIGNED) {
11104 Ok(DataType::Int8Unsigned(optional_precision?))
11105 } else {
11106 Ok(DataType::Int8(optional_precision?))
11107 }
11108 }
11109 Keyword::INT16 => Ok(DataType::Int16),
11110 Keyword::INT32 => Ok(DataType::Int32),
11111 Keyword::INT64 => Ok(DataType::Int64),
11112 Keyword::INT128 => Ok(DataType::Int128),
11113 Keyword::INT256 => Ok(DataType::Int256),
11114 Keyword::INTEGER => {
11115 let optional_precision = self.parse_optional_precision();
11116 if self.parse_keyword(Keyword::UNSIGNED) {
11117 Ok(DataType::IntegerUnsigned(optional_precision?))
11118 } else {
11119 if dialect.supports_data_type_signed_suffix() {
11120 let _ = self.parse_keyword(Keyword::SIGNED);
11121 }
11122 Ok(DataType::Integer(optional_precision?))
11123 }
11124 }
11125 Keyword::BIGINT => {
11126 let optional_precision = self.parse_optional_precision();
11127 if self.parse_keyword(Keyword::UNSIGNED) {
11128 Ok(DataType::BigIntUnsigned(optional_precision?))
11129 } else {
11130 if dialect.supports_data_type_signed_suffix() {
11131 let _ = self.parse_keyword(Keyword::SIGNED);
11132 }
11133 Ok(DataType::BigInt(optional_precision?))
11134 }
11135 }
11136 Keyword::HUGEINT => Ok(DataType::HugeInt),
11137 Keyword::UBIGINT => Ok(DataType::UBigInt),
11138 Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11139 Keyword::USMALLINT => Ok(DataType::USmallInt),
11140 Keyword::UTINYINT => Ok(DataType::UTinyInt),
11141 Keyword::UINT8 => Ok(DataType::UInt8),
11142 Keyword::UINT16 => Ok(DataType::UInt16),
11143 Keyword::UINT32 => Ok(DataType::UInt32),
11144 Keyword::UINT64 => Ok(DataType::UInt64),
11145 Keyword::UINT128 => Ok(DataType::UInt128),
11146 Keyword::UINT256 => Ok(DataType::UInt256),
11147 Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11148 Keyword::NVARCHAR => {
11149 Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11150 }
11151 Keyword::CHARACTER => {
11152 if self.parse_keyword(Keyword::VARYING) {
11153 Ok(DataType::CharacterVarying(
11154 self.parse_optional_character_length()?,
11155 ))
11156 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11157 Ok(DataType::CharacterLargeObject(
11158 self.parse_optional_precision()?,
11159 ))
11160 } else {
11161 Ok(DataType::Character(self.parse_optional_character_length()?))
11162 }
11163 }
11164 Keyword::CHAR => {
11165 if self.parse_keyword(Keyword::VARYING) {
11166 Ok(DataType::CharVarying(
11167 self.parse_optional_character_length()?,
11168 ))
11169 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11170 Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11171 } else {
11172 Ok(DataType::Char(self.parse_optional_character_length()?))
11173 }
11174 }
11175 Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11176 Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11177 Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11178 Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11179 Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11180 Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11181 Keyword::LONGBLOB => Ok(DataType::LongBlob),
11182 Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11183 Keyword::BIT => {
11184 if self.parse_keyword(Keyword::VARYING) {
11185 Ok(DataType::BitVarying(self.parse_optional_precision()?))
11186 } else {
11187 Ok(DataType::Bit(self.parse_optional_precision()?))
11188 }
11189 }
11190 Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11191 Keyword::UUID => Ok(DataType::Uuid),
11192 Keyword::DATE => Ok(DataType::Date),
11193 Keyword::DATE32 => Ok(DataType::Date32),
11194 Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11195 Keyword::DATETIME64 => {
11196 self.prev_token();
11197 let (precision, time_zone) = self.parse_datetime_64()?;
11198 Ok(DataType::Datetime64(precision, time_zone))
11199 }
11200 Keyword::TIMESTAMP => {
11201 let precision = self.parse_optional_precision()?;
11202 let tz = if self.parse_keyword(Keyword::WITH) {
11203 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11204 TimezoneInfo::WithTimeZone
11205 } else if self.parse_keyword(Keyword::WITHOUT) {
11206 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11207 TimezoneInfo::WithoutTimeZone
11208 } else {
11209 TimezoneInfo::None
11210 };
11211 Ok(DataType::Timestamp(precision, tz))
11212 }
11213 Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11214 self.parse_optional_precision()?,
11215 TimezoneInfo::Tz,
11216 )),
11217 Keyword::TIMESTAMP_NTZ => {
11218 Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
11219 }
11220 Keyword::TIME => {
11221 let precision = self.parse_optional_precision()?;
11222 let tz = if self.parse_keyword(Keyword::WITH) {
11223 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11224 TimezoneInfo::WithTimeZone
11225 } else if self.parse_keyword(Keyword::WITHOUT) {
11226 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11227 TimezoneInfo::WithoutTimeZone
11228 } else {
11229 TimezoneInfo::None
11230 };
11231 Ok(DataType::Time(precision, tz))
11232 }
11233 Keyword::TIMETZ => Ok(DataType::Time(
11234 self.parse_optional_precision()?,
11235 TimezoneInfo::Tz,
11236 )),
11237 Keyword::INTERVAL => {
11238 if self.dialect.supports_interval_options() {
11239 let fields = self.maybe_parse_optional_interval_fields()?;
11240 let precision = self.parse_optional_precision()?;
11241 Ok(DataType::Interval { fields, precision })
11242 } else {
11243 Ok(DataType::Interval {
11244 fields: None,
11245 precision: None,
11246 })
11247 }
11248 }
11249 Keyword::JSON => Ok(DataType::JSON),
11250 Keyword::JSONB => Ok(DataType::JSONB),
11251 Keyword::REGCLASS => Ok(DataType::Regclass),
11252 Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11253 Keyword::FIXEDSTRING => {
11254 self.expect_token(&Token::LParen)?;
11255 let character_length = self.parse_literal_uint()?;
11256 self.expect_token(&Token::RParen)?;
11257 Ok(DataType::FixedString(character_length))
11258 }
11259 Keyword::TEXT => Ok(DataType::Text),
11260 Keyword::TINYTEXT => Ok(DataType::TinyText),
11261 Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11262 Keyword::LONGTEXT => Ok(DataType::LongText),
11263 Keyword::BYTEA => Ok(DataType::Bytea),
11264 Keyword::NUMERIC => Ok(DataType::Numeric(
11265 self.parse_exact_number_optional_precision_scale()?,
11266 )),
11267 Keyword::DECIMAL => {
11268 let precision = self.parse_exact_number_optional_precision_scale()?;
11269
11270 if self.parse_keyword(Keyword::UNSIGNED) {
11271 Ok(DataType::DecimalUnsigned(precision))
11272 } else {
11273 Ok(DataType::Decimal(precision))
11274 }
11275 }
11276 Keyword::DEC => {
11277 let precision = self.parse_exact_number_optional_precision_scale()?;
11278
11279 if self.parse_keyword(Keyword::UNSIGNED) {
11280 Ok(DataType::DecUnsigned(precision))
11281 } else {
11282 Ok(DataType::Dec(precision))
11283 }
11284 }
11285 Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11286 self.parse_exact_number_optional_precision_scale()?,
11287 )),
11288 Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11289 self.parse_exact_number_optional_precision_scale()?,
11290 )),
11291 Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11292 Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11293 Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11294 Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11295 Keyword::ARRAY => {
11296 if dialect_of!(self is SnowflakeDialect) {
11297 Ok(DataType::Array(ArrayElemTypeDef::None))
11298 } else if dialect_of!(self is ClickHouseDialect) {
11299 Ok(self.parse_sub_type(|internal_type| {
11300 DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11301 })?)
11302 } else {
11303 self.expect_token(&Token::Lt)?;
11304 let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11305 trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11306 Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11307 inside_type,
11308 ))))
11309 }
11310 }
11311 Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11312 self.prev_token();
11313 let field_defs = self.parse_duckdb_struct_type_def()?;
11314 Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11315 }
11316 Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11317 self.prev_token();
11318 let (field_defs, _trailing_bracket) =
11319 self.parse_struct_type_def(Self::parse_struct_field_def)?;
11320 trailing_bracket = _trailing_bracket;
11321 Ok(DataType::Struct(
11322 field_defs,
11323 StructBracketKind::AngleBrackets,
11324 ))
11325 }
11326 Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11327 self.prev_token();
11328 let fields = self.parse_union_type_def()?;
11329 Ok(DataType::Union(fields))
11330 }
11331 Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11332 Ok(self.parse_sub_type(DataType::Nullable)?)
11333 }
11334 Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11335 Ok(self.parse_sub_type(DataType::LowCardinality)?)
11336 }
11337 Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11338 self.prev_token();
11339 let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11340 Ok(DataType::Map(
11341 Box::new(key_data_type),
11342 Box::new(value_data_type),
11343 ))
11344 }
11345 Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11346 self.expect_token(&Token::LParen)?;
11347 let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11348 self.expect_token(&Token::RParen)?;
11349 Ok(DataType::Nested(field_defs))
11350 }
11351 Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11352 self.prev_token();
11353 let field_defs = self.parse_click_house_tuple_def()?;
11354 Ok(DataType::Tuple(field_defs))
11355 }
11356 Keyword::TRIGGER => Ok(DataType::Trigger),
11357 Keyword::SETOF => {
11358 let inner = self.parse_data_type()?;
11359 Ok(DataType::SetOf(Box::new(inner)))
11360 }
11361 Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11362 let _ = self.parse_keyword(Keyword::TYPE);
11363 Ok(DataType::AnyType)
11364 }
11365 Keyword::TABLE => {
11366 if self.peek_token() == Token::LParen {
11369 let columns = self.parse_returns_table_columns()?;
11370 Ok(DataType::Table(Some(columns)))
11371 } else {
11372 Ok(DataType::Table(None))
11373 }
11374 }
11375 Keyword::SIGNED => {
11376 if self.parse_keyword(Keyword::INTEGER) {
11377 Ok(DataType::SignedInteger)
11378 } else {
11379 Ok(DataType::Signed)
11380 }
11381 }
11382 Keyword::UNSIGNED => {
11383 if self.parse_keyword(Keyword::INTEGER) {
11384 Ok(DataType::UnsignedInteger)
11385 } else {
11386 Ok(DataType::Unsigned)
11387 }
11388 }
11389 Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11390 Ok(DataType::TsVector)
11391 }
11392 Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11393 Ok(DataType::TsQuery)
11394 }
11395 _ => {
11396 self.prev_token();
11397 let type_name = self.parse_object_name(false)?;
11398 if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11399 Ok(DataType::Custom(type_name, modifiers))
11400 } else {
11401 Ok(DataType::Custom(type_name, vec![]))
11402 }
11403 }
11404 },
11405 _ => self.expected_at("a data type name", next_token_index),
11406 }?;
11407
11408 if self.dialect.supports_array_typedef_with_brackets() {
11409 while self.consume_token(&Token::LBracket) {
11410 let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11412 self.expect_token(&Token::RBracket)?;
11413 data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11414 }
11415 }
11416 Ok((data, trailing_bracket))
11417 }
11418
    /// Parses a single column definition inside a `RETURNS TABLE (...)`
    /// clause; currently identical to an ordinary column definition.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
11422
11423 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11424 self.expect_token(&Token::LParen)?;
11425 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11426 self.expect_token(&Token::RParen)?;
11427 Ok(columns)
11428 }
11429
11430 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11431 self.expect_token(&Token::LParen)?;
11432 let mut values = Vec::new();
11433 loop {
11434 let next_token = self.next_token();
11435 match next_token.token {
11436 Token::SingleQuotedString(value) => values.push(value),
11437 _ => self.expected("a string", next_token)?,
11438 }
11439 let next_token = self.next_token();
11440 match next_token.token {
11441 Token::Comma => (),
11442 Token::RParen => break,
11443 _ => self.expected(", or }", next_token)?,
11444 }
11445 }
11446 Ok(values)
11447 }
11448
11449 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11451 let ident = self.parse_identifier()?;
11452 self.expect_keyword_is(Keyword::AS)?;
11453 let alias = self.parse_identifier()?;
11454 Ok(IdentWithAlias { ident, alias })
11455 }
11456
    /// Parses `<identifier> [AS] <alias>`.
    ///
    /// Note: despite the name, only the `AS` keyword is optional — the alias
    /// identifier itself is always required.
    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
        let ident = self.parse_identifier()?;
        // Consume `AS` if present; its presence is not recorded in the AST.
        let _after_as = self.parse_keyword(Keyword::AS);
        let alias = self.parse_identifier()?;
        Ok(IdentWithAlias { ident, alias })
    }
11464
11465 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11467 self.parse_comma_separated(|parser| {
11468 parser.expect_token(&Token::LParen)?;
11469 let query = parser.parse_query()?;
11470 parser.expect_token(&Token::RParen)?;
11471 Ok(*query)
11472 })
11473 }
11474
    /// Parses a set quantifier for a pipe operator that requires `DISTINCT`
    /// (or `DISTINCT BY NAME`), erroring otherwise.
    ///
    /// `operator_name` is used only in the error message.
    fn parse_distinct_required_set_quantifier(
        &mut self,
        operator_name: &str,
    ) -> Result<SetQuantifier, ParserError> {
        // NOTE(review): the quantifier is parsed as if for INTERSECT
        // regardless of `operator_name` — presumably the quantifier grammar
        // is identical across the pipe set operators; confirm against
        // `parse_set_quantifier`.
        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
        match quantifier {
            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
            _ => Err(ParserError::ParserError(format!(
                "{operator_name} pipe operator requires DISTINCT modifier",
            ))),
        }
    }
11488
11489 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11491 if self.parse_keyword(Keyword::AS) {
11492 Ok(Some(self.parse_identifier()?))
11493 } else {
11494 self.maybe_parse(|parser| parser.parse_identifier())
11496 }
11497 }
11498
    /// Attempts to parse an alias for a SELECT-list item, deferring to the
    /// dialect to decide whether a given keyword may serve as the alias.
    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
        // Dialect hook: decides if `kw` can be a select-item alias, given
        // whether an explicit `AS` preceded it.
        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
            parser.dialect.is_select_item_alias(explicit, kw, parser)
        }
        self.parse_optional_alias_inner(None, validator)
    }
11506
    /// Attempts to parse a table-factor alias (optionally preceded by `AS`),
    /// including any trailing alias column definitions, e.g. `t (a, b)`.
    ///
    /// Returns `None` when no alias is present. The dialect decides whether a
    /// keyword may act as the alias.
    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
        // Dialect hook: may `kw` serve as a table-factor alias?
        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
            parser.dialect.is_table_factor_alias(explicit, kw, parser)
        }
        // Record whether the alias is introduced by an explicit `AS` before
        // the inner parser consumes it.
        let explicit = self.peek_keyword(Keyword::AS);
        match self.parse_optional_alias_inner(None, validator)? {
            Some(name) => {
                let columns = self.parse_table_alias_column_defs()?;
                Ok(Some(TableAlias {
                    explicit,
                    name,
                    columns,
                }))
            }
            None => Ok(None),
        }
    }
11527
11528 fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
11529 let mut hints = vec![];
11530 while let Some(hint_type) =
11531 self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
11532 {
11533 let hint_type = match hint_type {
11534 Keyword::USE => TableIndexHintType::Use,
11535 Keyword::IGNORE => TableIndexHintType::Ignore,
11536 Keyword::FORCE => TableIndexHintType::Force,
11537 _ => {
11538 return self.expected(
11539 "expected to match USE/IGNORE/FORCE keyword",
11540 self.peek_token(),
11541 )
11542 }
11543 };
11544 let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
11545 Some(Keyword::INDEX) => TableIndexType::Index,
11546 Some(Keyword::KEY) => TableIndexType::Key,
11547 _ => {
11548 return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
11549 }
11550 };
11551 let for_clause = if self.parse_keyword(Keyword::FOR) {
11552 let clause = if self.parse_keyword(Keyword::JOIN) {
11553 TableIndexHintForClause::Join
11554 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11555 TableIndexHintForClause::OrderBy
11556 } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11557 TableIndexHintForClause::GroupBy
11558 } else {
11559 return self.expected(
11560 "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
11561 self.peek_token(),
11562 );
11563 };
11564 Some(clause)
11565 } else {
11566 None
11567 };
11568
11569 self.expect_token(&Token::LParen)?;
11570 let index_names = if self.peek_token().token != Token::RParen {
11571 self.parse_comma_separated(Parser::parse_identifier)?
11572 } else {
11573 vec![]
11574 };
11575 self.expect_token(&Token::RParen)?;
11576 hints.push(TableIndexHints {
11577 hint_type,
11578 index_type,
11579 for_clause,
11580 index_names,
11581 });
11582 }
11583 Ok(hints)
11584 }
11585
    /// Parses an optional alias, rejecting the keywords in `reserved_kwds`
    /// as bare (non-`AS`) aliases.
    ///
    /// The always-false validator means no keyword is accepted as an alias
    /// beyond the `reserved_kwds` filtering done by the inner parser.
    pub fn parse_optional_alias(
        &mut self,
        reserved_kwds: &[Keyword],
    ) -> Result<Option<Ident>, ParserError> {
        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
            false
        }
        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
    }
11598
    /// Core of optional-alias parsing shared by the alias helpers above.
    ///
    /// Behavior:
    /// * After an explicit `AS`, any word (or quoted string) is the alias,
    ///   and a non-identifier token is an error.
    /// * Without `AS`, a word is the alias if it is not in `reserved_kwds`
    ///   (when provided), or if `validator` approves it.
    /// * Single- and double-quoted strings are always accepted as aliases.
    /// * Otherwise the token is pushed back and `None` is returned.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Word accepted outright after AS, or when not a reserved keyword.
            Token::Word(w)
                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Word accepted because the dialect-specific validator allows it.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                if after_as {
                    // `AS` promised an alias; anything else is an error.
                    return self.expected("an identifier after AS", next_token);
                }
                // No alias: return the token to the stream.
                self.prev_token();
                Ok(None) }
        }
    }
11642
    /// Parses an optional `GROUP BY` clause, including `GROUP BY ALL`,
    /// `WITH ROLLUP|CUBE|TOTALS` modifiers (on dialects that support them),
    /// and a trailing `GROUPING SETS (...)` modifier.
    ///
    /// Returns `None` when no `GROUP BY` keywords are present.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` is represented as `None` expressions.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Any number of `WITH ROLLUP|CUBE|TOTALS` modifiers may follow.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords limits the match.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token().span.start
                            )
                        }
                    });
                }
            }
            // `GROUPING SETS ((...), expr, ...)` — each element is either a
            // parenthesized tuple or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
11698
    /// Parses an optional `ORDER BY` clause.
    ///
    /// Supports `ORDER BY ALL` on dialects that allow it, and ClickHouse
    /// `INTERPOLATE` after the expression list. Returns `None` when the
    /// `ORDER BY` keywords are absent.
    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            let order_by =
                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
                    // `ORDER BY ALL [ASC|DESC] [NULLS ...]`
                    let order_by_options = self.parse_order_by_options()?;
                    OrderBy {
                        kind: OrderByKind::All(order_by_options),
                        interpolate: None,
                    }
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    // ClickHouse extension: `INTERPOLATE (...)`.
                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
                        self.parse_interpolations()?
                    } else {
                        None
                    };
                    OrderBy {
                        kind: OrderByKind::Expressions(exprs),
                        interpolate,
                    }
                };
            Ok(Some(order_by))
        } else {
            Ok(None)
        }
    }
11725
    /// Parses an optional LIMIT/OFFSET clause in any of the accepted orders:
    /// `OFFSET ... LIMIT ...`, `LIMIT ... OFFSET ...`,
    /// MySQL-style `LIMIT <offset>, <limit>`, and ClickHouse `LIMIT ... BY ...`.
    ///
    /// Returns `None` when none of these keywords are present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may come first.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        // `limit` is an Option<Option<Expr>>: outer None = no LIMIT keyword,
        // Some(None) = `LIMIT ALL`-style limit with no expression.
        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL `LIMIT <offset>, <limit>` — only valid when no OFFSET was
            // already seen and the first expression exists.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                // is_some() was checked above, so this cannot actually fail.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // ClickHouse `LIMIT n BY expr, ...`.
            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::BY)
            {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT (but not appear twice).
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // Only build a clause if something meaningful was parsed; a bare
        // `LIMIT` with no expression (limit == Some(None)) alone is ignored.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
11779
11780 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
11783 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
11784 let fn_name = self.parse_object_name(false)?;
11785 self.parse_function_call(fn_name)
11786 .map(TableObject::TableFunction)
11787 } else {
11788 self.parse_object_name(false).map(TableObject::TableName)
11789 }
11790 }
11791
    /// Parses a possibly-qualified object name such as `db.schema.table`.
    ///
    /// `in_table_clause` enables BigQuery's hyphenated-identifier handling
    /// for names appearing in a table clause. Wildcard parts are not allowed.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
11801
    /// Parses a period-separated object name, handling several dialect
    /// extensions:
    /// * BigQuery hyphenated identifiers in table clauses;
    /// * `*` parts when `allow_wildcards` is set;
    /// * double-dot notation (`db..table`) where the dialect supports it;
    /// * function-call parts (e.g. `schema.func(args).member`).
    ///
    /// For BigQuery, parts whose text itself contains `.` are split into
    /// separate identifier parts at the end.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table names may contain hyphens; a part can also end
            // with a period folded into the previous token.
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token().token == Token::Mul {
                    // A `*` part, e.g. in GRANT object names.
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token().token, Token::Period)
                {
                    // `db..table`: insert an empty part; the period itself is
                    // consumed at the bottom of the loop.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a part to be a function call.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery: split any part whose text contains embedded periods into
        // separate identifier parts (quote style and span are propagated).
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
11898
    /// Collects all identifier words up to (but not including) EOF or `=`.
    ///
    /// Non-word tokens encountered along the way are silently skipped; every
    /// token except the terminator is consumed.
    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        let mut idents = vec![];
        loop {
            match &self.peek_token_ref().token {
                Token::Word(w) => {
                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
                }
                // Stop without consuming the terminator.
                Token::EOF | Token::Eq => break,
                // Anything else is skipped.
                _ => {}
            }
            self.advance_token();
        }
        Ok(idents)
    }
11914
11915 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
11955 let mut idents = vec![];
11956
11957 let next_token = self.next_token();
11959 match next_token.token {
11960 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11961 Token::EOF => {
11962 return Err(ParserError::ParserError(
11963 "Empty input when parsing identifier".to_string(),
11964 ))?
11965 }
11966 token => {
11967 return Err(ParserError::ParserError(format!(
11968 "Unexpected token in identifier: {token}"
11969 )))?
11970 }
11971 };
11972
11973 loop {
11975 match self.next_token().token {
11976 Token::Period => {
11978 let next_token = self.next_token();
11979 match next_token.token {
11980 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11981 Token::EOF => {
11982 return Err(ParserError::ParserError(
11983 "Trailing period in identifier".to_string(),
11984 ))?
11985 }
11986 token => {
11987 return Err(ParserError::ParserError(format!(
11988 "Unexpected token following period in identifier: {token}"
11989 )))?
11990 }
11991 }
11992 }
11993 Token::EOF => break,
11994 token => {
11995 return Err(ParserError::ParserError(format!(
11996 "Unexpected token in identifier: {token}"
11997 )))?;
11998 }
11999 }
12000 }
12001
12002 Ok(idents)
12003 }
12004
12005 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
12007 let next_token = self.next_token();
12008 match next_token.token {
12009 Token::Word(w) => Ok(w.into_ident(next_token.span)),
12010 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
12011 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
12012 _ => self.expected("identifier", next_token),
12013 }
12014 }
12015
    /// Parses a (possibly hyphenated) identifier for BigQuery table names,
    /// e.g. `my-project.dataset.table`.
    ///
    /// Returns the identifier and a flag indicating the identifier ended with
    /// a period that was folded into a trailing number token (e.g. `foo-123.`),
    /// so the caller should treat a period as already consumed.
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted identifiers may continue across hyphens.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        // The continuation must immediately follow the hyphen
                        // (no whitespace skipping).
                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // A number ending in '.' means the tokenizer
                                // fused the separating period into the number:
                                // keep the digits and report the period as
                                // consumed via the `true` flag.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A numeric tail must be followed by
                                // whitespace unless a period comes next.
                                !matches!(self.peek_token().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement recorded above.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word: fall back to ordinary identifier parsing.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
12093
    /// Parses the optional parenthesized column list of a `CREATE VIEW`.
    ///
    /// Returns an empty list both when no parenthesis follows and for an
    /// explicit empty list `()`. Trailing commas are allowed per dialect.
    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
        if self.consume_token(&Token::LParen) {
            if self.peek_token().token == Token::RParen {
                // Explicit empty column list: `()`.
                self.next_token();
                Ok(vec![])
            } else {
                let cols = self.parse_comma_separated_with_trailing_commas(
                    Parser::parse_view_column,
                    self.dialect.supports_column_definition_trailing_commas(),
                    Self::is_reserved_for_column_alias,
                )?;
                self.expect_token(&Token::RParen)?;
                Ok(cols)
            }
        } else {
            Ok(vec![])
        }
    }
12113
12114 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12116 let name = self.parse_identifier()?;
12117 let options = self.parse_view_column_options()?;
12118 let data_type = if dialect_of!(self is ClickHouseDialect) {
12119 Some(self.parse_data_type()?)
12120 } else {
12121 None
12122 };
12123 Ok(ViewColumnDef {
12124 name,
12125 data_type,
12126 options,
12127 })
12128 }
12129
12130 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12131 let mut options = Vec::new();
12132 loop {
12133 let option = self.parse_optional_column_option()?;
12134 if let Some(option) = option {
12135 options.push(option);
12136 } else {
12137 break;
12138 }
12139 }
12140 if options.is_empty() {
12141 Ok(None)
12142 } else if self.dialect.supports_space_separated_column_options() {
12143 Ok(Some(ColumnOptions::SpaceSeparated(options)))
12144 } else {
12145 Ok(Some(ColumnOptions::CommaSeparated(options)))
12146 }
12147 }
12148
    /// Parses a parenthesized, comma-separated list of simple column names.
    ///
    /// `optional` controls whether the parenthesis may be absent (yielding an
    /// empty list); `allow_empty` permits an explicit `()`.
    pub fn parse_parenthesized_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Ident>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
    }
12158
    /// Parses a parenthesized, comma-separated list of period-separated
    /// compound identifiers (e.g. `(t.a, t.b)`), each returned as an
    /// [`Expr::CompoundIdentifier`].
    pub fn parse_parenthesized_compound_identifier_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Expr>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            Ok(Expr::CompoundIdentifier(
                p.parse_period_separated(|p| p.parse_identifier())?,
            ))
        })
    }
12170
    /// Parses a mandatory, non-empty parenthesized list of index column
    /// expressions, as used by `CREATE INDEX`.
    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
            p.parse_create_index_expr()
        })
    }
12178
    /// Parses a parenthesized, comma-separated list of possibly-qualified
    /// column names (each parsed as an object name in table-clause mode).
    pub fn parse_parenthesized_qualified_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<ObjectName>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            p.parse_object_name(true)
        })
    }
12190
12191 fn parse_parenthesized_column_list_inner<F, T>(
12194 &mut self,
12195 optional: IsOptional,
12196 allow_empty: bool,
12197 mut f: F,
12198 ) -> Result<Vec<T>, ParserError>
12199 where
12200 F: FnMut(&mut Parser) -> Result<T, ParserError>,
12201 {
12202 if self.consume_token(&Token::LParen) {
12203 if allow_empty && self.peek_token().token == Token::RParen {
12204 self.next_token();
12205 Ok(vec![])
12206 } else {
12207 let cols = self.parse_comma_separated(|p| f(p))?;
12208 self.expect_token(&Token::RParen)?;
12209 Ok(cols)
12210 }
12211 } else if optional == Optional {
12212 Ok(vec![])
12213 } else {
12214 self.expected("a list of columns in parentheses", self.peek_token())
12215 }
12216 }
12217
12218 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12220 if self.consume_token(&Token::LParen) {
12221 let cols = self.parse_comma_separated(|p| {
12222 let name = p.parse_identifier()?;
12223 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12224 Ok(TableAliasColumnDef { name, data_type })
12225 })?;
12226 self.expect_token(&Token::RParen)?;
12227 Ok(cols)
12228 } else {
12229 Ok(vec![])
12230 }
12231 }
12232
12233 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12234 self.expect_token(&Token::LParen)?;
12235 let n = self.parse_literal_uint()?;
12236 self.expect_token(&Token::RParen)?;
12237 Ok(n)
12238 }
12239
12240 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12241 if self.consume_token(&Token::LParen) {
12242 let n = self.parse_literal_uint()?;
12243 self.expect_token(&Token::RParen)?;
12244 Ok(Some(n))
12245 } else {
12246 Ok(None)
12247 }
12248 }
12249
    /// Parses the optional field qualifier of an `INTERVAL` type, e.g.
    /// `YEAR`, `DAY TO SECOND`, `MINUTE TO SECOND`.
    ///
    /// Returns `None` when no field keyword is present; errors on a valid
    /// leading field followed by an invalid `TO` target.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            Some(Keyword::YEAR) => {
                // `YEAR [TO MONTH]`
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            Some(Keyword::DAY) => {
                // `DAY [TO HOUR|MINUTE|SECOND]`
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        // Unreachable: expect_one_of_keywords limits the match.
                        _ => {
                            self.prev_token();
                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            Some(Keyword::HOUR) => {
                // `HOUR [TO MINUTE|SECOND]`
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        _ => {
                            self.prev_token();
                            self.expected("MINUTE or SECOND", self.peek_token())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            Some(Keyword::MINUTE) => {
                // `MINUTE [TO SECOND]`
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            Some(_) => {
                self.prev_token();
                self.expected(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token(),
                )
            }
            None => Ok(None),
        }
    }
12328
12329 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12337 self.expect_keyword_is(Keyword::DATETIME64)?;
12338 self.expect_token(&Token::LParen)?;
12339 let precision = self.parse_literal_uint()?;
12340 let time_zone = if self.consume_token(&Token::Comma) {
12341 Some(self.parse_literal_string()?)
12342 } else {
12343 None
12344 };
12345 self.expect_token(&Token::RParen)?;
12346 Ok((precision, time_zone))
12347 }
12348
12349 pub fn parse_optional_character_length(
12350 &mut self,
12351 ) -> Result<Option<CharacterLength>, ParserError> {
12352 if self.consume_token(&Token::LParen) {
12353 let character_length = self.parse_character_length()?;
12354 self.expect_token(&Token::RParen)?;
12355 Ok(Some(character_length))
12356 } else {
12357 Ok(None)
12358 }
12359 }
12360
12361 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12362 if self.consume_token(&Token::LParen) {
12363 let binary_length = self.parse_binary_length()?;
12364 self.expect_token(&Token::RParen)?;
12365 Ok(Some(binary_length))
12366 } else {
12367 Ok(None)
12368 }
12369 }
12370
12371 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12372 if self.parse_keyword(Keyword::MAX) {
12373 return Ok(CharacterLength::Max);
12374 }
12375 let length = self.parse_literal_uint()?;
12376 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12377 Some(CharLengthUnits::Characters)
12378 } else if self.parse_keyword(Keyword::OCTETS) {
12379 Some(CharLengthUnits::Octets)
12380 } else {
12381 None
12382 };
12383 Ok(CharacterLength::IntegerLength { length, unit })
12384 }
12385
12386 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12387 if self.parse_keyword(Keyword::MAX) {
12388 return Ok(BinaryLength::Max);
12389 }
12390 let length = self.parse_literal_uint()?;
12391 Ok(BinaryLength::IntegerLength { length })
12392 }
12393
12394 pub fn parse_optional_precision_scale(
12395 &mut self,
12396 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12397 if self.consume_token(&Token::LParen) {
12398 let n = self.parse_literal_uint()?;
12399 let scale = if self.consume_token(&Token::Comma) {
12400 Some(self.parse_literal_uint()?)
12401 } else {
12402 None
12403 };
12404 self.expect_token(&Token::RParen)?;
12405 Ok((Some(n), scale))
12406 } else {
12407 Ok((None, None))
12408 }
12409 }
12410
12411 pub fn parse_exact_number_optional_precision_scale(
12412 &mut self,
12413 ) -> Result<ExactNumberInfo, ParserError> {
12414 if self.consume_token(&Token::LParen) {
12415 let precision = self.parse_literal_uint()?;
12416 let scale = if self.consume_token(&Token::Comma) {
12417 Some(self.parse_signed_integer()?)
12418 } else {
12419 None
12420 };
12421
12422 self.expect_token(&Token::RParen)?;
12423
12424 match scale {
12425 None => Ok(ExactNumberInfo::Precision(precision)),
12426 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12427 }
12428 } else {
12429 Ok(ExactNumberInfo::None)
12430 }
12431 }
12432
12433 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12435 let is_negative = self.consume_token(&Token::Minus);
12436
12437 if !is_negative {
12438 let _ = self.consume_token(&Token::Plus);
12439 }
12440
12441 let current_token = self.peek_token_ref();
12442 match ¤t_token.token {
12443 Token::Number(s, _) => {
12444 let s = s.clone();
12445 let span_start = current_token.span.start;
12446 self.advance_token();
12447 let value = Self::parse::<i64>(s, span_start)?;
12448 Ok(if is_negative { -value } else { value })
12449 }
12450 _ => self.expected_ref("number", current_token),
12451 }
12452 }
12453
    /// Parses the optional parenthesized modifier list of a custom type,
    /// e.g. `mytype(1, 'foo', bar)`.
    ///
    /// Modifiers may be words, numbers, or single-quoted strings; commas are
    /// skipped and `)` terminates the list. Returns `None` when no `(`
    /// follows.
    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
        if self.consume_token(&Token::LParen) {
            let mut modifiers = Vec::new();
            loop {
                let next_token = self.next_token();
                match next_token.token {
                    Token::Word(w) => modifiers.push(w.to_string()),
                    Token::Number(n, _) => modifiers.push(n),
                    Token::SingleQuotedString(s) => modifiers.push(s),

                    Token::Comma => {
                        continue;
                    }
                    Token::RParen => {
                        break;
                    }
                    _ => self.expected("type modifiers", next_token)?,
                }
            }

            Ok(Some(modifiers))
        } else {
            Ok(None)
        }
    }
12479
12480 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12482 where
12483 F: FnOnce(Box<DataType>) -> DataType,
12484 {
12485 self.expect_token(&Token::LParen)?;
12486 let inside_type = self.parse_data_type()?;
12487 self.expect_token(&Token::RParen)?;
12488 Ok(parent_type(inside_type.into()))
12489 }
12490
    /// Parse a `DELETE` statement (the `DELETE` keyword has already been
    /// consumed; its token is passed in) and box it as a `SetExpr` body for
    /// use inside a `Query`.
    fn parse_delete_setexpr_boxed(
        &mut self,
        delete_token: TokenWithSpan,
    ) -> Result<Box<SetExpr>, ParserError> {
        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
    }
12500
    /// Parse a `DELETE` statement; the `DELETE` keyword itself has already
    /// been consumed and its token is passed in for span tracking.
    ///
    /// Handles both `DELETE FROM t ...` and the multi-table form
    /// `DELETE t1, t2 FROM ...`, plus the optional `USING`, `WHERE`,
    /// `RETURNING`, `ORDER BY` and `LIMIT` clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            // BigQuery / Oracle (and the generic dialect) allow omitting FROM
            // entirely: `DELETE <table> WHERE ...`.
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                // Multi-table delete: `DELETE t1, t2 FROM ...`.
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            tables,
            // Record whether the FROM keyword was actually present so the
            // statement round-trips to the original syntax.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            order_by,
            limit,
        }))
    }
12558
12559 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
12561 let modifier_keyword =
12562 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
12563
12564 let id = self.parse_literal_uint()?;
12565
12566 let modifier = match modifier_keyword {
12567 Some(Keyword::CONNECTION) => Some(KillType::Connection),
12568 Some(Keyword::QUERY) => Some(KillType::Query),
12569 Some(Keyword::MUTATION) => {
12570 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
12571 Some(KillType::Mutation)
12572 } else {
12573 self.expected(
12574 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
12575 self.peek_token(),
12576 )?
12577 }
12578 }
12579 _ => None,
12580 };
12581
12582 Ok(Statement::Kill { modifier, id })
12583 }
12584
    /// Parse an `EXPLAIN`/`DESCRIBE` statement (which alias was used is given
    /// by `describe_alias`). If the token stream continues with a parseable
    /// statement this produces `Statement::Explain`; otherwise it falls back
    /// to `Statement::ExplainTable` (`DESCRIBE <table>`).
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options (e.g. `EXPLAIN (ANALYZE, FORMAT ...)`)
        // are mutually exclusive with the bare keyword modifiers below.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // A nested EXPLAIN/DESCRIBE is rejected explicitly.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // No statement followed: treat as
                // `DESCRIBE [EXTENDED | FORMATTED] [TABLE] <name>`.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                // Record whether the optional TABLE keyword was present, but
                // only look for it in dialects that use it.
                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
12654
    /// Parse a full query expression: an optional `WITH` clause followed by
    /// either a DML statement (`INSERT`/`UPDATE`/`DELETE`/`MERGE`) or a query
    /// body plus its optional trailing clauses (`ORDER BY`, limit, `FETCH`,
    /// `FOR`-locking/output clauses, ClickHouse `SETTINGS`/`FORMAT`, and
    /// pipe operators).
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Queries nest (subqueries, CTEs); each level consumes one unit of
        // the shared recursion budget to guard against stack overflow.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        // DML bodies carry no trailing query clauses: all optional fields
        // are left empty.
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // Each `FOR` either starts a FOR XML/JSON/BROWSE output clause
            // (which ends the loop) or a locking clause.
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // ClickHouse `FORMAT <ident>` / `FORMAT NULL` output clause.
            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::FORMAT)
            {
                if self.parse_keyword(Keyword::NULL) {
                    Some(FormatClause::Null)
                } else {
                    let ident = self.parse_identifier()?;
                    Some(FormatClause::Identifier(ident))
                }
            } else {
                None
            };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
12786
    /// Parse a sequence of `|>` pipe operators (BigQuery pipe syntax).
    ///
    /// Each `|>` must be followed by one of the keywords listed in the
    /// `expect_one_of_keywords` call below; the matching arm then parses the
    /// operator's arguments. Returns all operators in source order.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    // `LIMIT <expr> [OFFSET <expr>]`.
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // Optional full-table aggregate expressions, then an
                    // optional `GROUP BY` list; either part may be empty.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                // INTERSECT / EXCEPT require an explicit DISTINCT quantifier.
                Keyword::INTERSECT => {
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    // `PIVOT ( aggs FOR col IN ( source ) ) [alias]`.
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    // `UNPIVOT ( value FOR name IN ( cols ) ) [alias]`.
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Push the join keyword back so parse_joins sees it.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                // Unreachable: every keyword accepted above has an arm.
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
13005
13006 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
13007 let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13008 && self.parse_keyword(Keyword::SETTINGS)
13009 {
13010 let key_values = self.parse_comma_separated(|p| {
13011 let key = p.parse_identifier()?;
13012 p.expect_token(&Token::Eq)?;
13013 let value = p.parse_expr()?;
13014 Ok(Setting { key, value })
13015 })?;
13016 Some(key_values)
13017 } else {
13018 None
13019 };
13020 Ok(settings)
13021 }
13022
13023 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
13025 if self.parse_keyword(Keyword::XML) {
13026 Ok(Some(self.parse_for_xml()?))
13027 } else if self.parse_keyword(Keyword::JSON) {
13028 Ok(Some(self.parse_for_json()?))
13029 } else if self.parse_keyword(Keyword::BROWSE) {
13030 Ok(Some(ForClause::Browse))
13031 } else {
13032 Ok(None)
13033 }
13034 }
13035
    /// Parse the body of a `FOR XML` clause: the mode
    /// (`RAW [('elem')]`, `AUTO`, `EXPLICIT`, or `PATH [('elem')]`) followed
    /// by optional comma-separated options (`ELEMENTS`, `BINARY BASE64`,
    /// `ROOT('name')`, `TYPE`).
    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
        let for_xml = if self.parse_keyword(Keyword::RAW) {
            // RAW takes an optional parenthesized element name.
            let mut element_name = None;
            if self.peek_token().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Raw(element_name)
        } else if self.parse_keyword(Keyword::AUTO) {
            ForXml::Auto
        } else if self.parse_keyword(Keyword::EXPLICIT) {
            ForXml::Explicit
        } else if self.parse_keyword(Keyword::PATH) {
            // PATH likewise takes an optional element name.
            let mut element_name = None;
            if self.peek_token().token == Token::LParen {
                self.expect_token(&Token::LParen)?;
                element_name = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            }
            ForXml::Path(element_name)
        } else {
            return Err(ParserError::ParserError(
                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
            ));
        };
        let mut elements = false;
        let mut binary_base64 = false;
        let mut root = None;
        let mut r#type = false;
        // Options are comma-separated. An unrecognized word after a comma is
        // left unconsumed (none of the branches match), ending the loop.
        while self.peek_token().token == Token::Comma {
            self.next_token();
            if self.parse_keyword(Keyword::ELEMENTS) {
                elements = true;
            } else if self.parse_keyword(Keyword::BINARY) {
                self.expect_keyword_is(Keyword::BASE64)?;
                binary_base64 = true;
            } else if self.parse_keyword(Keyword::ROOT) {
                self.expect_token(&Token::LParen)?;
                root = Some(self.parse_literal_string()?);
                self.expect_token(&Token::RParen)?;
            } else if self.parse_keyword(Keyword::TYPE) {
                r#type = true;
            }
        }
        Ok(ForClause::Xml {
            for_xml,
            elements,
            binary_base64,
            root,
            r#type,
        })
    }
13090
13091 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
13093 let for_json = if self.parse_keyword(Keyword::AUTO) {
13094 ForJson::Auto
13095 } else if self.parse_keyword(Keyword::PATH) {
13096 ForJson::Path
13097 } else {
13098 return Err(ParserError::ParserError(
13099 "Expected FOR JSON [AUTO | PATH ]".to_string(),
13100 ));
13101 };
13102 let mut root = None;
13103 let mut include_null_values = false;
13104 let mut without_array_wrapper = false;
13105 while self.peek_token().token == Token::Comma {
13106 self.next_token();
13107 if self.parse_keyword(Keyword::ROOT) {
13108 self.expect_token(&Token::LParen)?;
13109 root = Some(self.parse_literal_string()?);
13110 self.expect_token(&Token::RParen)?;
13111 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
13112 include_null_values = true;
13113 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
13114 without_array_wrapper = true;
13115 }
13116 }
13117 Ok(ForClause::Json {
13118 for_json,
13119 root,
13120 include_null_values,
13121 without_array_wrapper,
13122 })
13123 }
13124
13125 pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13127 let name = self.parse_identifier()?;
13128
13129 let mut cte = if self.parse_keyword(Keyword::AS) {
13130 let mut is_materialized = None;
13131 if dialect_of!(self is PostgreSqlDialect) {
13132 if self.parse_keyword(Keyword::MATERIALIZED) {
13133 is_materialized = Some(CteAsMaterialized::Materialized);
13134 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13135 is_materialized = Some(CteAsMaterialized::NotMaterialized);
13136 }
13137 }
13138 self.expect_token(&Token::LParen)?;
13139
13140 let query = self.parse_query()?;
13141 let closing_paren_token = self.expect_token(&Token::RParen)?;
13142
13143 let alias = TableAlias {
13144 explicit: false,
13145 name,
13146 columns: vec![],
13147 };
13148 Cte {
13149 alias,
13150 query,
13151 from: None,
13152 materialized: is_materialized,
13153 closing_paren_token: closing_paren_token.into(),
13154 }
13155 } else {
13156 let columns = self.parse_table_alias_column_defs()?;
13157 self.expect_keyword_is(Keyword::AS)?;
13158 let mut is_materialized = None;
13159 if dialect_of!(self is PostgreSqlDialect) {
13160 if self.parse_keyword(Keyword::MATERIALIZED) {
13161 is_materialized = Some(CteAsMaterialized::Materialized);
13162 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13163 is_materialized = Some(CteAsMaterialized::NotMaterialized);
13164 }
13165 }
13166 self.expect_token(&Token::LParen)?;
13167
13168 let query = self.parse_query()?;
13169 let closing_paren_token = self.expect_token(&Token::RParen)?;
13170
13171 let alias = TableAlias {
13172 explicit: false,
13173 name,
13174 columns,
13175 };
13176 Cte {
13177 alias,
13178 query,
13179 from: None,
13180 materialized: is_materialized,
13181 closing_paren_token: closing_paren_token.into(),
13182 }
13183 };
13184 if self.parse_keyword(Keyword::FROM) {
13185 cte.from = Some(self.parse_identifier()?);
13186 }
13187 Ok(cte)
13188 }
13189
    /// Parse a "query body": a `SELECT`, a parenthesized subquery, a
    /// `VALUES` list, or a `TABLE` expression, then fold in any trailing set
    /// operations (`UNION` etc.) binding tighter than `precedence`.
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            // Parenthesized subquery: `( <query> )`.
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, false)?)
        } else if self.parse_keyword(Keyword::VALUE) {
            // Singular `VALUE` variant of a values list.
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, true)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
13227
13228 fn parse_remaining_set_exprs(
13232 &mut self,
13233 mut expr: SetExpr,
13234 precedence: u8,
13235 ) -> Result<Box<SetExpr>, ParserError> {
13236 loop {
13237 let op = self.parse_set_operator(&self.peek_token().token);
13239 let next_precedence = match op {
13240 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13242 10
13243 }
13244 Some(SetOperator::Intersect) => 20,
13246 None => break,
13248 };
13249 if precedence >= next_precedence {
13250 break;
13251 }
13252 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
13254 expr = SetExpr::SetOperation {
13255 left: Box::new(expr),
13256 op: op.unwrap(),
13257 set_quantifier,
13258 right: self.parse_query_body(next_precedence)?,
13259 };
13260 }
13261
13262 Ok(expr.into())
13263 }
13264
13265 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13266 match token {
13267 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13268 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13269 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13270 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13271 _ => None,
13272 }
13273 }
13274
    /// Parse the optional quantifier that may follow a set operator:
    /// `ALL`, `DISTINCT`, `BY NAME`, `ALL BY NAME`, or `DISTINCT BY NAME`.
    ///
    /// When `op` is `None`, nothing is consumed and `SetQuantifier::None`
    /// is returned.
    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
        match op {
            Some(
                SetOperator::Except
                | SetOperator::Intersect
                | SetOperator::Union
                | SetOperator::Minus,
            ) => {
                // Longer keyword sequences are tried first so that e.g.
                // `DISTINCT BY NAME` is not consumed as a bare `DISTINCT`.
                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::DistinctByName
                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                    SetQuantifier::ByName
                } else if self.parse_keyword(Keyword::ALL) {
                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
                        SetQuantifier::AllByName
                    } else {
                        SetQuantifier::All
                    }
                } else if self.parse_keyword(Keyword::DISTINCT) {
                    SetQuantifier::Distinct
                } else {
                    SetQuantifier::None
                }
            }
            _ => SetQuantifier::None,
        }
    }
13302
    /// Parse a single `SELECT` (projection, `FROM`, `WHERE`, `GROUP BY`,
    /// `HAVING`, window/qualify, etc.) without any leading `WITH` or trailing
    /// set operations — those are handled by the callers.
    ///
    /// Also supports dialects with "FROM-first" syntax where the `FROM`
    /// clause precedes (or entirely replaces) the `SELECT` keyword.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            // FROM-first form: `FROM t [SELECT ...]`.
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                // `FROM t` with no SELECT at all: everything else is empty.
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    distinct: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: None,
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let value_table_mode = self.parse_value_table_mode()?;

        // `TOP` may come before or after DISTINCT depending on the dialect;
        // record which order was used so it round-trips.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }
        let distinct = self.parse_all_or_distinct()?;
        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM t` with no projection at all.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Use the FROM clause parsed up front (FROM-first form) if any;
        // otherwise parse an optional trailing FROM here.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Zero or more `LATERAL VIEW [OUTER] expr name [AS aliases]`.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        // ClickHouse `PREWHERE` extension.
        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; remember which came
        // first so the statement round-trips.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        // `START WITH ... CONNECT BY ...` (either order); the lookahead
        // keyword is pushed back so parse_connect_by sees it.
        let connect_by = if self.dialect.supports_connect_by()
            && self
                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
                .is_some()
        {
            self.prev_token();
            Some(self.parse_connect_by()?)
        } else {
            None
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            distinct,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
13522
    /// Parse BigQuery's value-table modifiers that may follow `SELECT`:
    /// `[ALL | DISTINCT] AS VALUE` or `[ALL | DISTINCT] AS STRUCT`.
    /// Returns `Ok(None)` unchanged for all other dialects.
    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
        if !dialect_of!(self is BigQueryDialect) {
            return Ok(None);
        }

        // Longer keyword sequences are tried first so `DISTINCT AS ...` is
        // not consumed as a bare DISTINCT projection quantifier.
        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
            Some(ValueTableMode::DistinctAsValue)
        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
            Some(ValueTableMode::DistinctAsStruct)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
        {
            Some(ValueTableMode::AsValue)
        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
        {
            Some(ValueTableMode::AsStruct)
        } else if self.parse_keyword(Keyword::AS) {
            // A bare `AS` here must be followed by VALUE or STRUCT.
            self.expected("VALUE or STRUCT", self.peek_token())?
        } else {
            None
        };

        Ok(mode)
    }
13548
13549 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13553 where
13554 F: FnMut(&mut Parser) -> Result<T, ParserError>,
13555 {
13556 let current_state = self.state;
13557 self.state = state;
13558 let res = f(self);
13559 self.state = current_state;
13560 res
13561 }
13562
13563 pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
13564 let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
13565 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13566 parser.parse_comma_separated(Parser::parse_expr)
13567 })?;
13568 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13569 let condition = self.parse_expr()?;
13570 (condition, relationships)
13571 } else {
13572 self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13573 let condition = self.parse_expr()?;
13574 self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
13575 let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13576 parser.parse_comma_separated(Parser::parse_expr)
13577 })?;
13578 (condition, relationships)
13579 };
13580 Ok(ConnectBy {
13581 condition,
13582 relationships,
13583 })
13584 }
13585
13586 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13588 let token1 = self.next_token();
13589 let token2 = self.next_token();
13590 let token3 = self.next_token();
13591
13592 let table_name;
13593 let schema_name;
13594 if token2 == Token::Period {
13595 match token1.token {
13596 Token::Word(w) => {
13597 schema_name = w.value;
13598 }
13599 _ => {
13600 return self.expected("Schema name", token1);
13601 }
13602 }
13603 match token3.token {
13604 Token::Word(w) => {
13605 table_name = w.value;
13606 }
13607 _ => {
13608 return self.expected("Table name", token3);
13609 }
13610 }
13611 Ok(Table {
13612 table_name: Some(table_name),
13613 schema_name: Some(schema_name),
13614 })
13615 } else {
13616 match token1.token {
13617 Token::Word(w) => {
13618 table_name = w.value;
13619 }
13620 _ => {
13621 return self.expected("Table name", token1);
13622 }
13623 }
13624 Ok(Table {
13625 table_name: Some(table_name),
13626 schema_name: None,
13627 })
13628 }
13629 }
13630
13631 fn parse_set_role(
13633 &mut self,
13634 modifier: Option<ContextModifier>,
13635 ) -> Result<Statement, ParserError> {
13636 self.expect_keyword_is(Keyword::ROLE)?;
13637
13638 let role_name = if self.parse_keyword(Keyword::NONE) {
13639 None
13640 } else {
13641 Some(self.parse_identifier()?)
13642 };
13643 Ok(Statement::Set(Set::SetRole {
13644 context_modifier: modifier,
13645 role_name,
13646 }))
13647 }
13648
13649 fn parse_set_values(
13650 &mut self,
13651 parenthesized_assignment: bool,
13652 ) -> Result<Vec<Expr>, ParserError> {
13653 let mut values = vec![];
13654
13655 if parenthesized_assignment {
13656 self.expect_token(&Token::LParen)?;
13657 }
13658
13659 loop {
13660 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13661 expr
13662 } else if let Ok(expr) = self.parse_expr() {
13663 expr
13664 } else {
13665 self.expected("variable value", self.peek_token())?
13666 };
13667
13668 values.push(value);
13669 if self.consume_token(&Token::Comma) {
13670 continue;
13671 }
13672
13673 if parenthesized_assignment {
13674 self.expect_token(&Token::RParen)?;
13675 }
13676 return Ok(values);
13677 }
13678 }
13679
13680 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13681 let modifier =
13682 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13683
13684 Self::keyword_to_modifier(modifier)
13685 }
13686
13687 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13689 let scope = self.parse_context_modifier();
13690
13691 let name = if self.dialect.supports_parenthesized_set_variables()
13692 && self.consume_token(&Token::LParen)
13693 {
13694 self.expected("Unparenthesized assignment", self.peek_token())?
13698 } else {
13699 self.parse_object_name(false)?
13700 };
13701
13702 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13703 return self.expected("assignment operator", self.peek_token());
13704 }
13705
13706 let value = self.parse_expr()?;
13707
13708 Ok(SetAssignment { scope, name, value })
13709 }
13710
    /// Parses the body of a `SET` statement (the `SET` keyword itself has
    /// already been consumed by the caller).
    ///
    /// Dispatches between the many `SET` flavors visible below:
    /// `SET ROLE`, `SET TIME ZONE`/`SET TIMEZONE`, `SET NAMES`,
    /// transaction characteristics, `SET ... AUTHORIZATION`,
    /// comma-separated multi-assignments, parenthesized tuple assignments,
    /// plain `SET var = value` / `SET var TO value`, and (for dialects that
    /// allow it) operator-less session parameters.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive's `SET hivevar:name = value` namespace prefix.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // Optional SESSION/LOCAL/GLOBAL scope; not probed when HIVEVAR was
        // present.
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // Speculatively try `SET [scope] ROLE ...`; `maybe_parse` rewinds on
        // failure so `ROLE` can still be treated as an ordinary variable.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                // With an explicit `=`/`TO` operator this is an ordinary
                // assignment to the pseudo-variable TIMEZONE.
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                // Operator-less `SET TIME ZONE <value>`.
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            // Optional `COLLATE <literal>` suffix.
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET ... CHARACTERISTICS AS TRANSACTION <modes>`: session-wide
            // transaction defaults.
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            if self.parse_keyword(Keyword::SNAPSHOT) {
                // `SET TRANSACTION SNAPSHOT <id>`.
                let snapshot_id = self.parse_value()?.value;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET <scope> AUTHORIZATION DEFAULT | <user>`.
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            // NOTE(review): panics if no SESSION/LOCAL/GLOBAL scope was
            // parsed above — presumably unreachable because this branch is
            // only meaningful with a scope keyword; confirm.
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope: scope.expect("SET ... AUTHORIZATION must have a scope"),
                kind: auth_value,
            })
            .into());
        }

        if self.dialect.supports_comma_separated_set_assignments() {
            // Rewind the scope keyword so each assignment can carry its own
            // per-assignment scope in `parse_set_assignment`.
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten into SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Either a parenthesized variable tuple `(a, b, ...)` or a single
        // (possibly qualified) variable name.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // Operator-less `SET param value`: rewind one token and re-parse as
        // a session parameter (NOTE(review): assumes the consumed name was a
        // single token — confirm for multi-part names).
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected("equals sign or TO", self.peek_token())
    }
13871
    /// Parses operator-less `SET <param> <value>` session statements:
    /// `SET STATISTICS ...`, `SET IDENTITY_INSERT ...`, `SET OFFSETS ...`,
    /// or a generic comma-separated name list followed by an expression.
    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::STATISTICS) {
            // `SET STATISTICS IO|PROFILE|TIME|XML ON|OFF`.
            let topic = match self.parse_one_of_keywords(&[
                Keyword::IO,
                Keyword::PROFILE,
                Keyword::TIME,
                Keyword::XML,
            ]) {
                Some(Keyword::IO) => SessionParamStatsTopic::IO,
                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
            };
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
                    topic,
                    value,
                }))
                .into(),
            )
        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
            // `SET IDENTITY_INSERT <table> ON|OFF`.
            let obj = self.parse_object_name(false)?;
            let value = self.parse_session_param_value()?;
            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
                SetSessionParamIdentityInsert { obj, value },
            ))
            .into())
        } else if self.parse_keyword(Keyword::OFFSETS) {
            // `SET OFFSETS <keyword, ...> ON|OFF`; each list element must be
            // a bare word, kept as its string rendering.
            let keywords = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match &next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("SQL keyword", next_token),
                }
            })?;
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
                    keywords,
                    value,
                }))
                .into(),
            )
        } else {
            // Generic fallback: `SET <name, ...> <expr>`; the value is
            // stored as the expression's string rendering.
            let names = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("Session param name", next_token),
                }
            })?;
            let value = self.parse_expr()?.to_string();
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
                    names,
                    value,
                }))
                .into(),
            )
        }
    }
13935
13936 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
13937 if self.parse_keyword(Keyword::ON) {
13938 Ok(SessionParamValue::On)
13939 } else if self.parse_keyword(Keyword::OFF) {
13940 Ok(SessionParamValue::Off)
13941 } else {
13942 self.expected("ON or OFF", self.peek_token())
13943 }
13944 }
13945
    /// Parses a `SHOW ...` statement (the `SHOW` keyword has already been
    /// consumed), dispatching on the object kind that follows.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        // Leading modifiers; each is optional and probed in this fixed
        // order, so e.g. `FULL` must precede `SESSION` in the input.
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if extended || full {
            // EXTENDED/FULL only combine with the COLUMNS/FIELDS/TABLES
            // forms handled above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): `VARIABLES` is consumed before the dialect check
            // short-circuits; for other dialects the keyword is swallowed
            // and parsing falls through to the generic ShowVariable branch
            // below — confirm this is intended. Same applies to `STATUS`.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <identifier ...>` for an arbitrary variable.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
14004
14005 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
14006 Ok(Statement::ShowCharset(ShowCharset {
14008 is_shorthand,
14009 filter: self.parse_show_statement_filter()?,
14010 }))
14011 }
14012
14013 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
14014 let history = self.parse_keyword(Keyword::HISTORY);
14015 let show_options = self.parse_show_stmt_options()?;
14016 Ok(Statement::ShowDatabases {
14017 terse,
14018 history,
14019 show_options,
14020 })
14021 }
14022
14023 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
14024 let history = self.parse_keyword(Keyword::HISTORY);
14025 let show_options = self.parse_show_stmt_options()?;
14026 Ok(Statement::ShowSchemas {
14027 terse,
14028 history,
14029 show_options,
14030 })
14031 }
14032
14033 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
14034 let obj_type = match self.expect_one_of_keywords(&[
14035 Keyword::TABLE,
14036 Keyword::TRIGGER,
14037 Keyword::FUNCTION,
14038 Keyword::PROCEDURE,
14039 Keyword::EVENT,
14040 Keyword::VIEW,
14041 ])? {
14042 Keyword::TABLE => Ok(ShowCreateObject::Table),
14043 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
14044 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
14045 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
14046 Keyword::EVENT => Ok(ShowCreateObject::Event),
14047 Keyword::VIEW => Ok(ShowCreateObject::View),
14048 keyword => Err(ParserError::ParserError(format!(
14049 "Unable to map keyword to ShowCreateObject: {keyword:?}"
14050 ))),
14051 }?;
14052
14053 let obj_name = self.parse_object_name(false)?;
14054
14055 Ok(Statement::ShowCreate { obj_type, obj_name })
14056 }
14057
14058 pub fn parse_show_columns(
14059 &mut self,
14060 extended: bool,
14061 full: bool,
14062 ) -> Result<Statement, ParserError> {
14063 let show_options = self.parse_show_stmt_options()?;
14064 Ok(Statement::ShowColumns {
14065 extended,
14066 full,
14067 show_options,
14068 })
14069 }
14070
14071 fn parse_show_tables(
14072 &mut self,
14073 terse: bool,
14074 extended: bool,
14075 full: bool,
14076 external: bool,
14077 ) -> Result<Statement, ParserError> {
14078 let history = !external && self.parse_keyword(Keyword::HISTORY);
14079 let show_options = self.parse_show_stmt_options()?;
14080 Ok(Statement::ShowTables {
14081 terse,
14082 history,
14083 extended,
14084 full,
14085 external,
14086 show_options,
14087 })
14088 }
14089
14090 fn parse_show_views(
14091 &mut self,
14092 terse: bool,
14093 materialized: bool,
14094 ) -> Result<Statement, ParserError> {
14095 let show_options = self.parse_show_stmt_options()?;
14096 Ok(Statement::ShowViews {
14097 materialized,
14098 terse,
14099 show_options,
14100 })
14101 }
14102
14103 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
14104 let filter = self.parse_show_statement_filter()?;
14105 Ok(Statement::ShowFunctions { filter })
14106 }
14107
14108 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
14109 let filter = self.parse_show_statement_filter()?;
14110 Ok(Statement::ShowCollation { filter })
14111 }
14112
14113 pub fn parse_show_statement_filter(
14114 &mut self,
14115 ) -> Result<Option<ShowStatementFilter>, ParserError> {
14116 if self.parse_keyword(Keyword::LIKE) {
14117 Ok(Some(ShowStatementFilter::Like(
14118 self.parse_literal_string()?,
14119 )))
14120 } else if self.parse_keyword(Keyword::ILIKE) {
14121 Ok(Some(ShowStatementFilter::ILike(
14122 self.parse_literal_string()?,
14123 )))
14124 } else if self.parse_keyword(Keyword::WHERE) {
14125 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14126 } else {
14127 self.maybe_parse(|parser| -> Result<String, ParserError> {
14128 parser.parse_literal_string()
14129 })?
14130 .map_or(Ok(None), |filter| {
14131 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14132 })
14133 }
14134 }
14135
14136 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14137 let parsed_keyword = if dialect_of!(self is HiveDialect) {
14139 if self.parse_keyword(Keyword::DEFAULT) {
14141 return Ok(Statement::Use(Use::Default));
14142 }
14143 None } else if dialect_of!(self is DatabricksDialect) {
14145 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14146 } else if dialect_of!(self is SnowflakeDialect) {
14147 self.parse_one_of_keywords(&[
14148 Keyword::DATABASE,
14149 Keyword::SCHEMA,
14150 Keyword::WAREHOUSE,
14151 Keyword::ROLE,
14152 Keyword::SECONDARY,
14153 ])
14154 } else {
14155 None };
14157
14158 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14159 self.parse_secondary_roles()?
14160 } else {
14161 let obj_name = self.parse_object_name(false)?;
14162 match parsed_keyword {
14163 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14164 Some(Keyword::DATABASE) => Use::Database(obj_name),
14165 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14166 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14167 Some(Keyword::ROLE) => Use::Role(obj_name),
14168 _ => Use::Object(obj_name),
14169 }
14170 };
14171
14172 Ok(Statement::Use(result))
14173 }
14174
14175 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14176 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14177 if self.parse_keyword(Keyword::NONE) {
14178 Ok(Use::SecondaryRoles(SecondaryRoles::None))
14179 } else if self.parse_keyword(Keyword::ALL) {
14180 Ok(Use::SecondaryRoles(SecondaryRoles::All))
14181 } else {
14182 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14183 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14184 }
14185 }
14186
14187 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14188 let relation = self.parse_table_factor()?;
14189 let joins = self.parse_joins()?;
14193 Ok(TableWithJoins { relation, joins })
14194 }
14195
    /// Parses zero or more join clauses that follow an initial table factor.
    ///
    /// Each loop iteration recognizes one join (CROSS JOIN/APPLY,
    /// OUTER APPLY, ASOF, NATURAL/INNER/LEFT/RIGHT/FULL/SEMI/ANTI/
    /// STRAIGHT_JOIN variants) and stops at the first token that cannot
    /// start a join.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix before the join keyword
            // (NOTE(review): presumably ClickHouse's GLOBAL JOIN — confirm).
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow an ON/USING constraint on CROSS JOIN;
                // re-wrap the operator with the parsed constraint.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // Bare OUTER must be followed by APPLY here; OUTER JOIN
                // variants are reached through LEFT/RIGHT/FULL below.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // `ASOF JOIN <relation> MATCH_CONDITION (<expr>) [constraint]`.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // `join_operator_type` is a tuple-variant constructor
                // (`fn(JoinConstraint) -> JoinOperator`); the constraint is
                // supplied later, after the joined relation is parsed.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the peeked LEFT/RIGHT keyword.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        // Consume the peeked ANTI keyword.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        // Consume the peeked SEMI keyword.
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // Consume FULL, then an optional OUTER, then JOIN.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        // Bare OUTER without LEFT/RIGHT/FULL (and not APPLY,
                        // which was handled above) is invalid.
                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        // Consume the peeked STRAIGHT_JOIN keyword.
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self.expected("a join type after NATURAL", self.peek_token());
                    }
                    // Not a join keyword: stop collecting joins.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // For dialects without left-associative parens-less joins,
                // fold the remaining joins into a nested join on the
                // right-hand relation (right associativity).
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
14363
14364 fn peek_parens_less_nested_join(&self) -> bool {
14365 matches!(
14366 self.peek_token_ref().token,
14367 Token::Word(Word {
14368 keyword: Keyword::JOIN
14369 | Keyword::INNER
14370 | Keyword::LEFT
14371 | Keyword::RIGHT
14372 | Keyword::FULL,
14373 ..
14374 })
14375 )
14376 }
14377
    /// Parses one table factor of a FROM clause: a LATERAL subquery or
    /// function, `TABLE(expr)`, a parenthesized derived table or nested
    /// join, a bare `VALUES (...)`, `UNNEST(...)`, JSON_TABLE/OPENJSON/
    /// XMLTABLE/SEMANTIC_VIEW, or a plain (possibly decorated) table name —
    /// each optionally followed by PIVOT/UNPIVOT and MATCH_RECOGNIZE.
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL must be followed by a subquery or a function call.
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // `TABLE(<expr>)` table-function wrapper.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // `(...)`: either a derived table (subquery) or a parenthesized
            // join. The subquery interpretation is tried speculatively
            // first; `maybe_parse` rewinds on failure.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                // A derived table may be wrapped by any number of
                // PIVOT/UNPIVOT transformations.
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            // Not a subquery: parse the parenthesized contents as a table
            // with joins.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // e.g. `(a NATURAL JOIN b) [alias]`: a true nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // e.g. `((a NATURAL JOIN b)) [alias]`: redundant parentheses
                // around an already-nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
                // Dialects that tolerate redundant parentheses around a
                // single relation, e.g. `(mytable) [alias]`: unwrap and move
                // any outer alias onto the inner relation.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            // An alias both inside and outside the parens is
                            // ambiguous — reject it.
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                self.expected("joined table", self.peek_token())
            }
        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // Bare `VALUES (...)` used directly as a table factor; modeled
            // as a derived table wrapping a minimal Query over the rows.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // `UNNEST(<expr, ...>) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]`.
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // A failed `WITH OFFSET` match simply means the clause is
            // absent; the error is discarded.
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // `JSON_TABLE(<json expr>, <path> COLUMNS (<defs>)) [alias]`.
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?.value;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Rewind so the helper sees the full `OPENJSON(` sequence.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            // Rewind so the helper sees the full `XMLTABLE(` sequence.
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else {
            // Plain table name, possibly decorated by a PartiQL path,
            // PARTITION list, version clause, function args, sampling,
            // alias, index/WITH hints, PIVOT/UNPIVOT and MATCH_RECOGNIZE.
            let name = self.parse_object_name(true)?;

            // PartiQL allows a JSON path right after the name.
            let json_path = match self.peek_token().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            // MySQL `PARTITION (p0, p1, ...)` selection.
            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Optional temporal/version clause.
            let version = self.maybe_parse_table_version()?;

            // A parenthesized argument list turns the name into a
            // table-valued function call.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Sample-clause placement relative to the alias is dialect
            // dependent: before the alias here, after it further below.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            // Index hints (e.g. USE/FORCE/IGNORE INDEX), parsed
            // speculatively.
            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // `WITH (<hints>)`: a `WITH` not followed by `(` belongs to a
            // later clause, so rewind it.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Any number of PIVOT/UNPIVOT wrappers.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            // Optional trailing MATCH_RECOGNIZE clause.
            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
14708
14709 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14710 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14711 TableSampleModifier::TableSample
14712 } else if self.parse_keyword(Keyword::SAMPLE) {
14713 TableSampleModifier::Sample
14714 } else {
14715 return Ok(None);
14716 };
14717 self.parse_table_sample(modifier).map(Some)
14718 }
14719
    /// Parses the body of a `TABLESAMPLE`/`SAMPLE` clause; the introducing
    /// keyword has already been consumed and is captured in `modifier`.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method keyword.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        let parenthesized = self.consume_token(&Token::LParen);

        // Either a Hive-style `(BUCKET x OUT OF y [ON expr])`, or a plain
        // quantity with an optional ROWS/PERCENT unit.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?.value;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?.value;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    // Not parseable as an expression: accept a bare word
                    // (e.g. a byte length like `100M`) as a placeholder value.
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token().span.start
                        );
                    }
                }
            };
            // Optional unit after the quantity.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        // Close the paren opened above, if any.
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional `REPEATABLE(seed)` / `SEED(seed)`.
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional `OFFSET expr` suffix.
        // NOTE(review): presumably dialect-specific (ClickHouse-style) —
        // confirm against the dialect tests.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
14814
14815 fn parse_table_sample_seed(
14816 &mut self,
14817 modifier: TableSampleSeedModifier,
14818 ) -> Result<TableSampleSeed, ParserError> {
14819 self.expect_token(&Token::LParen)?;
14820 let value = self.parse_number_value()?.value;
14821 self.expect_token(&Token::RParen)?;
14822 Ok(TableSampleSeed { modifier, value })
14823 }
14824
14825 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14828 self.expect_token(&Token::LParen)?;
14829 let json_expr = self.parse_expr()?;
14830 let json_path = if self.consume_token(&Token::Comma) {
14831 Some(self.parse_value()?.value)
14832 } else {
14833 None
14834 };
14835 self.expect_token(&Token::RParen)?;
14836 let columns = if self.parse_keyword(Keyword::WITH) {
14837 self.expect_token(&Token::LParen)?;
14838 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
14839 self.expect_token(&Token::RParen)?;
14840 columns
14841 } else {
14842 Vec::new()
14843 };
14844 let alias = self.maybe_parse_table_alias()?;
14845 Ok(TableFactor::OpenJsonTable {
14846 json_expr,
14847 json_path,
14848 columns,
14849 alias,
14850 })
14851 }
14852
14853 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14854 self.expect_token(&Token::LParen)?;
14855 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
14856 self.expect_token(&Token::LParen)?;
14857 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
14858 self.expect_token(&Token::RParen)?;
14859 self.expect_token(&Token::Comma)?;
14860 namespaces
14861 } else {
14862 vec![]
14863 };
14864 let row_expression = self.parse_expr()?;
14865 let passing = self.parse_xml_passing_clause()?;
14866 self.expect_keyword_is(Keyword::COLUMNS)?;
14867 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
14868 self.expect_token(&Token::RParen)?;
14869 let alias = self.maybe_parse_table_alias()?;
14870 Ok(TableFactor::XmlTable {
14871 namespaces,
14872 row_expression,
14873 passing,
14874 columns,
14875 alias,
14876 })
14877 }
14878
14879 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
14880 let uri = self.parse_expr()?;
14881 self.expect_keyword_is(Keyword::AS)?;
14882 let name = self.parse_identifier()?;
14883 Ok(XmlNamespaceDefinition { uri, name })
14884 }
14885
14886 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
14887 let name = self.parse_identifier()?;
14888
14889 let option = if self.parse_keyword(Keyword::FOR) {
14890 self.expect_keyword(Keyword::ORDINALITY)?;
14891 XmlTableColumnOption::ForOrdinality
14892 } else {
14893 let r#type = self.parse_data_type()?;
14894 let mut path = None;
14895 let mut default = None;
14896
14897 if self.parse_keyword(Keyword::PATH) {
14898 path = Some(self.parse_expr()?);
14899 }
14900
14901 if self.parse_keyword(Keyword::DEFAULT) {
14902 default = Some(self.parse_expr()?);
14903 }
14904
14905 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
14906 if !not_null {
14907 let _ = self.parse_keyword(Keyword::NULL);
14909 }
14910
14911 XmlTableColumnOption::NamedInfo {
14912 r#type,
14913 path,
14914 default,
14915 nullable: !not_null,
14916 }
14917 };
14918 Ok(XmlTableColumn { name, option })
14919 }
14920
14921 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
14922 let mut arguments = vec![];
14923 if self.parse_keyword(Keyword::PASSING) {
14924 loop {
14925 let by_value =
14926 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
14927 let expr = self.parse_expr()?;
14928 let alias = if self.parse_keyword(Keyword::AS) {
14929 Some(self.parse_identifier()?)
14930 } else {
14931 None
14932 };
14933 arguments.push(XmlPassingArgument {
14934 expr,
14935 alias,
14936 by_value,
14937 });
14938 if !self.consume_token(&Token::Comma) {
14939 break;
14940 }
14941 }
14942 }
14943 Ok(XmlPassingClause { arguments })
14944 }
14945
    /// Parses a `SEMANTIC_VIEW( name ... )` table factor.
    ///
    /// The DIMENSIONS, METRICS, FACTS and WHERE sub-clauses may appear in any
    /// order, but each at most once.
    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
        self.expect_token(&Token::LParen)?;

        let name = self.parse_object_name(true)?;

        let mut dimensions = Vec::new();
        let mut metrics = Vec::new();
        let mut facts = Vec::new();
        let mut where_clause = None;

        // Loop until the closing paren, consuming sub-clauses in any order.
        while self.peek_token().token != Token::RParen {
            if self.parse_keyword(Keyword::DIMENSIONS) {
                // A non-empty list means the clause was already given once.
                if !dimensions.is_empty() {
                    return Err(ParserError::ParserError(
                        "DIMENSIONS clause can only be specified once".to_string(),
                    ));
                }
                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::METRICS) {
                if !metrics.is_empty() {
                    return Err(ParserError::ParserError(
                        "METRICS clause can only be specified once".to_string(),
                    ));
                }
                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::FACTS) {
                if !facts.is_empty() {
                    return Err(ParserError::ParserError(
                        "FACTS clause can only be specified once".to_string(),
                    ));
                }
                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::WHERE) {
                if where_clause.is_some() {
                    return Err(ParserError::ParserError(
                        "WHERE clause can only be specified once".to_string(),
                    ));
                }
                where_clause = Some(self.parse_expr()?);
            } else {
                // Unknown token inside the parens: report it with its span.
                return parser_err!(
                    format!(
                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
                        self.peek_token().token
                    ),
                    self.peek_token().span.start
                )?;
            }
        }

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::SemanticView {
            name,
            dimensions,
            metrics,
            facts,
            where_clause,
            alias,
        })
    }
15012
    /// Parses the parenthesized body of a `MATCH_RECOGNIZE (...)` clause and
    /// wraps `table` in [`TableFactor::MatchRecognize`]. The
    /// `MATCH_RECOGNIZE` keyword itself has already been consumed.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Optional PARTITION BY <exprs>.
        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        // Optional ORDER BY <order exprs>.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Optional MEASURES expr [AS] alias, ... — the AS is optional but the
        // alias is mandatory.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // Optional ONE ROW PER MATCH / ALL ROWS PER MATCH [empty-match mode].
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // Optional AFTER MATCH SKIP <option>; once `AFTER MATCH SKIP` has
        // been consumed, one of the four options is required.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        // Mandatory PATTERN ( <row pattern> ).
        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        // Mandatory DEFINE symbol AS expr, ... — here AS is required.
        self.expect_keyword_is(Keyword::DEFINE)?;

        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
15117
    /// Parses a primary (unquantified) element of a `MATCH_RECOGNIZE` row
    /// pattern: anchors, exclusions, `PERMUTE(...)`, parenthesized groups,
    /// or a plain symbol.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            // `^` anchor.
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` anchor — the tokenizer produces it as a placeholder token.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            // `{- symbol -}` exclusion syntax.
            Token::LBrace => {
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // `PERMUTE(a, b, ...)` — only the unquoted word form is special.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            // `( pattern )` grouping.
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            // Anything else: rewind the consumed token and parse a plain
            // symbol identifier.
            _ => {
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
15156
    /// Parses a base pattern followed by any number of repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, `{n,m}`.
    /// Each quantifier wraps the pattern parsed so far.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` is tokenized as a placeholder.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,m}` or `{n,}` — the guard consumes the comma.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                // No quantifier: rewind the lookahead token and stop.
                _ => {
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
15211
15212 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15213 let mut patterns = vec![self.parse_repetition_pattern()?];
15214 while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15215 patterns.push(self.parse_repetition_pattern()?);
15216 }
15217 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15218 Ok([pattern]) => Ok(pattern),
15219 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15220 }
15221 }
15222
15223 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15224 let pattern = self.parse_concat_pattern()?;
15225 if self.consume_token(&Token::Pipe) {
15226 match self.parse_pattern()? {
15227 MatchRecognizePattern::Alternation(mut patterns) => {
15229 patterns.insert(0, pattern);
15230 Ok(MatchRecognizePattern::Alternation(patterns))
15231 }
15232 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15233 }
15234 } else {
15235 Ok(pattern)
15236 }
15237 }
15238
15239 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15241 if self.dialect.supports_timestamp_versioning() {
15242 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15243 {
15244 let expr = self.parse_expr()?;
15245 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
15246 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
15247 let func_name = self.parse_object_name(true)?;
15248 let func = self.parse_function(func_name)?;
15249 return Ok(Some(TableVersion::Function(func)));
15250 }
15251 }
15252 Ok(None)
15253 }
15254
    /// Parses a single column definition inside a `JSON_TABLE(... COLUMNS
    /// (...))` clause: a `NESTED [PATH]` sub-column list, `name FOR
    /// ORDINALITY`, or a typed column with a mandatory PATH and optional
    /// ON EMPTY / ON ERROR handling.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional before the path literal.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?.value;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            // Nested column lists recurse into this same parser.
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        // Optional EXISTS modifier before the mandatory PATH.
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?.value;
        let mut on_empty = None;
        let mut on_error = None;
        // Zero or more `<handling> ON EMPTY` / `<handling> ON ERROR`
        // suffixes, in either order; a later duplicate overwrites an
        // earlier one.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
15298
15299 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15307 let name = self.parse_identifier()?;
15308 let r#type = self.parse_data_type()?;
15309 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15310 self.next_token();
15311 Some(path)
15312 } else {
15313 None
15314 };
15315 let as_json = self.parse_keyword(Keyword::AS);
15316 if as_json {
15317 self.expect_keyword_is(Keyword::JSON)?;
15318 }
15319 Ok(OpenJsonTableColumn {
15320 name,
15321 r#type,
15322 path,
15323 as_json,
15324 })
15325 }
15326
15327 fn parse_json_table_column_error_handling(
15328 &mut self,
15329 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15330 let res = if self.parse_keyword(Keyword::NULL) {
15331 JsonTableColumnErrorHandling::Null
15332 } else if self.parse_keyword(Keyword::ERROR) {
15333 JsonTableColumnErrorHandling::Error
15334 } else if self.parse_keyword(Keyword::DEFAULT) {
15335 JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15336 } else {
15337 return Ok(None);
15338 };
15339 self.expect_keyword_is(Keyword::ON)?;
15340 Ok(Some(res))
15341 }
15342
15343 pub fn parse_derived_table_factor(
15344 &mut self,
15345 lateral: IsLateral,
15346 ) -> Result<TableFactor, ParserError> {
15347 let subquery = self.parse_query()?;
15348 self.expect_token(&Token::RParen)?;
15349 let alias = self.maybe_parse_table_alias()?;
15350 Ok(TableFactor::Derived {
15351 lateral: match lateral {
15352 Lateral => true,
15353 NotLateral => false,
15354 },
15355 subquery,
15356 alias,
15357 })
15358 }
15359
15360 fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15361 let function_name = match self.next_token().token {
15362 Token::Word(w) => Ok(w.value),
15363 _ => self.expected("a function identifier", self.peek_token()),
15364 }?;
15365 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15366 let alias = if self.parse_keyword(Keyword::AS) {
15367 Some(self.parse_identifier()?)
15368 } else {
15369 None
15370 };
15371
15372 Ok(ExprWithAlias { expr, alias })
15373 }
15374 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15397 let expr = self.parse_expr()?;
15398 let alias = if self.parse_keyword(Keyword::AS) {
15399 Some(self.parse_identifier()?)
15400 } else {
15401 None
15402 };
15403
15404 Ok(ExprWithAlias { expr, alias })
15405 }
15406
    /// Parses a `PIVOT (...)` clause applied to `table`; the `PIVOT` keyword
    /// has already been consumed.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // FOR names one column or a parenthesized column list. The
        // expressions are parsed with a capped precedence (`Between`) —
        // presumably so the trailing `IN` keyword terminates the
        // expression instead of being parsed as an IN-list operator.
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        self.expect_token(&Token::LParen)?;
        // The IN (...) source is one of: ANY [ORDER BY ...], a subquery,
        // or an explicit value list.
        let value_source = if self.parse_keyword(Keyword::ANY) {
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
        };
        self.expect_token(&Token::RParen)?;

        // Optional `DEFAULT ON NULL (expr)`.
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
15459
15460 pub fn parse_unpivot_table_factor(
15461 &mut self,
15462 table: TableFactor,
15463 ) -> Result<TableFactor, ParserError> {
15464 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15465 self.expect_keyword_is(Keyword::NULLS)?;
15466 Some(NullInclusion::IncludeNulls)
15467 } else if self.parse_keyword(Keyword::EXCLUDE) {
15468 self.expect_keyword_is(Keyword::NULLS)?;
15469 Some(NullInclusion::ExcludeNulls)
15470 } else {
15471 None
15472 };
15473 self.expect_token(&Token::LParen)?;
15474 let value = self.parse_expr()?;
15475 self.expect_keyword_is(Keyword::FOR)?;
15476 let name = self.parse_identifier()?;
15477 self.expect_keyword_is(Keyword::IN)?;
15478 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15479 p.parse_expr_with_alias()
15480 })?;
15481 self.expect_token(&Token::RParen)?;
15482 let alias = self.maybe_parse_table_alias()?;
15483 Ok(TableFactor::Unpivot {
15484 table: Box::new(table),
15485 value,
15486 null_inclusion,
15487 name,
15488 columns,
15489 alias,
15490 })
15491 }
15492
15493 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15494 if natural {
15495 Ok(JoinConstraint::Natural)
15496 } else if self.parse_keyword(Keyword::ON) {
15497 let constraint = self.parse_expr()?;
15498 Ok(JoinConstraint::On(constraint))
15499 } else if self.parse_keyword(Keyword::USING) {
15500 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15501 Ok(JoinConstraint::Using(columns))
15502 } else {
15503 Ok(JoinConstraint::None)
15504 }
15506 }
15507
15508 pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15510 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15511
15512 self.expect_keyword_is(Keyword::TO)?;
15513 let grantees = self.parse_grantees()?;
15514
15515 let with_grant_option =
15516 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15517
15518 let current_grants =
15519 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15520 Some(CurrentGrantsKind::CopyCurrentGrants)
15521 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15522 Some(CurrentGrantsKind::RevokeCurrentGrants)
15523 } else {
15524 None
15525 };
15526
15527 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15528 Some(self.parse_identifier()?)
15529 } else {
15530 None
15531 };
15532
15533 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15534 Some(self.parse_identifier()?)
15535 } else {
15536 None
15537 };
15538
15539 Ok(Statement::Grant {
15540 privileges,
15541 objects,
15542 grantees,
15543 with_grant_option,
15544 as_grantor,
15545 granted_by,
15546 current_grants,
15547 })
15548 }
15549
    /// Parses the comma-separated grantee list of a GRANT/DENY/REVOKE
    /// statement, e.g. `ROLE r1, USER u1, u2`.
    ///
    /// A grantee-type keyword (ROLE, USER, ...) is "sticky": once seen it
    /// carries over to later list entries that have no keyword of their own.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No keyword: reuse the type carried over from the
                // previous entry.
                grantee_type.clone() };

            // Dialects can reserve some of these words as plain identifiers;
            // in that case the keyword was consumed by mistake, so back up
            // one token and keep the previous grantee type.
            // NOTE(review): this rewinds exactly one token even for the
            // two-keyword matches above (e.g. DATABASE ROLE), and also when
            // no keyword was consumed at all — presumably the reserved list
            // never contains those types; verify against the dialect impls.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC carries no name of its own.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // `namespace:identifier` — presumably a namespaced
                    // grantee; the two parts are folded back into a single
                    // object name.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
15617
15618 pub fn parse_grant_deny_revoke_privileges_objects(
15619 &mut self,
15620 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15621 let privileges = if self.parse_keyword(Keyword::ALL) {
15622 Privileges::All {
15623 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15624 }
15625 } else {
15626 let actions = self.parse_actions_list()?;
15627 Privileges::Actions(actions)
15628 };
15629
15630 let objects = if self.parse_keyword(Keyword::ON) {
15631 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15632 Some(GrantObjects::AllTablesInSchema {
15633 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15634 })
15635 } else if self.parse_keywords(&[
15636 Keyword::ALL,
15637 Keyword::EXTERNAL,
15638 Keyword::TABLES,
15639 Keyword::IN,
15640 Keyword::SCHEMA,
15641 ]) {
15642 Some(GrantObjects::AllExternalTablesInSchema {
15643 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15644 })
15645 } else if self.parse_keywords(&[
15646 Keyword::ALL,
15647 Keyword::VIEWS,
15648 Keyword::IN,
15649 Keyword::SCHEMA,
15650 ]) {
15651 Some(GrantObjects::AllViewsInSchema {
15652 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15653 })
15654 } else if self.parse_keywords(&[
15655 Keyword::ALL,
15656 Keyword::MATERIALIZED,
15657 Keyword::VIEWS,
15658 Keyword::IN,
15659 Keyword::SCHEMA,
15660 ]) {
15661 Some(GrantObjects::AllMaterializedViewsInSchema {
15662 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15663 })
15664 } else if self.parse_keywords(&[
15665 Keyword::ALL,
15666 Keyword::FUNCTIONS,
15667 Keyword::IN,
15668 Keyword::SCHEMA,
15669 ]) {
15670 Some(GrantObjects::AllFunctionsInSchema {
15671 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15672 })
15673 } else if self.parse_keywords(&[
15674 Keyword::FUTURE,
15675 Keyword::SCHEMAS,
15676 Keyword::IN,
15677 Keyword::DATABASE,
15678 ]) {
15679 Some(GrantObjects::FutureSchemasInDatabase {
15680 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15681 })
15682 } else if self.parse_keywords(&[
15683 Keyword::FUTURE,
15684 Keyword::TABLES,
15685 Keyword::IN,
15686 Keyword::SCHEMA,
15687 ]) {
15688 Some(GrantObjects::FutureTablesInSchema {
15689 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15690 })
15691 } else if self.parse_keywords(&[
15692 Keyword::FUTURE,
15693 Keyword::EXTERNAL,
15694 Keyword::TABLES,
15695 Keyword::IN,
15696 Keyword::SCHEMA,
15697 ]) {
15698 Some(GrantObjects::FutureExternalTablesInSchema {
15699 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15700 })
15701 } else if self.parse_keywords(&[
15702 Keyword::FUTURE,
15703 Keyword::VIEWS,
15704 Keyword::IN,
15705 Keyword::SCHEMA,
15706 ]) {
15707 Some(GrantObjects::FutureViewsInSchema {
15708 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15709 })
15710 } else if self.parse_keywords(&[
15711 Keyword::FUTURE,
15712 Keyword::MATERIALIZED,
15713 Keyword::VIEWS,
15714 Keyword::IN,
15715 Keyword::SCHEMA,
15716 ]) {
15717 Some(GrantObjects::FutureMaterializedViewsInSchema {
15718 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15719 })
15720 } else if self.parse_keywords(&[
15721 Keyword::ALL,
15722 Keyword::SEQUENCES,
15723 Keyword::IN,
15724 Keyword::SCHEMA,
15725 ]) {
15726 Some(GrantObjects::AllSequencesInSchema {
15727 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15728 })
15729 } else if self.parse_keywords(&[
15730 Keyword::FUTURE,
15731 Keyword::SEQUENCES,
15732 Keyword::IN,
15733 Keyword::SCHEMA,
15734 ]) {
15735 Some(GrantObjects::FutureSequencesInSchema {
15736 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15737 })
15738 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15739 Some(GrantObjects::ResourceMonitors(
15740 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15741 ))
15742 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15743 Some(GrantObjects::ComputePools(
15744 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15745 ))
15746 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15747 Some(GrantObjects::FailoverGroup(
15748 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15749 ))
15750 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15751 Some(GrantObjects::ReplicationGroup(
15752 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15753 ))
15754 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15755 Some(GrantObjects::ExternalVolumes(
15756 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15757 ))
15758 } else {
15759 let object_type = self.parse_one_of_keywords(&[
15760 Keyword::SEQUENCE,
15761 Keyword::DATABASE,
15762 Keyword::SCHEMA,
15763 Keyword::TABLE,
15764 Keyword::VIEW,
15765 Keyword::WAREHOUSE,
15766 Keyword::INTEGRATION,
15767 Keyword::VIEW,
15768 Keyword::WAREHOUSE,
15769 Keyword::INTEGRATION,
15770 Keyword::USER,
15771 Keyword::CONNECTION,
15772 Keyword::PROCEDURE,
15773 Keyword::FUNCTION,
15774 ]);
15775 let objects =
15776 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
15777 match object_type {
15778 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
15779 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
15780 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
15781 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
15782 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
15783 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
15784 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
15785 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
15786 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
15787 if let Some(name) = objects?.first() {
15788 self.parse_grant_procedure_or_function(name, &kw)?
15789 } else {
15790 self.expected("procedure or function name", self.peek_token())?
15791 }
15792 }
15793 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
15794 Some(unexpected_keyword) => return Err(ParserError::ParserError(
15795 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
15796 )),
15797 }
15798 }
15799 } else {
15800 None
15801 };
15802
15803 Ok((privileges, objects))
15804 }
15805
15806 fn parse_grant_procedure_or_function(
15807 &mut self,
15808 name: &ObjectName,
15809 kw: &Option<Keyword>,
15810 ) -> Result<Option<GrantObjects>, ParserError> {
15811 let arg_types = if self.consume_token(&Token::LParen) {
15812 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
15813 self.expect_token(&Token::RParen)?;
15814 list
15815 } else {
15816 vec![]
15817 };
15818 match kw {
15819 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
15820 name: name.clone(),
15821 arg_types,
15822 })),
15823 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
15824 name: name.clone(),
15825 arg_types,
15826 })),
15827 _ => self.expected("procedure or function keywords", self.peek_token())?,
15828 }
15829 }
15830
    /// Parses a single privilege of a `GRANT`/`REVOKE`/`DENY` statement,
    /// e.g. `SELECT (col1, col2)`, `IMPORTED PRIVILEGES`, or `CREATE SCHEMA`.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Parses an optional parenthesized column list; an empty/absent list
        // is normalized to `None` so `SELECT` and `SELECT (c)` are distinct.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges are probed before single-word ones that share
        // a leading keyword (e.g. `READ SESSION` before `READ`, `MANAGE
        // VERSIONS` before `MANAGE`); `parse_keywords` only consumes tokens
        // on a full match, so a failed probe leaves the stream untouched.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)
        // Single-keyword privileges, some followed by a sub-type or column list.
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            // The object type after CREATE is optional (bare `CREATE` is valid).
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected("a privilege keyword", self.peek_token())?
        }
    }
15950
    /// Parses the optional object type following `GRANT CREATE`, e.g.
    /// `CREATE SCHEMA` or `CREATE COMPUTE POOL`. Returns `None` (consuming
    /// nothing) when no recognized object type follows.
    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
        // Multi-word object types first; `parse_keywords` only consumes
        // tokens on a full match, so failed probes are side-effect free.
        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
            Some(ActionCreateObjectType::ApplicationPackage)
        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
            Some(ActionCreateObjectType::ComputePool)
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
            Some(ActionCreateObjectType::DataExchangeListing)
        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
            Some(ActionCreateObjectType::ExternalVolume)
        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
            Some(ActionCreateObjectType::FailoverGroup)
        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
            Some(ActionCreateObjectType::NetworkPolicy)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
            // NOTE(review): the variant name `OrganiationListing` is misspelled
            // at its definition site; renaming belongs at the enum, not here.
            Some(ActionCreateObjectType::OrganiationListing)
        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
            Some(ActionCreateObjectType::ReplicationGroup)
        }
        // Single-keyword object types. `APPLICATION` alone must come after
        // `APPLICATION PACKAGE` above.
        else if self.parse_keyword(Keyword::ACCOUNT) {
            Some(ActionCreateObjectType::Account)
        } else if self.parse_keyword(Keyword::APPLICATION) {
            Some(ActionCreateObjectType::Application)
        } else if self.parse_keyword(Keyword::DATABASE) {
            Some(ActionCreateObjectType::Database)
        } else if self.parse_keyword(Keyword::INTEGRATION) {
            Some(ActionCreateObjectType::Integration)
        } else if self.parse_keyword(Keyword::ROLE) {
            Some(ActionCreateObjectType::Role)
        } else if self.parse_keyword(Keyword::SCHEMA) {
            Some(ActionCreateObjectType::Schema)
        } else if self.parse_keyword(Keyword::SHARE) {
            Some(ActionCreateObjectType::Share)
        } else if self.parse_keyword(Keyword::USER) {
            Some(ActionCreateObjectType::User)
        } else if self.parse_keyword(Keyword::WAREHOUSE) {
            Some(ActionCreateObjectType::Warehouse)
        } else {
            None
        }
    }
15993
15994 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
15995 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
15996 Ok(ActionApplyType::AggregationPolicy)
15997 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
15998 Ok(ActionApplyType::AuthenticationPolicy)
15999 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
16000 Ok(ActionApplyType::JoinPolicy)
16001 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
16002 Ok(ActionApplyType::MaskingPolicy)
16003 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
16004 Ok(ActionApplyType::PackagesPolicy)
16005 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
16006 Ok(ActionApplyType::PasswordPolicy)
16007 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
16008 Ok(ActionApplyType::ProjectionPolicy)
16009 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
16010 Ok(ActionApplyType::RowAccessPolicy)
16011 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
16012 Ok(ActionApplyType::SessionPolicy)
16013 } else if self.parse_keyword(Keyword::TAG) {
16014 Ok(ActionApplyType::Tag)
16015 } else {
16016 self.expected("GRANT APPLY type", self.peek_token())
16017 }
16018 }
16019
16020 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
16021 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
16022 Some(ActionExecuteObjectType::DataMetricFunction)
16023 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
16024 Some(ActionExecuteObjectType::ManagedAlert)
16025 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
16026 Some(ActionExecuteObjectType::ManagedTask)
16027 } else if self.parse_keyword(Keyword::ALERT) {
16028 Some(ActionExecuteObjectType::Alert)
16029 } else if self.parse_keyword(Keyword::TASK) {
16030 Some(ActionExecuteObjectType::Task)
16031 } else {
16032 None
16033 }
16034 }
16035
16036 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
16037 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
16038 Ok(ActionManageType::AccountSupportCases)
16039 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
16040 Ok(ActionManageType::EventSharing)
16041 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
16042 Ok(ActionManageType::ListingAutoFulfillment)
16043 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
16044 Ok(ActionManageType::OrganizationSupportCases)
16045 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
16046 Ok(ActionManageType::UserSupportCases)
16047 } else if self.parse_keyword(Keyword::GRANTS) {
16048 Ok(ActionManageType::Grants)
16049 } else if self.parse_keyword(Keyword::WAREHOUSES) {
16050 Ok(ActionManageType::Warehouses)
16051 } else {
16052 self.expected("GRANT MANAGE type", self.peek_token())
16053 }
16054 }
16055
16056 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
16057 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
16058 Some(ActionModifyType::LogLevel)
16059 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
16060 Some(ActionModifyType::TraceLevel)
16061 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
16062 Some(ActionModifyType::SessionLogLevel)
16063 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
16064 Some(ActionModifyType::SessionTraceLevel)
16065 } else {
16066 None
16067 }
16068 }
16069
16070 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
16071 if self.parse_keyword(Keyword::EXECUTION) {
16072 Some(ActionMonitorType::Execution)
16073 } else if self.parse_keyword(Keyword::SECURITY) {
16074 Some(ActionMonitorType::Security)
16075 } else if self.parse_keyword(Keyword::USAGE) {
16076 Some(ActionMonitorType::Usage)
16077 } else {
16078 None
16079 }
16080 }
16081
16082 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
16083 let mut name = self.parse_object_name(false)?;
16084 if self.dialect.supports_user_host_grantee()
16085 && name.0.len() == 1
16086 && name.0[0].as_ident().is_some()
16087 && self.consume_token(&Token::AtSign)
16088 {
16089 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
16090 let host = self.parse_identifier()?;
16091 Ok(GranteeName::UserHost { user, host })
16092 } else {
16093 Ok(GranteeName::ObjectName(name))
16094 }
16095 }
16096
16097 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
16099 self.expect_keyword(Keyword::DENY)?;
16100
16101 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16102 let objects = match objects {
16103 Some(o) => o,
16104 None => {
16105 return parser_err!(
16106 "DENY statements must specify an object",
16107 self.peek_token().span.start
16108 )
16109 }
16110 };
16111
16112 self.expect_keyword_is(Keyword::TO)?;
16113 let grantees = self.parse_grantees()?;
16114 let cascade = self.parse_cascade_option();
16115 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
16116 Some(self.parse_identifier()?)
16117 } else {
16118 None
16119 };
16120
16121 Ok(Statement::Deny(DenyStatement {
16122 privileges,
16123 objects,
16124 grantees,
16125 cascade,
16126 granted_by,
16127 }))
16128 }
16129
16130 pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
16132 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16133
16134 self.expect_keyword_is(Keyword::FROM)?;
16135 let grantees = self.parse_grantees()?;
16136
16137 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16138 Some(self.parse_identifier()?)
16139 } else {
16140 None
16141 };
16142
16143 let cascade = self.parse_cascade_option();
16144
16145 Ok(Statement::Revoke {
16146 privileges,
16147 objects,
16148 grantees,
16149 granted_by,
16150 cascade,
16151 })
16152 }
16153
16154 pub fn parse_replace(
16156 &mut self,
16157 replace_token: TokenWithSpan,
16158 ) -> Result<Statement, ParserError> {
16159 if !dialect_of!(self is MySqlDialect | GenericDialect) {
16160 return parser_err!(
16161 "Unsupported statement REPLACE",
16162 self.peek_token().span.start
16163 );
16164 }
16165
16166 let mut insert = self.parse_insert(replace_token)?;
16167 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16168 *replace_into = true;
16169 }
16170
16171 Ok(insert)
16172 }
16173
16174 fn parse_insert_setexpr_boxed(
16178 &mut self,
16179 insert_token: TokenWithSpan,
16180 ) -> Result<Box<SetExpr>, ParserError> {
16181 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
16182 }
16183
    /// Parses an `INSERT` statement body; the `INSERT` (or `REPLACE`) keyword
    /// itself has already been consumed and its token is passed in as
    /// `insert_token`. Also handles Hive's `INSERT ... DIRECTORY` form.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        // SQLite `INSERT OR REPLACE|ROLLBACK|...` conflict clause.
        let or = self.parse_conflict_clause();
        // MySQL priority modifiers (LOW_PRIORITY / DELAYED / HIGH_PRIORITY).
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        // MySQL `INSERT IGNORE`.
        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Set to true by `parse_replace` after this returns.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // Hive: `INSERT OVERWRITE [LOCAL] DIRECTORY 'path' [STORED AS fmt] query`.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional `TABLE` keyword (e.g. Hive `INSERT INTO TABLE t`).
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Postgres allows aliasing the target table: `INSERT INTO t AS a`.
            let table_alias =
                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT INTO t DEFAULT VALUES` — no columns, no source.
                (vec![], None, vec![], None, vec![])
            } else {
                // Only parse a column list if the next tokens are not the
                // start of a parenthesized subquery `(SELECT ...)`, which
                // would otherwise be misread as a column list.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;

                    // Hive `PARTITION (...)` clause, with an optional second
                    // column list after it.
                    let partitioned = self.parse_insert_partition()?;
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                // The source is either absent (ClickHouse FORMAT/SETTINGS
                // follows), a MySQL-style `SET a = 1, ...` list, or a query.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (columns, partitioned, after_columns, source, assignments)
            };

            // ClickHouse `SETTINGS ...` and `FORMAT ...` trailing clauses.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias (col_aliases)` for use in ON DUPLICATE KEY.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ...` (Postgres/SQLite) or `ON DUPLICATE KEY UPDATE`
            // (MySQL).
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Optional conflict target: `ON CONSTRAINT name` or a
                    // column list.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token() == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            // Optional `RETURNING ...` clause.
            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Statement::Insert(Insert {
                insert_token: insert_token.into(),
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
            }))
        }
    }
16377
16378 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16382 let ident = self.parse_identifier()?;
16383 let values = self
16384 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16385 .unwrap_or_default();
16386
16387 Ok(InputFormatClause { ident, values })
16388 }
16389
16390 fn peek_subquery_start(&mut self) -> bool {
16393 let [maybe_lparen, maybe_select] = self.peek_tokens();
16394 Token::LParen == maybe_lparen
16395 && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16396 }
16397
16398 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16399 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16400 Some(SqliteOnConflict::Replace)
16401 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16402 Some(SqliteOnConflict::Rollback)
16403 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16404 Some(SqliteOnConflict::Abort)
16405 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16406 Some(SqliteOnConflict::Fail)
16407 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16408 Some(SqliteOnConflict::Ignore)
16409 } else if self.parse_keyword(Keyword::REPLACE) {
16410 Some(SqliteOnConflict::Replace)
16411 } else {
16412 None
16413 }
16414 }
16415
16416 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16417 if self.parse_keyword(Keyword::PARTITION) {
16418 self.expect_token(&Token::LParen)?;
16419 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16420 self.expect_token(&Token::RParen)?;
16421 Ok(partition_cols)
16422 } else {
16423 Ok(None)
16424 }
16425 }
16426
16427 pub fn parse_load_data_table_format(
16428 &mut self,
16429 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16430 if self.parse_keyword(Keyword::INPUTFORMAT) {
16431 let input_format = self.parse_expr()?;
16432 self.expect_keyword_is(Keyword::SERDE)?;
16433 let serde = self.parse_expr()?;
16434 Ok(Some(HiveLoadDataFormat {
16435 input_format,
16436 serde,
16437 }))
16438 } else {
16439 Ok(None)
16440 }
16441 }
16442
16443 fn parse_update_setexpr_boxed(
16447 &mut self,
16448 update_token: TokenWithSpan,
16449 ) -> Result<Box<SetExpr>, ParserError> {
16450 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
16451 }
16452
16453 pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
16454 let or = self.parse_conflict_clause();
16455 let table = self.parse_table_and_joins()?;
16456 let from_before_set = if self.parse_keyword(Keyword::FROM) {
16457 Some(UpdateTableFromKind::BeforeSet(
16458 self.parse_table_with_joins()?,
16459 ))
16460 } else {
16461 None
16462 };
16463 self.expect_keyword(Keyword::SET)?;
16464 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16465 let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16466 Some(UpdateTableFromKind::AfterSet(
16467 self.parse_table_with_joins()?,
16468 ))
16469 } else {
16470 from_before_set
16471 };
16472 let selection = if self.parse_keyword(Keyword::WHERE) {
16473 Some(self.parse_expr()?)
16474 } else {
16475 None
16476 };
16477 let returning = if self.parse_keyword(Keyword::RETURNING) {
16478 Some(self.parse_comma_separated(Parser::parse_select_item)?)
16479 } else {
16480 None
16481 };
16482 let limit = if self.parse_keyword(Keyword::LIMIT) {
16483 Some(self.parse_expr()?)
16484 } else {
16485 None
16486 };
16487 Ok(Update {
16488 update_token: update_token.into(),
16489 table,
16490 assignments,
16491 from,
16492 selection,
16493 returning,
16494 or,
16495 limit,
16496 }
16497 .into())
16498 }
16499
16500 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16502 let target = self.parse_assignment_target()?;
16503 self.expect_token(&Token::Eq)?;
16504 let value = self.parse_expr()?;
16505 Ok(Assignment { target, value })
16506 }
16507
16508 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16510 if self.consume_token(&Token::LParen) {
16511 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16512 self.expect_token(&Token::RParen)?;
16513 Ok(AssignmentTarget::Tuple(columns))
16514 } else {
16515 let column = self.parse_object_name(false)?;
16516 Ok(AssignmentTarget::ColumnName(column))
16517 }
16518 }
16519
16520 pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16521 let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16522 self.maybe_parse(|p| {
16523 let name = p.parse_expr()?;
16524 let operator = p.parse_function_named_arg_operator()?;
16525 let arg = p.parse_wildcard_expr()?.into();
16526 Ok(FunctionArg::ExprNamed {
16527 name,
16528 arg,
16529 operator,
16530 })
16531 })?
16532 } else {
16533 self.maybe_parse(|p| {
16534 let name = p.parse_identifier()?;
16535 let operator = p.parse_function_named_arg_operator()?;
16536 let arg = p.parse_wildcard_expr()?.into();
16537 Ok(FunctionArg::Named {
16538 name,
16539 arg,
16540 operator,
16541 })
16542 })?
16543 };
16544 if let Some(arg) = arg {
16545 return Ok(arg);
16546 }
16547 Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16548 }
16549
16550 fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
16551 if self.parse_keyword(Keyword::VALUE) {
16552 return Ok(FunctionArgOperator::Value);
16553 }
16554 let tok = self.next_token();
16555 match tok.token {
16556 Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
16557 Ok(FunctionArgOperator::RightArrow)
16558 }
16559 Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
16560 Ok(FunctionArgOperator::Equals)
16561 }
16562 Token::Assignment
16563 if self
16564 .dialect
16565 .supports_named_fn_args_with_assignment_operator() =>
16566 {
16567 Ok(FunctionArgOperator::Assignment)
16568 }
16569 Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
16570 Ok(FunctionArgOperator::Colon)
16571 }
16572 _ => {
16573 self.prev_token();
16574 self.expected("argument operator", tok)
16575 }
16576 }
16577 }
16578
16579 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16580 if self.consume_token(&Token::RParen) {
16581 Ok(vec![])
16582 } else {
16583 let args = self.parse_comma_separated(Parser::parse_function_args)?;
16584 self.expect_token(&Token::RParen)?;
16585 Ok(args)
16586 }
16587 }
16588
16589 fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
16590 if self.consume_token(&Token::RParen) {
16591 return Ok(TableFunctionArgs {
16592 args: vec![],
16593 settings: None,
16594 });
16595 }
16596 let mut args = vec![];
16597 let settings = loop {
16598 if let Some(settings) = self.parse_settings()? {
16599 break Some(settings);
16600 }
16601 args.push(self.parse_function_args()?);
16602 if self.is_parse_comma_separated_end() {
16603 break None;
16604 }
16605 };
16606 self.expect_token(&Token::RParen)?;
16607 Ok(TableFunctionArgs { args, settings })
16608 }
16609
    /// Parses a full function argument list (the opening `(` has already
    /// been consumed) including optional clauses such as DISTINCT/ALL,
    /// ORDER BY, LIMIT, HAVING, SEPARATOR, ON OVERFLOW, and JSON
    /// NULL/RETURNING clauses, up to and including the closing `)`.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON clauses may appear before the arguments (e.g. in JSON_OBJECT).
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list: `()` (possibly with the JSON clauses above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        // Optional DISTINCT/ALL, then the arguments themselves.
        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        // Trailing clauses; the parse order below fixes the accepted clause
        // order inside the parentheses.
        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery `HAVING MIN|MAX expr`.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL GROUP_CONCAT `SEPARATOR 'str'`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
        }

        // LISTAGG `ON OVERFLOW ...`.
        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON clauses may also appear after the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
16703
16704 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16705 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16706 Some(JsonNullClause::AbsentOnNull)
16707 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16708 Some(JsonNullClause::NullOnNull)
16709 } else {
16710 None
16711 }
16712 }
16713
16714 fn maybe_parse_json_returning_clause(
16715 &mut self,
16716 ) -> Result<Option<JsonReturningClause>, ParserError> {
16717 if self.parse_keyword(Keyword::RETURNING) {
16718 let data_type = self.parse_data_type()?;
16719 Ok(Some(JsonReturningClause { data_type }))
16720 } else {
16721 Ok(None)
16722 }
16723 }
16724
16725 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
16726 let loc = self.peek_token().span.start;
16727 match (
16728 self.parse_keyword(Keyword::ALL),
16729 self.parse_keyword(Keyword::DISTINCT),
16730 ) {
16731 (true, false) => Ok(Some(DuplicateTreatment::All)),
16732 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
16733 (false, false) => Ok(None),
16734 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
16735 }
16736 }
16737
    /// Parses one item of a SELECT projection list: a wildcard, qualified
    /// wildcard, aliased expression, dialect-specific `alias = expr`
    /// assignment, or a plain expression with an optional alias.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Dialect-reserved operator keyword prefix (if any), captured as an
        // identifier built from the keyword's debug name.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `tbl.*` (or `schema.tbl.*`) plus its wildcard modifiers.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*` plus its wildcard modifiers.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted `from` here means the projection was empty.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token().span.start
                )
            }
            // MSSQL-style `alias = expr` assignment in the SELECT list.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The guard above guarantees `left` is an identifier.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `<expr>.*` where the qualifier is an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // Plain expression, with optional alias; the keyword prefix (if
            // any) is re-attached here.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
16799
    /// Parses the dialect-specific modifiers that may follow a `*` in a
    /// SELECT list: `ILIKE`, `EXCLUDE`, `EXCEPT`, `REPLACE` and `RENAME`,
    /// attempted in that order.
    ///
    /// `EXCLUDE` is only attempted when no `ILIKE` pattern was parsed.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // ILIKE and EXCLUDE are mutually exclusive here.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
        {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
        })
    }
16844
16845 pub fn parse_optional_select_item_ilike(
16849 &mut self,
16850 ) -> Result<Option<IlikeSelectItem>, ParserError> {
16851 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
16852 let next_token = self.next_token();
16853 let pattern = match next_token.token {
16854 Token::SingleQuotedString(s) => s,
16855 _ => return self.expected("ilike pattern", next_token),
16856 };
16857 Some(IlikeSelectItem { pattern })
16858 } else {
16859 None
16860 };
16861 Ok(opt_ilike)
16862 }
16863
16864 pub fn parse_optional_select_item_exclude(
16868 &mut self,
16869 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
16870 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
16871 if self.consume_token(&Token::LParen) {
16872 let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16873 self.expect_token(&Token::RParen)?;
16874 Some(ExcludeSelectItem::Multiple(columns))
16875 } else {
16876 let column = self.parse_identifier()?;
16877 Some(ExcludeSelectItem::Single(column))
16878 }
16879 } else {
16880 None
16881 };
16882
16883 Ok(opt_exclude)
16884 }
16885
16886 pub fn parse_optional_select_item_except(
16890 &mut self,
16891 ) -> Result<Option<ExceptSelectItem>, ParserError> {
16892 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
16893 if self.peek_token().token == Token::LParen {
16894 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
16895 match &idents[..] {
16896 [] => {
16897 return self.expected(
16898 "at least one column should be parsed by the expect clause",
16899 self.peek_token(),
16900 )?;
16901 }
16902 [first, idents @ ..] => Some(ExceptSelectItem {
16903 first_element: first.clone(),
16904 additional_elements: idents.to_vec(),
16905 }),
16906 }
16907 } else {
16908 let ident = self.parse_identifier()?;
16910 Some(ExceptSelectItem {
16911 first_element: ident,
16912 additional_elements: vec![],
16913 })
16914 }
16915 } else {
16916 None
16917 };
16918
16919 Ok(opt_except)
16920 }
16921
16922 pub fn parse_optional_select_item_rename(
16924 &mut self,
16925 ) -> Result<Option<RenameSelectItem>, ParserError> {
16926 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
16927 if self.consume_token(&Token::LParen) {
16928 let idents =
16929 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
16930 self.expect_token(&Token::RParen)?;
16931 Some(RenameSelectItem::Multiple(idents))
16932 } else {
16933 let ident = self.parse_identifier_with_alias()?;
16934 Some(RenameSelectItem::Single(ident))
16935 }
16936 } else {
16937 None
16938 };
16939
16940 Ok(opt_rename)
16941 }
16942
    /// Parses an optional `REPLACE (<expr> [AS] <col>, ...)` wildcard
    /// modifier; the parenthesized form is mandatory after `REPLACE`.
    pub fn parse_optional_select_item_replace(
        &mut self,
    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
            if self.consume_token(&Token::LParen) {
                let items = self.parse_comma_separated(|parser| {
                    Ok(Box::new(parser.parse_replace_elements()?))
                })?;
                self.expect_token(&Token::RParen)?;
                Some(ReplaceSelectItem { items })
            } else {
                // REPLACE without a parenthesized list is an error.
                let tok = self.next_token();
                return self.expected("( after REPLACE but", tok);
            }
        } else {
            None
        };

        Ok(opt_replace)
    }
16964 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
16965 let expr = self.parse_expr()?;
16966 let as_keyword = self.parse_keyword(Keyword::AS);
16967 let ident = self.parse_identifier()?;
16968 Ok(ReplaceSelectElement {
16969 expr,
16970 column_name: ident,
16971 as_keyword,
16972 })
16973 }
16974
16975 pub fn parse_asc_desc(&mut self) -> Option<bool> {
16978 if self.parse_keyword(Keyword::ASC) {
16979 Some(true)
16980 } else if self.parse_keyword(Keyword::DESC) {
16981 Some(false)
16982 } else {
16983 None
16984 }
16985 }
16986
16987 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
16989 self.parse_order_by_expr_inner(false)
16990 .map(|(order_by, _)| order_by)
16991 }
16992
16993 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
16995 self.parse_order_by_expr_inner(true)
16996 .map(|(column, operator_class)| IndexColumn {
16997 column,
16998 operator_class,
16999 })
17000 }
17001
    /// Shared worker for ORDER BY expressions and CREATE INDEX columns.
    ///
    /// When `with_operator_class` is true, an identifier directly following
    /// the expression is tentatively parsed as an operator class — unless
    /// the next keyword is one that may legally follow the expression
    /// (`ASC`/`DESC`/`NULLS`/`WITH`).
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<Ident> = if with_operator_class {
            // An upcoming ordering keyword means there is no operator class.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_identifier())?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // ClickHouse `WITH FILL` modifier.
        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
17042
17043 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
17044 let asc = self.parse_asc_desc();
17045
17046 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
17047 Some(true)
17048 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
17049 Some(false)
17050 } else {
17051 None
17052 };
17053
17054 Ok(OrderByOptions { asc, nulls_first })
17055 }
17056
17057 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
17060 let from = if self.parse_keyword(Keyword::FROM) {
17061 Some(self.parse_expr()?)
17062 } else {
17063 None
17064 };
17065
17066 let to = if self.parse_keyword(Keyword::TO) {
17067 Some(self.parse_expr()?)
17068 } else {
17069 None
17070 };
17071
17072 let step = if self.parse_keyword(Keyword::STEP) {
17073 Some(self.parse_expr()?)
17074 } else {
17075 None
17076 };
17077
17078 Ok(WithFill { from, to, step })
17079 }
17080
17081 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
17084 if !self.parse_keyword(Keyword::INTERPOLATE) {
17085 return Ok(None);
17086 }
17087
17088 if self.consume_token(&Token::LParen) {
17089 let interpolations =
17090 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
17091 self.expect_token(&Token::RParen)?;
17092 return Ok(Some(Interpolate {
17094 exprs: Some(interpolations),
17095 }));
17096 }
17097
17098 Ok(Some(Interpolate { exprs: None }))
17100 }
17101
17102 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
17104 let column = self.parse_identifier()?;
17105 let expr = if self.parse_keyword(Keyword::AS) {
17106 Some(self.parse_expr()?)
17107 } else {
17108 None
17109 };
17110 Ok(InterpolateExpr { column, expr })
17111 }
17112
17113 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
17116 let quantity = if self.consume_token(&Token::LParen) {
17117 let quantity = self.parse_expr()?;
17118 self.expect_token(&Token::RParen)?;
17119 Some(TopQuantity::Expr(quantity))
17120 } else {
17121 let next_token = self.next_token();
17122 let quantity = match next_token.token {
17123 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
17124 _ => self.expected("literal int", next_token)?,
17125 };
17126 Some(TopQuantity::Constant(quantity))
17127 };
17128
17129 let percent = self.parse_keyword(Keyword::PERCENT);
17130
17131 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
17132
17133 Ok(Top {
17134 with_ties,
17135 percent,
17136 quantity,
17137 })
17138 }
17139
17140 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17142 if self.parse_keyword(Keyword::ALL) {
17143 Ok(None)
17144 } else {
17145 Ok(Some(self.parse_expr()?))
17146 }
17147 }
17148
17149 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17151 let value = self.parse_expr()?;
17152 let rows = if self.parse_keyword(Keyword::ROW) {
17153 OffsetRows::Row
17154 } else if self.parse_keyword(Keyword::ROWS) {
17155 OffsetRows::Rows
17156 } else {
17157 OffsetRows::None
17158 };
17159 Ok(Offset { value, rows })
17160 }
17161
17162 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
17164 let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
17165
17166 let (quantity, percent) = if self
17167 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
17168 .is_some()
17169 {
17170 (None, false)
17171 } else {
17172 let quantity = Expr::Value(self.parse_value()?);
17173 let percent = self.parse_keyword(Keyword::PERCENT);
17174 let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
17175 (Some(quantity), percent)
17176 };
17177
17178 let with_ties = if self.parse_keyword(Keyword::ONLY) {
17179 false
17180 } else {
17181 self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
17182 };
17183
17184 Ok(Fetch {
17185 with_ties,
17186 percent,
17187 quantity,
17188 })
17189 }
17190
17191 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
17193 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
17194 Keyword::UPDATE => LockType::Update,
17195 Keyword::SHARE => LockType::Share,
17196 unexpected_keyword => return Err(ParserError::ParserError(
17197 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
17198 )),
17199 };
17200 let of = if self.parse_keyword(Keyword::OF) {
17201 Some(self.parse_object_name(false)?)
17202 } else {
17203 None
17204 };
17205 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
17206 Some(NonBlock::Nowait)
17207 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
17208 Some(NonBlock::SkipLocked)
17209 } else {
17210 None
17211 };
17212 Ok(LockClause {
17213 lock_type,
17214 of,
17215 nonblock,
17216 })
17217 }
17218
    /// Parses a `VALUES` row list, e.g. `(1, 2), (3, 4)`, with optional
    /// `ROW(...)` markers on the rows.
    ///
    /// * `allow_empty` — permit empty row bodies `()`.
    /// * `value_keyword` — passed through unchanged into the resulting AST
    ///   node.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            // Any row with a ROW marker flags the whole VALUES as explicit.
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Empty row: consume the `)` and yield no expressions.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
17247
17248 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
17249 self.expect_keyword_is(Keyword::TRANSACTION)?;
17250 Ok(Statement::StartTransaction {
17251 modes: self.parse_transaction_modes()?,
17252 begin: false,
17253 transaction: Some(BeginTransactionKind::Transaction),
17254 modifier: None,
17255 statements: vec![],
17256 exception: None,
17257 has_end_keyword: false,
17258 })
17259 }
17260
    /// Parses a `BEGIN ...` transaction start: an optional dialect-specific
    /// modifier (`DEFERRED`/`IMMEDIATE`/`EXCLUSIVE`/`TRY`/`CATCH`), an
    /// optional `TRANSACTION`/`WORK` keyword, then transaction modes.
    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
        // The modifier keywords are only recognized for dialects that
        // support them.
        let modifier = if !self.dialect.supports_start_transaction_modifier() {
            None
        } else if self.parse_keyword(Keyword::DEFERRED) {
            Some(TransactionModifier::Deferred)
        } else if self.parse_keyword(Keyword::IMMEDIATE) {
            Some(TransactionModifier::Immediate)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Some(TransactionModifier::Exclusive)
        } else if self.parse_keyword(Keyword::TRY) {
            Some(TransactionModifier::Try)
        } else if self.parse_keyword(Keyword::CATCH) {
            Some(TransactionModifier::Catch)
        } else {
            None
        };
        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
            _ => None,
        };
        Ok(Statement::StartTransaction {
            modes: self.parse_transaction_modes()?,
            begin: true,
            transaction,
            modifier,
            statements: vec![],
            exception: None,
            has_end_keyword: false,
        })
    }
17292
    /// Parses the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block after the leading `BEGIN`.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        // Statements up to either EXCEPTION or END.
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler: WHEN <ident> [OR <ident>]... THEN <statements>
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                let mut idents = Vec::new();

                // Collect error identifiers until THEN; `OR` separators are
                // consumed opportunistically via maybe_parse.
                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
17337
17338 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17339 let modifier = if !self.dialect.supports_end_transaction_modifier() {
17340 None
17341 } else if self.parse_keyword(Keyword::TRY) {
17342 Some(TransactionModifier::Try)
17343 } else if self.parse_keyword(Keyword::CATCH) {
17344 Some(TransactionModifier::Catch)
17345 } else {
17346 None
17347 };
17348 Ok(Statement::Commit {
17349 chain: self.parse_commit_rollback_chain()?,
17350 end: true,
17351 modifier,
17352 })
17353 }
17354
    /// Parses zero or more transaction modes (`ISOLATION LEVEL ...`,
    /// `READ ONLY`, `READ WRITE`), optionally comma-separated.
    ///
    /// After a comma another mode is mandatory; otherwise the loop stops at
    /// the first token that is not a mode.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected("isolation level", self.peek_token())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected("transaction mode", self.peek_token())?
            } else {
                break;
            };
            modes.push(mode);
            // A trailing comma makes the next mode mandatory.
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
17392
17393 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17394 Ok(Statement::Commit {
17395 chain: self.parse_commit_rollback_chain()?,
17396 end: false,
17397 modifier: None,
17398 })
17399 }
17400
17401 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17402 let chain = self.parse_commit_rollback_chain()?;
17403 let savepoint = self.parse_rollback_savepoint()?;
17404
17405 Ok(Statement::Rollback { chain, savepoint })
17406 }
17407
17408 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17409 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17410 if self.parse_keyword(Keyword::AND) {
17411 let chain = !self.parse_keyword(Keyword::NO);
17412 self.expect_keyword_is(Keyword::CHAIN)?;
17413 Ok(chain)
17414 } else {
17415 Ok(false)
17416 }
17417 }
17418
17419 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17420 if self.parse_keyword(Keyword::TO) {
17421 let _ = self.parse_keyword(Keyword::SAVEPOINT);
17422 let savepoint = self.parse_identifier()?;
17423
17424 Ok(Some(savepoint))
17425 } else {
17426 Ok(None)
17427 }
17428 }
17429
    /// Parses a T-SQL `RAISERROR (message, severity, state [, args...])
    /// [WITH option[, ...]]` statement after the `RAISERROR` keyword.
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional substitution arguments after the three mandatory parts.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        // Optional WITH LOG|NOWAIT|SETERROR options.
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
17457
17458 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17459 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17460 Keyword::LOG => Ok(RaisErrorOption::Log),
17461 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17462 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17463 _ => self.expected(
17464 "LOG, NOWAIT OR SETERROR raiserror option",
17465 self.peek_token(),
17466 ),
17467 }
17468 }
17469
17470 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17471 let prepare = self.parse_keyword(Keyword::PREPARE);
17472 let name = self.parse_identifier()?;
17473 Ok(Statement::Deallocate { name, prepare })
17474 }
17475
    /// Parses an `EXECUTE [IMMEDIATE] ...` statement: an optional procedure
    /// name, optionally parenthesized parameters, and optional `INTO`,
    /// `USING`, `OUTPUT` and `DEFAULT` suffixes.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        // `EXECUTE IMMEDIATE` (where supported) carries no procedure name.
        let name = if self.dialect.supports_execute_immediate()
            && self.parse_keyword(Keyword::IMMEDIATE)
        {
            None
        } else {
            let name = self.parse_object_name(false)?;
            Some(name)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Choose the token that terminates the parameter list: `)` when
        // parenthesized; otherwise EOF, a suffix keyword, or `;`.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            // IMMEDIATE form is recorded as the absence of a name.
            immediate: name.is_none(),
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
17529
17530 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17531 let name = self.parse_identifier()?;
17532
17533 let mut data_types = vec![];
17534 if self.consume_token(&Token::LParen) {
17535 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17536 self.expect_token(&Token::RParen)?;
17537 }
17538
17539 self.expect_keyword_is(Keyword::AS)?;
17540 let statement = Box::new(self.parse_statement()?);
17541 Ok(Statement::Prepare {
17542 name,
17543 data_types,
17544 statement,
17545 })
17546 }
17547
    /// Parses a Redshift-style `UNLOAD ('<query>' | (<query>)) TO <target>
    /// [IAM_ROLE ...] [WITH (...)] [legacy copy options...]` statement.
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::UNLOAD)?;
        self.expect_token(&Token::LParen)?;
        // Either a quoted query text or an inline sub-query, not both.
        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
        {
            (None, Some(self.parse_literal_string()?))
        } else {
            (Some(self.parse_query()?), None)
        };
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;
        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
            Some(self.parse_iam_role_kind()?)
        } else {
            None
        };
        let with = self.parse_options(Keyword::WITH)?;
        // Trailing legacy COPY-style options, consumed best-effort.
        let mut options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            options.push(opt);
        }
        Ok(Statement::Unload {
            query,
            query_text,
            to,
            auth,
            with,
            options,
        })
    }
17580
17581 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17582 let temporary = self
17583 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17584 .is_some();
17585 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17586 let table = self.parse_keyword(Keyword::TABLE);
17587 let name = self.parse_object_name(false)?;
17588
17589 Ok(SelectInto {
17590 temporary,
17591 unlogged,
17592 table,
17593 name,
17594 })
17595 }
17596
17597 fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
17598 match self.parse_value()?.value {
17599 v @ Value::SingleQuotedString(_) => Ok(v),
17600 v @ Value::DoubleQuotedString(_) => Ok(v),
17601 v @ Value::Number(_, _) => Ok(v),
17602 v @ Value::Placeholder(_) => Ok(v),
17603 _ => {
17604 self.prev_token();
17605 self.expected("number or string or ? placeholder", self.peek_token())
17606 }
17607 }
17608 }
17609
17610 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17612 let name = self.parse_object_name(false)?;
17613 if self.consume_token(&Token::LParen) {
17614 let value = self.parse_pragma_value()?;
17615 self.expect_token(&Token::RParen)?;
17616 Ok(Statement::Pragma {
17617 name,
17618 value: Some(value),
17619 is_eq: false,
17620 })
17621 } else if self.consume_token(&Token::Eq) {
17622 Ok(Statement::Pragma {
17623 name,
17624 value: Some(self.parse_pragma_value()?),
17625 is_eq: true,
17626 })
17627 } else {
17628 Ok(Statement::Pragma {
17629 name,
17630 value: None,
17631 is_eq: false,
17632 })
17633 }
17634 }
17635
17636 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17638 let extension_name = self.parse_identifier()?;
17639
17640 Ok(Statement::Install { extension_name })
17641 }
17642
    /// Parses the remainder of a `LOAD` statement: either a
    /// `LOAD <extension>` (for dialects supporting extensions) or a
    /// Hive-style `LOAD DATA [LOCAL] INPATH ... INTO TABLE ...`.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        // NOTE(review): `DATA` is consumed before the dialect capability is
        // checked, so a dialect without load-data support still eats the
        // keyword before the error below — confirm this is intended.
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token(),
            )
        }
    }
17673
    /// Parses a ClickHouse `OPTIMIZE TABLE <name> [ON CLUSTER ...]
    /// [PARTITION ...] [FINAL] [DEDUPLICATE [BY <expr>]]` statement after
    /// the `OPTIMIZE` keyword.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let name = self.parse_object_name(false)?;
        let on_cluster = self.parse_optional_on_cluster()?;

        // Either `PARTITION ID <ident>` or `PARTITION <expr>`.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);
        // `DEDUPLICATE` alone deduplicates on all columns; `BY` narrows it.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            on_cluster,
            partition,
            include_final,
            deduplicate,
        })
    }
17712
    /// Parses `CREATE [TEMPORARY] SEQUENCE [IF NOT EXISTS] <name>
    /// [AS <data type>] [options...] [OWNED BY ...]` after the `SEQUENCE`
    /// keyword; `temporary` is determined by the caller.
    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // Optional `AS <data type>`.
        let mut data_type: Option<DataType> = None;
        if self.parse_keywords(&[Keyword::AS]) {
            data_type = Some(self.parse_data_type()?)
        }
        let sequence_options = self.parse_create_sequence_options()?;
        // `OWNED BY NONE` is represented as an ObjectName of "NONE".
        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
            if self.parse_keywords(&[Keyword::NONE]) {
                Some(ObjectName::from(vec![Ident::new("NONE")]))
            } else {
                Some(self.parse_object_name(false)?)
            }
        } else {
            None
        };
        Ok(Statement::CreateSequence {
            temporary,
            if_not_exists,
            name,
            data_type,
            sequence_options,
            owned_by,
        })
    }
17748
    /// Parses the optional clauses of `CREATE SEQUENCE` in this fixed order:
    /// `INCREMENT [BY]`, `[NO] MINVALUE`, `[NO] MAXVALUE`, `START [WITH]`,
    /// `CACHE`, `[NO] CYCLE`. Each clause is parsed at most once.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // INCREMENT [BY] <number> — the bool records whether `BY` was written,
        // presumably so the statement can be displayed as entered (confirm
        // against SequenceOptions' Display impl).
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // MINVALUE <number> | NO MINVALUE (None encodes the NO form).
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        // MAXVALUE <number> | NO MAXVALUE (None encodes the NO form).
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // START [WITH] <number> — the bool records whether `WITH` was written.
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        // CACHE <number>
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // NO CYCLE | CYCLE — NOTE(review): the bool appears to mean "NO was
        // present" (true for `NO CYCLE`); confirm against the Display impl.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
17793
17794 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
17798 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17799 let name = self.parse_object_name(false)?;
17800
17801 let server_type = if self.parse_keyword(Keyword::TYPE) {
17802 Some(self.parse_identifier()?)
17803 } else {
17804 None
17805 };
17806
17807 let version = if self.parse_keyword(Keyword::VERSION) {
17808 Some(self.parse_identifier()?)
17809 } else {
17810 None
17811 };
17812
17813 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
17814 let foreign_data_wrapper = self.parse_object_name(false)?;
17815
17816 let mut options = None;
17817 if self.parse_keyword(Keyword::OPTIONS) {
17818 self.expect_token(&Token::LParen)?;
17819 options = Some(self.parse_comma_separated(|p| {
17820 let key = p.parse_identifier()?;
17821 let value = p.parse_identifier()?;
17822 Ok(CreateServerOption { key, value })
17823 })?);
17824 self.expect_token(&Token::RParen)?;
17825 }
17826
17827 Ok(Statement::CreateServer(CreateServerStatement {
17828 name,
17829 if_not_exists: ine,
17830 server_type,
17831 version,
17832 foreign_data_wrapper,
17833 options,
17834 }))
17835 }
17836
    /// Returns the parser's current position (index) within its token stream.
    pub fn index(&self) -> usize {
        self.index
    }
17841
17842 pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
17843 let ident = self.parse_identifier()?;
17844 self.expect_keyword_is(Keyword::AS)?;
17845
17846 let window_expr = if self.consume_token(&Token::LParen) {
17847 NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
17848 } else if self.dialect.supports_window_clause_named_window_reference() {
17849 NamedWindowExpr::NamedWindow(self.parse_identifier()?)
17850 } else {
17851 return self.expected("(", self.peek_token());
17852 };
17853
17854 Ok(NamedWindowDefinition(ident, window_expr))
17855 }
17856
17857 pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
17858 let name = self.parse_object_name(false)?;
17859 let params = self.parse_optional_procedure_parameters()?;
17860
17861 let language = if self.parse_keyword(Keyword::LANGUAGE) {
17862 Some(self.parse_identifier()?)
17863 } else {
17864 None
17865 };
17866
17867 self.expect_keyword_is(Keyword::AS)?;
17868
17869 let body = self.parse_conditional_statements(&[Keyword::END])?;
17870
17871 Ok(Statement::CreateProcedure {
17872 name,
17873 or_alter,
17874 params,
17875 language,
17876 body,
17877 })
17878 }
17879
17880 pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
17881 let window_name = match self.peek_token().token {
17882 Token::Word(word) if word.keyword == Keyword::NoKeyword => {
17883 self.parse_optional_ident()?
17884 }
17885 _ => None,
17886 };
17887
17888 let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
17889 self.parse_comma_separated(Parser::parse_expr)?
17890 } else {
17891 vec![]
17892 };
17893 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17894 self.parse_comma_separated(Parser::parse_order_by_expr)?
17895 } else {
17896 vec![]
17897 };
17898
17899 let window_frame = if !self.consume_token(&Token::RParen) {
17900 let window_frame = self.parse_window_frame()?;
17901 self.expect_token(&Token::RParen)?;
17902 Some(window_frame)
17903 } else {
17904 None
17905 };
17906 Ok(WindowSpec {
17907 window_name,
17908 partition_by,
17909 order_by,
17910 window_frame,
17911 })
17912 }
17913
17914 pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
17915 let name = self.parse_object_name(false)?;
17916
17917 let has_as = self.parse_keyword(Keyword::AS);
17919
17920 if !has_as {
17921 if self.consume_token(&Token::LParen) {
17923 let options = self.parse_create_type_sql_definition_options()?;
17925 self.expect_token(&Token::RParen)?;
17926 return Ok(Statement::CreateType {
17927 name,
17928 representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
17929 });
17930 }
17931
17932 return Ok(Statement::CreateType {
17934 name,
17935 representation: None,
17936 });
17937 }
17938
17939 if self.parse_keyword(Keyword::ENUM) {
17941 self.parse_create_type_enum(name)
17943 } else if self.parse_keyword(Keyword::RANGE) {
17944 self.parse_create_type_range(name)
17946 } else if self.consume_token(&Token::LParen) {
17947 self.parse_create_type_composite(name)
17949 } else {
17950 self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
17951 }
17952 }
17953
17954 fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17958 if self.consume_token(&Token::RParen) {
17959 return Ok(Statement::CreateType {
17961 name,
17962 representation: Some(UserDefinedTypeRepresentation::Composite {
17963 attributes: vec![],
17964 }),
17965 });
17966 }
17967
17968 let mut attributes = vec![];
17969 loop {
17970 let attr_name = self.parse_identifier()?;
17971 let attr_data_type = self.parse_data_type()?;
17972 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
17973 Some(self.parse_object_name(false)?)
17974 } else {
17975 None
17976 };
17977 attributes.push(UserDefinedTypeCompositeAttributeDef {
17978 name: attr_name,
17979 data_type: attr_data_type,
17980 collation: attr_collation,
17981 });
17982
17983 if !self.consume_token(&Token::Comma) {
17984 break;
17985 }
17986 }
17987 self.expect_token(&Token::RParen)?;
17988
17989 Ok(Statement::CreateType {
17990 name,
17991 representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
17992 })
17993 }
17994
17995 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17999 self.expect_token(&Token::LParen)?;
18000 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18001 self.expect_token(&Token::RParen)?;
18002
18003 Ok(Statement::CreateType {
18004 name,
18005 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
18006 })
18007 }
18008
18009 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18013 self.expect_token(&Token::LParen)?;
18014 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
18015 self.expect_token(&Token::RParen)?;
18016
18017 Ok(Statement::CreateType {
18018 name,
18019 representation: Some(UserDefinedTypeRepresentation::Range { options }),
18020 })
18021 }
18022
18023 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
18025 let keyword = self.parse_one_of_keywords(&[
18026 Keyword::SUBTYPE,
18027 Keyword::SUBTYPE_OPCLASS,
18028 Keyword::COLLATION,
18029 Keyword::CANONICAL,
18030 Keyword::SUBTYPE_DIFF,
18031 Keyword::MULTIRANGE_TYPE_NAME,
18032 ]);
18033
18034 match keyword {
18035 Some(Keyword::SUBTYPE) => {
18036 self.expect_token(&Token::Eq)?;
18037 let data_type = self.parse_data_type()?;
18038 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
18039 }
18040 Some(Keyword::SUBTYPE_OPCLASS) => {
18041 self.expect_token(&Token::Eq)?;
18042 let name = self.parse_object_name(false)?;
18043 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
18044 }
18045 Some(Keyword::COLLATION) => {
18046 self.expect_token(&Token::Eq)?;
18047 let name = self.parse_object_name(false)?;
18048 Ok(UserDefinedTypeRangeOption::Collation(name))
18049 }
18050 Some(Keyword::CANONICAL) => {
18051 self.expect_token(&Token::Eq)?;
18052 let name = self.parse_object_name(false)?;
18053 Ok(UserDefinedTypeRangeOption::Canonical(name))
18054 }
18055 Some(Keyword::SUBTYPE_DIFF) => {
18056 self.expect_token(&Token::Eq)?;
18057 let name = self.parse_object_name(false)?;
18058 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
18059 }
18060 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
18061 self.expect_token(&Token::Eq)?;
18062 let name = self.parse_object_name(false)?;
18063 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
18064 }
18065 _ => self.expected("range option keyword", self.peek_token()),
18066 }
18067 }
18068
18069 fn parse_create_type_sql_definition_options(
18071 &mut self,
18072 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
18073 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
18074 }
18075
18076 fn parse_sql_definition_option(
18078 &mut self,
18079 ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
18080 let keyword = self.parse_one_of_keywords(&[
18081 Keyword::INPUT,
18082 Keyword::OUTPUT,
18083 Keyword::RECEIVE,
18084 Keyword::SEND,
18085 Keyword::TYPMOD_IN,
18086 Keyword::TYPMOD_OUT,
18087 Keyword::ANALYZE,
18088 Keyword::SUBSCRIPT,
18089 Keyword::INTERNALLENGTH,
18090 Keyword::PASSEDBYVALUE,
18091 Keyword::ALIGNMENT,
18092 Keyword::STORAGE,
18093 Keyword::LIKE,
18094 Keyword::CATEGORY,
18095 Keyword::PREFERRED,
18096 Keyword::DEFAULT,
18097 Keyword::ELEMENT,
18098 Keyword::DELIMITER,
18099 Keyword::COLLATABLE,
18100 ]);
18101
18102 match keyword {
18103 Some(Keyword::INPUT) => {
18104 self.expect_token(&Token::Eq)?;
18105 let name = self.parse_object_name(false)?;
18106 Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
18107 }
18108 Some(Keyword::OUTPUT) => {
18109 self.expect_token(&Token::Eq)?;
18110 let name = self.parse_object_name(false)?;
18111 Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
18112 }
18113 Some(Keyword::RECEIVE) => {
18114 self.expect_token(&Token::Eq)?;
18115 let name = self.parse_object_name(false)?;
18116 Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
18117 }
18118 Some(Keyword::SEND) => {
18119 self.expect_token(&Token::Eq)?;
18120 let name = self.parse_object_name(false)?;
18121 Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
18122 }
18123 Some(Keyword::TYPMOD_IN) => {
18124 self.expect_token(&Token::Eq)?;
18125 let name = self.parse_object_name(false)?;
18126 Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
18127 }
18128 Some(Keyword::TYPMOD_OUT) => {
18129 self.expect_token(&Token::Eq)?;
18130 let name = self.parse_object_name(false)?;
18131 Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
18132 }
18133 Some(Keyword::ANALYZE) => {
18134 self.expect_token(&Token::Eq)?;
18135 let name = self.parse_object_name(false)?;
18136 Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
18137 }
18138 Some(Keyword::SUBSCRIPT) => {
18139 self.expect_token(&Token::Eq)?;
18140 let name = self.parse_object_name(false)?;
18141 Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
18142 }
18143 Some(Keyword::INTERNALLENGTH) => {
18144 self.expect_token(&Token::Eq)?;
18145 if self.parse_keyword(Keyword::VARIABLE) {
18146 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18147 UserDefinedTypeInternalLength::Variable,
18148 ))
18149 } else {
18150 let value = self.parse_literal_uint()?;
18151 Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18152 UserDefinedTypeInternalLength::Fixed(value),
18153 ))
18154 }
18155 }
18156 Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
18157 Some(Keyword::ALIGNMENT) => {
18158 self.expect_token(&Token::Eq)?;
18159 let align_keyword = self.parse_one_of_keywords(&[
18160 Keyword::CHAR,
18161 Keyword::INT2,
18162 Keyword::INT4,
18163 Keyword::DOUBLE,
18164 ]);
18165 match align_keyword {
18166 Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18167 Alignment::Char,
18168 )),
18169 Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18170 Alignment::Int2,
18171 )),
18172 Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18173 Alignment::Int4,
18174 )),
18175 Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18176 Alignment::Double,
18177 )),
18178 _ => self.expected(
18179 "alignment value (char, int2, int4, or double)",
18180 self.peek_token(),
18181 ),
18182 }
18183 }
18184 Some(Keyword::STORAGE) => {
18185 self.expect_token(&Token::Eq)?;
18186 let storage_keyword = self.parse_one_of_keywords(&[
18187 Keyword::PLAIN,
18188 Keyword::EXTERNAL,
18189 Keyword::EXTENDED,
18190 Keyword::MAIN,
18191 ]);
18192 match storage_keyword {
18193 Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18194 UserDefinedTypeStorage::Plain,
18195 )),
18196 Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18197 UserDefinedTypeStorage::External,
18198 )),
18199 Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18200 UserDefinedTypeStorage::Extended,
18201 )),
18202 Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18203 UserDefinedTypeStorage::Main,
18204 )),
18205 _ => self.expected(
18206 "storage value (plain, external, extended, or main)",
18207 self.peek_token(),
18208 ),
18209 }
18210 }
18211 Some(Keyword::LIKE) => {
18212 self.expect_token(&Token::Eq)?;
18213 let name = self.parse_object_name(false)?;
18214 Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
18215 }
18216 Some(Keyword::CATEGORY) => {
18217 self.expect_token(&Token::Eq)?;
18218 let category_str = self.parse_literal_string()?;
18219 let category_char = category_str.chars().next().ok_or_else(|| {
18220 ParserError::ParserError(
18221 "CATEGORY value must be a single character".to_string(),
18222 )
18223 })?;
18224 Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
18225 }
18226 Some(Keyword::PREFERRED) => {
18227 self.expect_token(&Token::Eq)?;
18228 let value =
18229 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18230 Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
18231 }
18232 Some(Keyword::DEFAULT) => {
18233 self.expect_token(&Token::Eq)?;
18234 let expr = self.parse_expr()?;
18235 Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
18236 }
18237 Some(Keyword::ELEMENT) => {
18238 self.expect_token(&Token::Eq)?;
18239 let data_type = self.parse_data_type()?;
18240 Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
18241 }
18242 Some(Keyword::DELIMITER) => {
18243 self.expect_token(&Token::Eq)?;
18244 let delimiter = self.parse_literal_string()?;
18245 Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
18246 }
18247 Some(Keyword::COLLATABLE) => {
18248 self.expect_token(&Token::Eq)?;
18249 let value =
18250 self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18251 Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
18252 }
18253 _ => self.expected("SQL definition option keyword", self.peek_token()),
18254 }
18255 }
18256
18257 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
18258 self.expect_token(&Token::LParen)?;
18259 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18260 self.expect_token(&Token::RParen)?;
18261 Ok(idents)
18262 }
18263
18264 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18265 if dialect_of!(self is MySqlDialect | GenericDialect) {
18266 if self.parse_keyword(Keyword::FIRST) {
18267 Ok(Some(MySQLColumnPosition::First))
18268 } else if self.parse_keyword(Keyword::AFTER) {
18269 let ident = self.parse_identifier()?;
18270 Ok(Some(MySQLColumnPosition::After(ident)))
18271 } else {
18272 Ok(None)
18273 }
18274 } else {
18275 Ok(None)
18276 }
18277 }
18278
18279 fn parse_print(&mut self) -> Result<Statement, ParserError> {
18281 Ok(Statement::Print(PrintStatement {
18282 message: Box::new(self.parse_expr()?),
18283 }))
18284 }
18285
18286 fn parse_return(&mut self) -> Result<Statement, ParserError> {
18288 match self.maybe_parse(|p| p.parse_expr())? {
18289 Some(expr) => Ok(Statement::Return(ReturnStatement {
18290 value: Some(ReturnStatementValue::Expr(expr)),
18291 })),
18292 None => Ok(Statement::Return(ReturnStatement { value: None })),
18293 }
18294 }
18295
18296 fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
18300 self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
18301
18302 let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
18303 Some(self.parse_object_name(false)?)
18304 } else {
18305 None
18306 };
18307 self.expect_keyword(Keyword::OPTIONS)?;
18308 self.expect_token(&Token::LParen)?;
18309 let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
18310 self.expect_token(&Token::RParen)?;
18311 self.expect_keyword(Keyword::AS)?;
18312 let query = self.parse_query()?;
18313 Ok(Statement::ExportData(ExportData {
18314 options,
18315 query,
18316 connection,
18317 }))
18318 }
18319
18320 fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
18321 self.expect_keyword(Keyword::VACUUM)?;
18322 let full = self.parse_keyword(Keyword::FULL);
18323 let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
18324 let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
18325 let reindex = self.parse_keyword(Keyword::REINDEX);
18326 let recluster = self.parse_keyword(Keyword::RECLUSTER);
18327 let (table_name, threshold, boost) =
18328 match self.maybe_parse(|p| p.parse_object_name(false))? {
18329 Some(table_name) => {
18330 let threshold = if self.parse_keyword(Keyword::TO) {
18331 let value = self.parse_value()?;
18332 self.expect_keyword(Keyword::PERCENT)?;
18333 Some(value.value)
18334 } else {
18335 None
18336 };
18337 let boost = self.parse_keyword(Keyword::BOOST);
18338 (Some(table_name), threshold, boost)
18339 }
18340 _ => (None, None, false),
18341 };
18342 Ok(Statement::Vacuum(VacuumStatement {
18343 full,
18344 sort_only,
18345 delete_only,
18346 reindex,
18347 recluster,
18348 table_name,
18349 threshold,
18350 boost,
18351 }))
18352 }
18353
    /// Consumes the parser and returns its underlying token buffer
    /// (tokens together with their span information).
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
18358
18359 fn peek_sub_query(&mut self) -> bool {
18361 if self
18362 .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18363 .is_some()
18364 {
18365 self.prev_token();
18366 return true;
18367 }
18368 false
18369 }
18370
18371 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
18372 let show_in;
18373 let mut filter_position = None;
18374 if self.dialect.supports_show_like_before_in() {
18375 if let Some(filter) = self.parse_show_statement_filter()? {
18376 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
18377 }
18378 show_in = self.maybe_parse_show_stmt_in()?;
18379 } else {
18380 show_in = self.maybe_parse_show_stmt_in()?;
18381 if let Some(filter) = self.parse_show_statement_filter()? {
18382 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
18383 }
18384 }
18385 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
18386 let limit = self.maybe_parse_show_stmt_limit()?;
18387 let from = self.maybe_parse_show_stmt_from()?;
18388 Ok(ShowStatementOptions {
18389 filter_position,
18390 show_in,
18391 starts_with,
18392 limit,
18393 limit_from: from,
18394 })
18395 }
18396
    /// Parses an optional `FROM ...` / `IN ...` scope clause of a SHOW
    /// statement; returns `Ok(None)` when neither introducer is present.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            // Unreachable in practice: only FROM/IN are requested above.
            _ => return self.expected("FROM or IN", self.peek_token()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // A bare `IN DATABASE` / `IN SCHEMA` (no name) directly followed
            // by `STARTS WITH` or `LIMIT`: the keyword only names the parent
            // kind; there is no parent object name.
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            // A parent-kind keyword, optionally followed by an object name.
            Some(parent_kw) => {
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token(),
                        )
                    }
                }
            }
            // No parent-kind keyword: the clause names the parent directly.
            // A second FROM/IN prepends a qualifier to the name already read
            // (presumably e.g. `SHOW COLUMNS FROM tbl FROM db` — verify
            // against callers).
            None => {
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
18466
18467 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18468 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18469 Ok(Some(self.parse_value()?.value))
18470 } else {
18471 Ok(None)
18472 }
18473 }
18474
18475 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18476 if self.parse_keyword(Keyword::LIMIT) {
18477 Ok(self.parse_limit()?)
18478 } else {
18479 Ok(None)
18480 }
18481 }
18482
18483 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18484 if self.parse_keyword(Keyword::FROM) {
18485 Ok(Some(self.parse_value()?.value))
18486 } else {
18487 Ok(None)
18488 }
18489 }
18490
    /// Returns true when the parser is currently in the `ColumnDefinition`
    /// parser state (i.e. while parsing a column definition).
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
18494
18495 pub(crate) fn parse_key_value_options(
18500 &mut self,
18501 parenthesized: bool,
18502 end_words: &[Keyword],
18503 ) -> Result<KeyValueOptions, ParserError> {
18504 let mut options: Vec<KeyValueOption> = Vec::new();
18505 let mut delimiter = KeyValueOptionsDelimiter::Space;
18506 if parenthesized {
18507 self.expect_token(&Token::LParen)?;
18508 }
18509 loop {
18510 match self.next_token().token {
18511 Token::RParen => {
18512 if parenthesized {
18513 break;
18514 } else {
18515 return self.expected(" another option or EOF", self.peek_token());
18516 }
18517 }
18518 Token::EOF => break,
18519 Token::Comma => {
18520 delimiter = KeyValueOptionsDelimiter::Comma;
18521 continue;
18522 }
18523 Token::Word(w) if !end_words.contains(&w.keyword) => {
18524 options.push(self.parse_key_value_option(&w)?)
18525 }
18526 Token::Word(w) if end_words.contains(&w.keyword) => {
18527 self.prev_token();
18528 break;
18529 }
18530 _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
18531 };
18532 }
18533
18534 Ok(KeyValueOptions { delimiter, options })
18535 }
18536
    /// Parses the value of a single `key = value` option; the key word has
    /// already been consumed by the caller and is passed in as `key`.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        match self.peek_token().token {
            // Quoted string value.
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
            }),
            // Boolean literal value (TRUE/FALSE).
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
                })
            }
            // Numeric literal value.
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
            }),
            // Any other bare word is stored verbatim as a placeholder value.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
                        word.value.clone(),
                    )),
                })
            }
            // Parenthesized value: first try a (possibly empty) list of plain
            // values; if that fails, backtrack and parse a nested
            // parenthesized key/value option list instead.
            Token::LParen => {
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => {
                        let values = values.into_iter().map(|v| v.value).collect();
                        Ok(KeyValueOption {
                            option_name: key.value.clone(),
                            option_value: KeyValueOptionKind::Multi(values),
                        })
                    }
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected("expected option value", self.peek_token()),
        }
    }
18597
18598 fn parse_reset(&mut self) -> Result<Statement, ParserError> {
18600 if self.parse_keyword(Keyword::ALL) {
18601 return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
18602 }
18603
18604 let obj = self.parse_object_name(false)?;
18605 Ok(Statement::Reset(ResetStatement {
18606 reset: Reset::ConfigurationParameter(obj),
18607 }))
18608 }
18609}
18610
18611fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18612 if let Some(prefix) = prefix {
18613 Expr::Prefixed {
18614 prefix,
18615 value: Box::new(expr),
18616 }
18617 } else {
18618 expr
18619 }
18620}
18621
impl Word {
    /// Creates an [`Ident`] with the given span, cloning this word's value
    /// and quote style. Deprecated in favor of the consuming [`Word::into_ident`].
    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
    pub fn to_ident(&self, span: Span) -> Ident {
        Ident {
            value: self.value.clone(),
            quote_style: self.quote_style,
            span,
        }
    }

    /// Converts this word into an [`Ident`] with the given span, consuming
    /// the word and reusing its owned value (no clone).
    pub fn into_ident(self, span: Span) -> Ident {
        Ident {
            value: self.value,
            quote_style: self.quote_style,
            span,
        }
    }
}
18641
18642#[cfg(test)]
18643mod tests {
18644 use crate::test_utils::{all_dialects, TestedDialects};
18645
18646 use super::*;
18647
    /// Walks the token stream back and forth, verifying that `prev_token`
    /// undoes exactly one `next_token`, including at and beyond EOF.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            // Rewinding from the EOF position yields the last real token again.
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // Once exhausted, `next_token` keeps returning EOF...
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            // ...and `prev_token` past EOF must not panic.
            parser.prev_token();
        });
    }
18668
    /// Verifies that `peek_tokens` returns upcoming tokens without consuming
    /// them, and pads with EOF when peeking past the end of the stream.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            // Peeking again must yield the same (unconsumed) tokens.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Consume `SELECT foo AS bar`.
            for _ in 0..4 {
                parser.next_token();
            }

            // Peeking past the end pads the result with EOF tokens.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
18713
18714 #[cfg(test)]
18715 mod test_parse_data_type {
18716 use crate::ast::{
18717 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
18718 };
18719 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
18720 use crate::test_utils::TestedDialects;
18721
        /// Asserts that `$input` parses to `$expected_type` and that the
        /// resulting AST node prints back to the exact input text (round-trip).
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
18731
        /// Round-trip tests for ANSI character string types — CHARACTER,
        /// CHAR, CHARACTER VARYING, CHAR VARYING, VARCHAR — with optional
        /// length and CHARACTERS/OCTETS length units.
        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            // CHARACTER [ ( <length> [ CHARACTERS | OCTETS ] ) ]
            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            // CHAR [ ( <length> [ CHARACTERS | OCTETS ] ) ]
            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            // CHARACTER VARYING ( <length> [ CHARACTERS | OCTETS ] )
            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            // CHAR VARYING ( <length> [ CHARACTERS | OCTETS ] )
            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            // VARCHAR ( <length> )
            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
18859
18860 #[test]
18861 fn test_ansii_character_large_object_types() {
18862 let dialect =
18864 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18865
18866 test_parse_data_type!(
18867 dialect,
18868 "CHARACTER LARGE OBJECT",
18869 DataType::CharacterLargeObject(None)
18870 );
18871 test_parse_data_type!(
18872 dialect,
18873 "CHARACTER LARGE OBJECT(20)",
18874 DataType::CharacterLargeObject(Some(20))
18875 );
18876
18877 test_parse_data_type!(
18878 dialect,
18879 "CHAR LARGE OBJECT",
18880 DataType::CharLargeObject(None)
18881 );
18882 test_parse_data_type!(
18883 dialect,
18884 "CHAR LARGE OBJECT(20)",
18885 DataType::CharLargeObject(Some(20))
18886 );
18887
18888 test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
18889 test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
18890 }
18891
18892 #[test]
18893 fn test_parse_custom_types() {
18894 let dialect =
18895 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18896
18897 test_parse_data_type!(
18898 dialect,
18899 "GEOMETRY",
18900 DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
18901 );
18902
18903 test_parse_data_type!(
18904 dialect,
18905 "GEOMETRY(POINT)",
18906 DataType::Custom(
18907 ObjectName::from(vec!["GEOMETRY".into()]),
18908 vec!["POINT".to_string()]
18909 )
18910 );
18911
18912 test_parse_data_type!(
18913 dialect,
18914 "GEOMETRY(POINT, 4326)",
18915 DataType::Custom(
18916 ObjectName::from(vec!["GEOMETRY".into()]),
18917 vec!["POINT".to_string(), "4326".to_string()]
18918 )
18919 );
18920 }
18921
18922 #[test]
18923 fn test_ansii_exact_numeric_types() {
18924 let dialect = TestedDialects::new(vec![
18926 Box::new(GenericDialect {}),
18927 Box::new(AnsiDialect {}),
18928 Box::new(PostgreSqlDialect {}),
18929 ]);
18930
18931 test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
18932
18933 test_parse_data_type!(
18934 dialect,
18935 "NUMERIC(2)",
18936 DataType::Numeric(ExactNumberInfo::Precision(2))
18937 );
18938
18939 test_parse_data_type!(
18940 dialect,
18941 "NUMERIC(2,10)",
18942 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
18943 );
18944
18945 test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
18946
18947 test_parse_data_type!(
18948 dialect,
18949 "DECIMAL(2)",
18950 DataType::Decimal(ExactNumberInfo::Precision(2))
18951 );
18952
18953 test_parse_data_type!(
18954 dialect,
18955 "DECIMAL(2,10)",
18956 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
18957 );
18958
18959 test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
18960
18961 test_parse_data_type!(
18962 dialect,
18963 "DEC(2)",
18964 DataType::Dec(ExactNumberInfo::Precision(2))
18965 );
18966
18967 test_parse_data_type!(
18968 dialect,
18969 "DEC(2,10)",
18970 DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
18971 );
18972
18973 test_parse_data_type!(
18975 dialect,
18976 "NUMERIC(10,-2)",
18977 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
18978 );
18979
18980 test_parse_data_type!(
18981 dialect,
18982 "DECIMAL(1000,-10)",
18983 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
18984 );
18985
18986 test_parse_data_type!(
18987 dialect,
18988 "DEC(5,-1000)",
18989 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
18990 );
18991
18992 test_parse_data_type!(
18993 dialect,
18994 "NUMERIC(10,-5)",
18995 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
18996 );
18997
18998 test_parse_data_type!(
18999 dialect,
19000 "DECIMAL(20,-10)",
19001 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
19002 );
19003
19004 test_parse_data_type!(
19005 dialect,
19006 "DEC(5,-2)",
19007 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
19008 );
19009
19010 dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
19011 let data_type = parser.parse_data_type().unwrap();
19012 assert_eq!(
19013 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
19014 data_type
19015 );
19016 assert_eq!("NUMERIC(10,5)", data_type.to_string());
19018 });
19019 }
19020
19021 #[test]
19022 fn test_ansii_date_type() {
19023 let dialect =
19025 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19026
19027 test_parse_data_type!(dialect, "DATE", DataType::Date);
19028
19029 test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
19030
19031 test_parse_data_type!(
19032 dialect,
19033 "TIME(6)",
19034 DataType::Time(Some(6), TimezoneInfo::None)
19035 );
19036
19037 test_parse_data_type!(
19038 dialect,
19039 "TIME WITH TIME ZONE",
19040 DataType::Time(None, TimezoneInfo::WithTimeZone)
19041 );
19042
19043 test_parse_data_type!(
19044 dialect,
19045 "TIME(6) WITH TIME ZONE",
19046 DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
19047 );
19048
19049 test_parse_data_type!(
19050 dialect,
19051 "TIME WITHOUT TIME ZONE",
19052 DataType::Time(None, TimezoneInfo::WithoutTimeZone)
19053 );
19054
19055 test_parse_data_type!(
19056 dialect,
19057 "TIME(6) WITHOUT TIME ZONE",
19058 DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
19059 );
19060
19061 test_parse_data_type!(
19062 dialect,
19063 "TIMESTAMP",
19064 DataType::Timestamp(None, TimezoneInfo::None)
19065 );
19066
19067 test_parse_data_type!(
19068 dialect,
19069 "TIMESTAMP(22)",
19070 DataType::Timestamp(Some(22), TimezoneInfo::None)
19071 );
19072
19073 test_parse_data_type!(
19074 dialect,
19075 "TIMESTAMP(22) WITH TIME ZONE",
19076 DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
19077 );
19078
19079 test_parse_data_type!(
19080 dialect,
19081 "TIMESTAMP(33) WITHOUT TIME ZONE",
19082 DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
19083 );
19084 }
19085 }
19086
19087 #[test]
19088 fn test_parse_schema_name() {
19089 macro_rules! test_parse_schema_name {
19091 ($input:expr, $expected_name:expr $(,)?) => {{
19092 all_dialects().run_parser_method(&*$input, |parser| {
19093 let schema_name = parser.parse_schema_name().unwrap();
19094 assert_eq!(schema_name, $expected_name);
19096 assert_eq!(schema_name.to_string(), $input.to_string());
19098 });
19099 }};
19100 }
19101
19102 let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19103 let dummy_authorization = Ident::new("dummy_authorization");
19104
19105 test_parse_schema_name!(
19106 format!("{dummy_name}"),
19107 SchemaName::Simple(dummy_name.clone())
19108 );
19109
19110 test_parse_schema_name!(
19111 format!("AUTHORIZATION {dummy_authorization}"),
19112 SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19113 );
19114 test_parse_schema_name!(
19115 format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19116 SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19117 );
19118 }
19119
19120 #[test]
19121 fn mysql_parse_index_table_constraint() {
19122 macro_rules! test_parse_table_constraint {
19123 ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19124 $dialect.run_parser_method(&*$input, |parser| {
19125 let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19126 assert_eq!(constraint, $expected);
19128 assert_eq!(constraint.to_string(), $input.to_string());
19130 });
19131 }};
19132 }
19133
19134 fn mk_expected_col(name: &str) -> IndexColumn {
19135 IndexColumn {
19136 column: OrderByExpr {
19137 expr: Expr::Identifier(name.into()),
19138 options: OrderByOptions {
19139 asc: None,
19140 nulls_first: None,
19141 },
19142 with_fill: None,
19143 },
19144 operator_class: None,
19145 }
19146 }
19147
19148 let dialect =
19149 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19150
19151 test_parse_table_constraint!(
19152 dialect,
19153 "INDEX (c1)",
19154 IndexConstraint {
19155 display_as_key: false,
19156 name: None,
19157 index_type: None,
19158 columns: vec![mk_expected_col("c1")],
19159 index_options: vec![],
19160 }
19161 .into()
19162 );
19163
19164 test_parse_table_constraint!(
19165 dialect,
19166 "KEY (c1)",
19167 IndexConstraint {
19168 display_as_key: true,
19169 name: None,
19170 index_type: None,
19171 columns: vec![mk_expected_col("c1")],
19172 index_options: vec![],
19173 }
19174 .into()
19175 );
19176
19177 test_parse_table_constraint!(
19178 dialect,
19179 "INDEX 'index' (c1, c2)",
19180 TableConstraint::Index(IndexConstraint {
19181 display_as_key: false,
19182 name: Some(Ident::with_quote('\'', "index")),
19183 index_type: None,
19184 columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
19185 index_options: vec![],
19186 })
19187 );
19188
19189 test_parse_table_constraint!(
19190 dialect,
19191 "INDEX USING BTREE (c1)",
19192 IndexConstraint {
19193 display_as_key: false,
19194 name: None,
19195 index_type: Some(IndexType::BTree),
19196 columns: vec![mk_expected_col("c1")],
19197 index_options: vec![],
19198 }
19199 .into()
19200 );
19201
19202 test_parse_table_constraint!(
19203 dialect,
19204 "INDEX USING HASH (c1)",
19205 IndexConstraint {
19206 display_as_key: false,
19207 name: None,
19208 index_type: Some(IndexType::Hash),
19209 columns: vec![mk_expected_col("c1")],
19210 index_options: vec![],
19211 }
19212 .into()
19213 );
19214
19215 test_parse_table_constraint!(
19216 dialect,
19217 "INDEX idx_name USING BTREE (c1)",
19218 IndexConstraint {
19219 display_as_key: false,
19220 name: Some(Ident::new("idx_name")),
19221 index_type: Some(IndexType::BTree),
19222 columns: vec![mk_expected_col("c1")],
19223 index_options: vec![],
19224 }
19225 .into()
19226 );
19227
19228 test_parse_table_constraint!(
19229 dialect,
19230 "INDEX idx_name USING HASH (c1)",
19231 IndexConstraint {
19232 display_as_key: false,
19233 name: Some(Ident::new("idx_name")),
19234 index_type: Some(IndexType::Hash),
19235 columns: vec![mk_expected_col("c1")],
19236 index_options: vec![],
19237 }
19238 .into()
19239 );
19240 }
19241
19242 #[test]
19243 fn test_tokenizer_error_loc() {
19244 let sql = "foo '";
19245 let ast = Parser::parse_sql(&GenericDialect, sql);
19246 assert_eq!(
19247 ast,
19248 Err(ParserError::TokenizerError(
19249 "Unterminated string literal at Line: 1, Column: 5".to_string()
19250 ))
19251 );
19252 }
19253
19254 #[test]
19255 fn test_parser_error_loc() {
19256 let sql = "SELECT this is a syntax error";
19257 let ast = Parser::parse_sql(&GenericDialect, sql);
19258 assert_eq!(
19259 ast,
19260 Err(ParserError::ParserError(
19261 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
19262 .to_string()
19263 ))
19264 );
19265 }
19266
19267 #[test]
19268 fn test_nested_explain_error() {
19269 let sql = "EXPLAIN EXPLAIN SELECT 1";
19270 let ast = Parser::parse_sql(&GenericDialect, sql);
19271 assert_eq!(
19272 ast,
19273 Err(ParserError::ParserError(
19274 "Explain must be root of the plan".to_string()
19275 ))
19276 );
19277 }
19278
19279 #[test]
19280 fn test_parse_multipart_identifier_positive() {
19281 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
19282
19283 let expected = vec![
19285 Ident {
19286 value: "CATALOG".to_string(),
19287 quote_style: None,
19288 span: Span::empty(),
19289 },
19290 Ident {
19291 value: "F(o)o. \"bar".to_string(),
19292 quote_style: Some('"'),
19293 span: Span::empty(),
19294 },
19295 Ident {
19296 value: "table".to_string(),
19297 quote_style: None,
19298 span: Span::empty(),
19299 },
19300 ];
19301 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
19302 let actual = parser.parse_multipart_identifier().unwrap();
19303 assert_eq!(expected, actual);
19304 });
19305
19306 let expected = vec![
19308 Ident {
19309 value: "CATALOG".to_string(),
19310 quote_style: None,
19311 span: Span::empty(),
19312 },
19313 Ident {
19314 value: "table".to_string(),
19315 quote_style: None,
19316 span: Span::empty(),
19317 },
19318 ];
19319 dialect.run_parser_method("CATALOG . table", |parser| {
19320 let actual = parser.parse_multipart_identifier().unwrap();
19321 assert_eq!(expected, actual);
19322 });
19323 }
19324
19325 #[test]
19326 fn test_parse_multipart_identifier_negative() {
19327 macro_rules! test_parse_multipart_identifier_error {
19328 ($input:expr, $expected_err:expr $(,)?) => {{
19329 all_dialects().run_parser_method(&*$input, |parser| {
19330 let actual_err = parser.parse_multipart_identifier().unwrap_err();
19331 assert_eq!(actual_err.to_string(), $expected_err);
19332 });
19333 }};
19334 }
19335
19336 test_parse_multipart_identifier_error!(
19337 "",
19338 "sql parser error: Empty input when parsing identifier",
19339 );
19340
19341 test_parse_multipart_identifier_error!(
19342 "*schema.table",
19343 "sql parser error: Unexpected token in identifier: *",
19344 );
19345
19346 test_parse_multipart_identifier_error!(
19347 "schema.table*",
19348 "sql parser error: Unexpected token in identifier: *",
19349 );
19350
19351 test_parse_multipart_identifier_error!(
19352 "schema.table.",
19353 "sql parser error: Trailing period in identifier",
19354 );
19355
19356 test_parse_multipart_identifier_error!(
19357 "schema.*",
19358 "sql parser error: Unexpected token following period in identifier: *",
19359 );
19360 }
19361
19362 #[test]
19363 fn test_mysql_partition_selection() {
19364 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
19365 let expected = vec!["p0", "p2"];
19366
19367 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
19368 assert_eq!(ast.len(), 1);
19369 if let Statement::Query(v) = &ast[0] {
19370 if let SetExpr::Select(select) = &*v.body {
19371 assert_eq!(select.from.len(), 1);
19372 let from: &TableWithJoins = &select.from[0];
19373 let table_factor = &from.relation;
19374 if let TableFactor::Table { partitions, .. } = table_factor {
19375 let actual: Vec<&str> = partitions
19376 .iter()
19377 .map(|ident| ident.value.as_str())
19378 .collect();
19379 assert_eq!(expected, actual);
19380 }
19381 }
19382 } else {
19383 panic!("fail to parse mysql partition selection");
19384 }
19385 }
19386
19387 #[test]
19388 fn test_replace_into_placeholders() {
19389 let sql = "REPLACE INTO t (a) VALUES (&a)";
19390
19391 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19392 }
19393
19394 #[test]
19395 fn test_replace_into_set_placeholder() {
19396 let sql = "REPLACE INTO t SET ?";
19397
19398 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19399 }
19400
19401 #[test]
19402 fn test_replace_incomplete() {
19403 let sql = r#"REPLACE"#;
19404
19405 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
19406 }
19407
19408 #[test]
19409 fn test_placeholder_invalid_whitespace() {
19410 for w in [" ", "/*invalid*/"] {
19411 let sql = format!("\nSELECT\n :{w}fooBar");
19412 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
19413 }
19414 }
19415}