1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::{
36 key_value_options::{
37 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
38 },
39 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
40};
41use crate::ast::Statement::CreatePolicy;
42use crate::ast::*;
43use crate::dialect::*;
44use crate::keywords::{Keyword, ALL_KEYWORDS};
45use crate::tokenizer::*;
46use sqlparser::parser::ParserState::ColumnDefinition;
47
/// Errors that can arise while parsing SQL text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The tokenizer failed before parsing could begin; carries the rendered message.
    TokenizerError(String),
    /// The token stream did not match the expected grammar.
    ParserError(String),
    /// The nesting depth tracked by `RecursionCounter` was exhausted.
    RecursionLimitExceeded,
}
54
// Builds an `Err(ParserError::ParserError)` whose message is `$MSG`
// followed directly by the rendered location `$loc`.
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
61
// Additional parsing routines split into submodules.
mod alter;
mod merge;
64
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks how many more levels of nesting the parser may descend into.
    /// The count lives in an `Rc<Cell<_>>` shared with every guard handed out.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a counter permitting `remaining_depth` nested descents.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consumes one level of depth and returns a guard that restores it
        /// on drop, or `ParserError::RecursionLimitExceeded` once the count
        /// reaches zero.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(depth) => {
                    self.remaining_depth.set(depth);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// Guard returned by `try_decrease`; dropping it gives the consumed
    /// depth level back to the shared counter.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let restored = self.remaining_depth.get() + 1;
            self.remaining_depth.set(restored);
        }
    }
}
131
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op recursion tracker for `no_std` builds: no depth is recorded,
    /// so the recursion limit is never enforced.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The requested depth is ignored in this build.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; no accounting is performed.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized stand-in for the `std` depth guard.
    pub struct DepthGuard {}
}
152
/// Whether a clause being parsed may be omitted.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
158
/// Whether the `LATERAL` keyword was present.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
163
/// Result of parsing something that may be an expression or a wildcard.
pub enum WildcardExpr {
    /// An ordinary expression (no wildcard).
    Expr(Expr),
    /// A qualified wildcard such as `alias.*`.
    QualifiedWildcard(ObjectName),
    /// A bare `*`.
    Wildcard,
}
169
impl From<TokenizerError> for ParserError {
    // Tokenizer failures surface as `ParserError::TokenizerError`, keeping
    // only the rendered message.
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
175
176impl fmt::Display for ParserError {
177 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
178 write!(
179 f,
180 "sql parser error: {}",
181 match self {
182 ParserError::TokenizerError(s) => s,
183 ParserError::ParserError(s) => s,
184 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
185 }
186 )
187 }
188}
189
// `std::error::Error` is only available (and only implemented) with `std`.
#[cfg(feature = "std")]
impl std::error::Error for ParserError {}
192
/// Default nesting depth allowed before `RecursionLimitExceeded` is returned.
const DEFAULT_REMAINING_DEPTH: usize = 50;
195
/// Sentinel returned when the parser peeks/reads past the end of the token
/// stream; its span points at line 0, column 0.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
204
/// Newtype recording whether a trailing closing bracket was matched.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
224
/// Knobs that alter how the parser treats its input.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept a trailing comma at the end of comma-separated lists.
    pub trailing_commas: bool,
    /// Unescape string literals during tokenization (defaults to `true`).
    pub unescape: bool,
    /// Require a `;` between consecutive statements (defaults to `true`).
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        ParserOptions {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Creates options with the default settings.
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style toggle for trailing-comma acceptance.
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Builder-style toggle for string-literal unescaping.
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }
}
276
/// Internal parser mode; some constructs are parsed differently depending
/// on the current state.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Parsing inside a `CONNECT BY` clause.
    ConnectBy,
    /// Parsing a column definition.
    ColumnDefinition,
}
292
/// SQL parser: walks a tokenized statement and produces an AST.
pub struct Parser<'a> {
    /// The token stream being parsed.
    tokens: Vec<TokenWithSpan>,
    /// Index of the first unprocessed token in `tokens`.
    index: usize,
    /// Current mode (see `ParserState`).
    state: ParserState,
    /// Dialect whose rules drive parsing decisions.
    dialect: &'a dyn Dialect,
    /// Behavior toggles (trailing commas, unescaping, delimiters).
    options: ParserOptions,
    /// Enforces the recursion-depth limit.
    recursion_counter: RecursionCounter,
}
347
348impl<'a> Parser<'a> {
    /// Creates a parser for `dialect` with an empty token stream, default
    /// options (trailing-comma support taken from the dialect) and the
    /// default recursion limit.
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }
374
    /// Replaces the recursion limit used to reject deeply nested input.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
401
    /// Replaces the parser options wholesale.
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
428
    /// Installs a token stream (with source spans) and resets the read
    /// position to the beginning.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
435
436 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
438 let tokens_with_locations: Vec<TokenWithSpan> = tokens
440 .into_iter()
441 .map(|token| TokenWithSpan {
442 token,
443 span: Span::empty(),
444 })
445 .collect();
446 self.with_tokens_with_locations(tokens_with_locations)
447 }
448
449 pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
456 debug!("Parsing sql '{sql}'...");
457 let tokens = Tokenizer::new(self.dialect, sql)
458 .with_unescape(self.options.unescape)
459 .tokenize_with_location()?;
460 Ok(self.with_tokens_with_locations(tokens))
461 }
462
    /// Parses a semicolon-separated sequence of statements until EOF.
    ///
    /// Runs of semicolons are tolerated. A delimiter between statements is
    /// only enforced when `require_semicolon_stmt_delimiter` is set. A bare
    /// `END` where a delimiter is expected also terminates the sequence.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow any run of semicolons between statements.
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some dialects allow statements without a `;` between them.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // `END` after a complete statement is left for the caller —
                // presumably closing an enclosing block; confirm with callers.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
513
514 pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
530 Parser::new(dialect).try_with_sql(sql)?.parse_statements()
531 }
532
    /// Parses a single statement, dispatching on its leading token/keyword.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Statements can nest (e.g. inside IF/WHILE); bound the depth.
        let _guard = self.recursion_counter.try_decrease()?;

        // Give the dialect a chance to handle custom statements first.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                // Sub-parsers below expect to see their own leading keyword,
                // so it is pushed back before delegating.
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt()
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt()
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while()
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt()
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                // These sub-parsers receive the already-consumed keyword token.
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token),
                // Statements gated on dialect capabilities.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset(),
                _ => self.expected("an SQL statement", next_token),
            },
            // A statement may begin with a parenthesized query.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
670
    /// Parses a `CASE` *statement* (not the `CASE` expression):
    /// `CASE [operand] WHEN ... [ELSE ...] END [CASE]`.
    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // No operand before the first WHEN means the "searched" CASE form.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the AST records the last keyword
        // actually consumed.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(Statement::Case(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        }))
    }
707
    /// Parses an `IF` statement:
    /// `IF cond THEN ... [ELSEIF cond THEN ...]* [ELSE ...] END IF`.
    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // The mandatory closing `END IF`; the `IF` token is kept in the AST.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(Statement::If(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        }))
    }
747
748 fn parse_while(&mut self) -> Result<Statement, ParserError> {
752 self.expect_keyword_is(Keyword::WHILE)?;
753 let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
754
755 Ok(Statement::While(WhileStatement { while_block }))
756 }
757
758 fn parse_conditional_statement_block(
766 &mut self,
767 terminal_keywords: &[Keyword],
768 ) -> Result<ConditionalStatementBlock, ParserError> {
769 let start_token = self.get_current_token().clone(); let mut then_token = None;
771
772 let condition = match &start_token.token {
773 Token::Word(w) if w.keyword == Keyword::ELSE => None,
774 Token::Word(w) if w.keyword == Keyword::WHILE => {
775 let expr = self.parse_expr()?;
776 Some(expr)
777 }
778 _ => {
779 let expr = self.parse_expr()?;
780 then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
781 Some(expr)
782 }
783 };
784
785 let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
786
787 Ok(ConditionalStatementBlock {
788 start_token: AttachedToken(start_token),
789 condition,
790 then_token,
791 conditional_statements,
792 })
793 }
794
795 pub(crate) fn parse_conditional_statements(
798 &mut self,
799 terminal_keywords: &[Keyword],
800 ) -> Result<ConditionalStatements, ParserError> {
801 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
802 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
803 let statements = self.parse_statement_list(terminal_keywords)?;
804 let end_token = self.expect_keyword(Keyword::END)?;
805
806 ConditionalStatements::BeginEnd(BeginEndStatements {
807 begin_token: AttachedToken(begin_token),
808 statements,
809 end_token: AttachedToken(end_token),
810 })
811 } else {
812 ConditionalStatements::Sequence {
813 statements: self.parse_statement_list(terminal_keywords)?,
814 }
815 };
816 Ok(conditional_statements)
817 }
818
    /// Parses `RAISE [USING MESSAGE = expr | expr]`.
    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // A bare RAISE has no value; otherwise any expression is accepted
            // (maybe_parse rolls back if none is present).
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(Statement::Raise(RaiseStatement { value }))
    }
834
835 pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
836 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
837
838 self.expect_keyword_is(Keyword::ON)?;
839 let token = self.next_token();
840
841 let (object_type, object_name) = match token.token {
842 Token::Word(w) if w.keyword == Keyword::COLUMN => {
843 (CommentObject::Column, self.parse_object_name(false)?)
844 }
845 Token::Word(w) if w.keyword == Keyword::TABLE => {
846 (CommentObject::Table, self.parse_object_name(false)?)
847 }
848 Token::Word(w) if w.keyword == Keyword::EXTENSION => {
849 (CommentObject::Extension, self.parse_object_name(false)?)
850 }
851 Token::Word(w) if w.keyword == Keyword::SCHEMA => {
852 (CommentObject::Schema, self.parse_object_name(false)?)
853 }
854 Token::Word(w) if w.keyword == Keyword::DATABASE => {
855 (CommentObject::Database, self.parse_object_name(false)?)
856 }
857 Token::Word(w) if w.keyword == Keyword::USER => {
858 (CommentObject::User, self.parse_object_name(false)?)
859 }
860 Token::Word(w) if w.keyword == Keyword::ROLE => {
861 (CommentObject::Role, self.parse_object_name(false)?)
862 }
863 _ => self.expected("comment object_type", token)?,
864 };
865
866 self.expect_keyword_is(Keyword::IS)?;
867 let comment = if self.parse_keyword(Keyword::NULL) {
868 None
869 } else {
870 Some(self.parse_literal_string()?)
871 };
872 Ok(Statement::Comment {
873 object_type,
874 object_name,
875 comment,
876 if_exists,
877 })
878 }
879
880 pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
881 let mut channel = None;
882 let mut tables: Vec<ObjectName> = vec![];
883 let mut read_lock = false;
884 let mut export = false;
885
886 if !dialect_of!(self is MySqlDialect | GenericDialect) {
887 return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
888 }
889
890 let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
891 Some(FlushLocation::NoWriteToBinlog)
892 } else if self.parse_keyword(Keyword::LOCAL) {
893 Some(FlushLocation::Local)
894 } else {
895 None
896 };
897
898 let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
899 FlushType::BinaryLogs
900 } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
901 FlushType::EngineLogs
902 } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
903 FlushType::ErrorLogs
904 } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
905 FlushType::GeneralLogs
906 } else if self.parse_keywords(&[Keyword::HOSTS]) {
907 FlushType::Hosts
908 } else if self.parse_keyword(Keyword::PRIVILEGES) {
909 FlushType::Privileges
910 } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
911 FlushType::OptimizerCosts
912 } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
913 if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
914 channel = Some(self.parse_object_name(false).unwrap().to_string());
915 }
916 FlushType::RelayLogs
917 } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
918 FlushType::SlowLogs
919 } else if self.parse_keyword(Keyword::STATUS) {
920 FlushType::Status
921 } else if self.parse_keyword(Keyword::USER_RESOURCES) {
922 FlushType::UserResources
923 } else if self.parse_keywords(&[Keyword::LOGS]) {
924 FlushType::Logs
925 } else if self.parse_keywords(&[Keyword::TABLES]) {
926 loop {
927 let next_token = self.next_token();
928 match &next_token.token {
929 Token::Word(w) => match w.keyword {
930 Keyword::WITH => {
931 read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
932 }
933 Keyword::FOR => {
934 export = self.parse_keyword(Keyword::EXPORT);
935 }
936 Keyword::NoKeyword => {
937 self.prev_token();
938 tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
939 }
940 _ => {}
941 },
942 _ => {
943 break;
944 }
945 }
946 }
947
948 FlushType::Tables
949 } else {
950 return self.expected(
951 "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
952 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
953 self.peek_token(),
954 );
955 };
956
957 Ok(Statement::Flush {
958 object_type,
959 location,
960 channel,
961 read_lock,
962 export,
963 tables,
964 })
965 }
966
    /// Parses `MSCK [REPAIR] TABLE name [{ADD | DROP | SYNC} PARTITIONS]`.
    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition action is optional; maybe_parse rolls back unless
        // the trailing PARTITIONS keyword is actually present.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        }
        .into())
    }
994
    /// Parses `TRUNCATE [TABLE] [ONLY] name [, ...] [PARTITION (...)]`
    /// with Postgres-style `RESTART|CONTINUE IDENTITY` and `CASCADE|RESTRICT`
    /// options, plus an optional `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);

        // Each target may carry its own ONLY flag.
        let table_names = self
            .parse_comma_separated(|p| {
                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
            })?
            .into_iter()
            .map(|(only, name)| TruncateTableTarget { name, only })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        // Identity/cascade options are only recognized for Postgres-like dialects.
        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            identity,
            cascade,
            on_cluster,
        }
        .into())
    }
1040
1041 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1042 if self.parse_keyword(Keyword::CASCADE) {
1043 Some(CascadeOption::Cascade)
1044 } else if self.parse_keyword(Keyword::RESTRICT) {
1045 Some(CascadeOption::Restrict)
1046 } else {
1047 None
1048 }
1049 }
1050
    /// Parses the optional parenthesized option list of a DuckDB `ATTACH`,
    /// e.g. `(READ_ONLY [TRUE|FALSE], TYPE ident)`. Returns an empty list
    /// when no `(` follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // READ_ONLY's boolean argument is itself optional.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            // Each option is followed by `,` (continue) or `)` (done).
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }
1085
1086 pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1087 let database = self.parse_keyword(Keyword::DATABASE);
1088 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1089 let database_path = self.parse_identifier()?;
1090 let database_alias = if self.parse_keyword(Keyword::AS) {
1091 Some(self.parse_identifier()?)
1092 } else {
1093 None
1094 };
1095
1096 let attach_options = self.parse_attach_duckdb_database_options()?;
1097 Ok(Statement::AttachDuckDBDatabase {
1098 if_not_exists,
1099 database,
1100 database_path,
1101 database_alias,
1102 attach_options,
1103 })
1104 }
1105
    /// Parses DuckDB `DETACH [DATABASE] [IF EXISTS] alias`.
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1116
    /// Parses `ATTACH [DATABASE] <expr> AS <schema>`.
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1128
    /// Parses `ANALYZE [TABLE] name` followed by any combination of
    /// `PARTITION (...)`, `FOR COLUMNS [...]`, `CACHE METADATA`, `NOSCAN`
    /// and `COMPUTE STATISTICS`, in any order.
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        // Keep consuming option clauses until none of the keywords match.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list itself is optional after FOR COLUMNS.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        }
        .into())
    }
1186
    /// Parses an expression that may be a wildcard (`*`) or a qualified
    /// wildcard (`a.b.*`). If neither form matches, the read position is
    /// rewound and the input is re-parsed as an ordinary expression.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember the position so we can backtrack below.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: the outer pattern restricts `t` to the
                        // two variants handled above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    // Accumulate `ident.ident....` until `*` or failure.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        // Not a wildcard form: rewind and parse as a plain expression.
        self.index = index;
        self.parse_expr()
    }
1236
    /// Parses a new expression, starting from the dialect's lowest precedence.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1241
    /// Parses an expression with an optional alias and optional ASC/DESC
    /// ordering suffix.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // ASC/DESC/GROUP may not be used as an implicit alias here, since
        // they would be ambiguous with the ordering/grouping keywords.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1260
    /// Parses a sub-expression within another expression, continuing while
    /// the next operator binds tighter than `precedence` (precedence
    /// climbing).
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Expressions nest arbitrarily; bound the depth.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // A `.` here is handled by compound-expression parsing, not as
            // an infix operator.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1288
1289 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1290 let condition = self.parse_expr()?;
1291 let message = if self.parse_keyword(Keyword::AS) {
1292 Some(self.parse_expr()?)
1293 } else {
1294 None
1295 };
1296
1297 Ok(Statement::Assert { condition, message })
1298 }
1299
    /// Parses `SAVEPOINT <name>`.
    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }
1304
    /// Parses `RELEASE [SAVEPOINT] <name>`; the SAVEPOINT keyword is optional.
    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier()?;

        Ok(Statement::ReleaseSavepoint { name })
    }
1311
    /// Parses `LISTEN <channel>`.
    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        Ok(Statement::LISTEN { channel })
    }
1316
    /// Parses `UNLISTEN { * | <channel> }`.
    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            // `*` is stored as an identifier rendered from a wildcard expr.
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    // Rewind so the error points at the offending token.
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }
1331
    /// Parses `NOTIFY <channel> [, '<payload>']`.
    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        let payload = if self.consume_token(&Token::Comma) {
            Some(self.parse_literal_string()?)
        } else {
            None
        };
        Ok(Statement::NOTIFY { channel, payload })
    }
1341
1342 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1344 if self.peek_keyword(Keyword::TABLE) {
1345 self.expect_keyword(Keyword::TABLE)?;
1346 let rename_tables = self.parse_comma_separated(|parser| {
1347 let old_name = parser.parse_object_name(false)?;
1348 parser.expect_keyword(Keyword::TO)?;
1349 let new_name = parser.parse_object_name(false)?;
1350
1351 Ok(RenameTable { old_name, new_name })
1352 })?;
1353 Ok(Statement::RenameTable(rename_tables))
1354 } else {
1355 self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1356 }
1357 }
1358
    /// Tries to parse an expression prefix introduced by a reserved keyword
    /// (`CASE`, `CAST`, `EXISTS`, `INTERVAL`, ...). Returns `Ok(None)` when
    /// the keyword has no special expression form here so the caller can fall
    /// back to identifier/function-name parsing.
    ///
    /// `w` is the already-consumed word token and `w_span` its source span.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean and NULL literals are re-parsed through parse_value so
            // token/span information is preserved.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Postgres session-information "functions" used without parentheses.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Datetime functions whose parentheses are optional.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // In Databricks EXISTS can also be a higher-order function, so the
            // subquery form is only taken when SELECT/WITH follows the paren.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(subquery)` — excluded in dialects where `ARRAY(...)` is an
            // ordinary function call.
            Keyword::ARRAY
                if self.peek_token() == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR expr` is only an operator inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            // DuckDB-style `MAP { ... }` literal.
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            // Geometric typed-string literals, e.g. `POINT '(1,2)'`.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1482
1483 fn parse_expr_prefix_by_unreserved_word(
1485 &mut self,
1486 w: &Word,
1487 w_span: Span,
1488 ) -> Result<Expr, ParserError> {
1489 match self.peek_token().token {
1490 Token::LParen if !self.peek_outer_join_operator() => {
1491 let id_parts = vec![w.clone().into_ident(w_span)];
1492 self.parse_function(ObjectName::from(id_parts))
1493 }
1494 Token::SingleQuotedString(_)
1496 | Token::DoubleQuotedString(_)
1497 | Token::HexStringLiteral(_)
1498 if w.value.starts_with('_') =>
1499 {
1500 Ok(Expr::Prefixed {
1501 prefix: w.clone().into_ident(w_span),
1502 value: self.parse_introduced_string_expr()?.into(),
1503 })
1504 }
1505 Token::SingleQuotedString(_)
1507 | Token::DoubleQuotedString(_)
1508 | Token::HexStringLiteral(_)
1509 if w.value.starts_with('_') =>
1510 {
1511 Ok(Expr::Prefixed {
1512 prefix: w.clone().into_ident(w_span),
1513 value: self.parse_introduced_string_expr()?.into(),
1514 })
1515 }
1516 Token::Arrow if self.dialect.supports_lambda_functions() => {
1517 self.expect_token(&Token::Arrow)?;
1518 Ok(Expr::Lambda(LambdaFunction {
1519 params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1520 body: Box::new(self.parse_expr()?),
1521 }))
1522 }
1523 _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1524 }
1525 }
1526
    /// Parses an expression prefix — the leading operand before any infix
    /// operators: literals, unary operators, parenthesized expressions and
    /// tuples, subqueries, lambdas, and keyword-introduced forms.
    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
        // Dialect-specific prefix parsing takes precedence over the default.
        if let Some(prefix) = self.dialect.parse_prefix(self) {
            return prefix;
        }

        let loc = self.peek_token_ref().span.start;
        // First try `<data_type> '<literal>'` (a typed string such as
        // DATE '2020-01-01'). Custom types are rejected here ("dummy" error
        // inside maybe_parse, which rolls back) so plain identifiers are not
        // misparsed as typed strings.
        let opt_expr = self.maybe_parse(|parser| {
            match parser.parse_data_type()? {
                DataType::Interval { .. } => parser.parse_interval(),
                DataType::Custom(..) => parser_err!("dummy", loc),
                data_type => Ok(Expr::TypedString(TypedString {
                    data_type,
                    value: parser.parse_value()?,
                    uses_odbc_syntax: false,
                })),
            }
        })?;

        if let Some(expr) = opt_expr {
            return Ok(expr);
        }

        let dialect = self.dialect;

        self.advance_token();
        let next_token_index = self.get_current_index();
        let next_token = self.get_current_token();
        let span = next_token.span;
        let expr = match &next_token.token {
            Token::Word(w) => {
                // A word may introduce a reserved-keyword expression form or
                // be a plain identifier / function name. Try the reserved form
                // first; on error, retry as an identifier when the dialect
                // does not reserve the keyword, otherwise surface the error.
                let w = w.clone();
                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
                    Ok(Some(expr)) => Ok(expr),

                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),

                    Err(e) => {
                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
                            }) {
                                return Ok(expr);
                            }
                        }
                        return Err(e);
                    }
                }
            }
            Token::LBracket => self.parse_array_expr(false),
            // Unary plus/minus bind at multiplicative precedence.
            tok @ Token::Minus | tok @ Token::Plus => {
                let op = if *tok == Token::Plus {
                    UnaryOperator::Plus
                } else {
                    UnaryOperator::Minus
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
                    ),
                })
            }
            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
                op: UnaryOperator::BangNot,
                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
            }),
            // Postgres-only prefix operators: !! (prefix factorial),
            // |/ (square root), ||/ (cube root), @ (absolute value).
            tok @ Token::DoubleExclamationMark
            | tok @ Token::PGSquareRoot
            | tok @ Token::PGCubeRoot
            | tok @ Token::AtSign
                if dialect_is!(dialect is PostgreSqlDialect) =>
            {
                let op = match tok {
                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
                    Token::AtSign => UnaryOperator::PGAbs,
                    _ => {
                        return Err(ParserError::ParserError(
                            "Internal parser error: unexpected unary operator token".to_string(),
                        ))
                    }
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            Token::Tilde => Ok(Expr::UnaryOp {
                op: UnaryOperator::BitwiseNot,
                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
            }),
            // Geometric prefix operators (#, @-@, @@, ?-, ?|).
            tok @ Token::Sharp
            | tok @ Token::AtDashAt
            | tok @ Token::AtAt
            | tok @ Token::QuestionMarkDash
            | tok @ Token::QuestionPipe
                if self.dialect.supports_geometric_types() =>
            {
                let op = match tok {
                    Token::Sharp => UnaryOperator::Hash,
                    Token::AtDashAt => UnaryOperator::AtDashAt,
                    Token::AtAt => UnaryOperator::DoubleAt,
                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
                    _ => {
                        return Err(ParserError::ParserError(format!(
                            "Unexpected token in unary operator parsing: {tok:?}"
                        )))
                    }
                };
                Ok(Expr::UnaryOp {
                    op,
                    expr: Box::new(
                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
                    ),
                })
            }
            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
            {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::UnicodeStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            // All plain literal tokens are re-parsed through parse_value.
            Token::Number(_, _)
            | Token::SingleQuotedString(_)
            | Token::DoubleQuotedString(_)
            | Token::TripleSingleQuotedString(_)
            | Token::TripleDoubleQuotedString(_)
            | Token::DollarQuotedString(_)
            | Token::SingleQuotedByteStringLiteral(_)
            | Token::DoubleQuotedByteStringLiteral(_)
            | Token::TripleSingleQuotedByteStringLiteral(_)
            | Token::TripleDoubleQuotedByteStringLiteral(_)
            | Token::SingleQuotedRawStringLiteral(_)
            | Token::DoubleQuotedRawStringLiteral(_)
            | Token::TripleSingleQuotedRawStringLiteral(_)
            | Token::TripleDoubleQuotedRawStringLiteral(_)
            | Token::NationalStringLiteral(_)
            | Token::HexStringLiteral(_) => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LParen => {
                // `(` may start a subquery, a parenthesized lambda parameter
                // list, a nested expression, or a tuple. Lambdas return early
                // because try_parse_lambda consumes the closing paren itself.
                let expr =
                    if let Some(expr) = self.try_parse_expr_sub_query()? {
                        expr
                    } else if let Some(lambda) = self.try_parse_lambda()? {
                        return Ok(lambda);
                    } else {
                        let exprs = self.parse_comma_separated(Parser::parse_expr)?;
                        match exprs.len() {
                            0 => return Err(ParserError::ParserError(
                                "Internal parser error: parse_comma_separated returned empty list"
                                    .to_string(),
                            )),
                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
                            _ => Expr::Tuple(exprs),
                        }
                    };
                self.expect_token(&Token::RParen)?;
                Ok(expr)
            }
            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
                self.prev_token();
                Ok(Expr::Value(self.parse_value()?))
            }
            Token::LBrace => {
                self.prev_token();
                self.parse_lbrace_expr()
            }
            _ => self.expected_at("an expression", next_token_index),
        }?;

        // A trailing COLLATE applies to the just-parsed prefix, except inside
        // a column definition where COLLATE belongs to the column instead.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            Ok(Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            })
        } else {
            Ok(expr)
        }
    }
1760
1761 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1762 Ok(Expr::TypedString(TypedString {
1763 data_type: DataType::GeometricType(kind),
1764 value: self.parse_value()?,
1765 uses_odbc_syntax: false,
1766 }))
1767 }
1768
    /// Extends an already-parsed root expression with trailing access
    /// operations: `.field` chains, subscripts (`[...]`), a trailing
    /// qualified wildcard (`alias.*`), or an Oracle outer-join marker `(+)`.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `.*` ends the chain. Only Postgres consumes the `*`
                        // here (as a qualified wildcard); other dialects give
                        // the `.` back and stop.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token();
                        }

                        break;
                    }
                    Token::SingleQuotedString(s) => {
                        // Quoted member access, kept as a quoted identifier.
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token();
                    }
                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
                        // Flatten nested compound results into this chain so
                        // the final expression stays a single flat chain.
                        Expr::CompoundFieldAccess { root, access_chain } => {
                            chain.push(AccessExpr::Dot(*root));
                            chain.extend(access_chain);
                        }
                        Expr::CompoundIdentifier(parts) => chain
                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
                        expr => {
                            chain.push(AccessExpr::Dot(expr));
                        }
                    },
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` — every chain element must be a plain identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected("an identifier or a '*' after '.'", self.peek_token());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // `col (+)` — Oracle outer-join marker on a column reference.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
1860
    /// Combines a root expression and its access chain into the most specific
    /// AST form: a plain `CompoundIdentifier`, a qualified function call, an
    /// outer-joined compound column, or a generic `CompoundFieldAccess`.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        if access_chain.is_empty() {
            return Ok(root);
        }

        // An all-identifier chain collapses into a compound identifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `db.schema.func(...)` — identifiers followed by a final function
        // call fold into one function with a compound name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                // Every element except the final function must be an identifier.
                .skip(1)
                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend the leading identifiers to the function's own name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // `tbl.col (+)` parsed as a one-element chain ending in OuterJoin —
        // re-attach the marker to the whole compound identifier.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // Fallback: generic field/subscript access chain.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
1953
1954 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1955 match k {
1956 Keyword::LOCAL => Some(ContextModifier::Local),
1957 Keyword::GLOBAL => Some(ContextModifier::Global),
1958 Keyword::SESSION => Some(ContextModifier::Session),
1959 _ => None,
1960 }
1961 }
1962
1963 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1965 if !matches!(root, Expr::Identifier(_)) {
1966 return false;
1967 }
1968 fields
1969 .iter()
1970 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1971 }
1972
1973 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1975 let mut idents = vec![];
1976 if let Expr::Identifier(root) = root {
1977 idents.push(root);
1978 for x in fields {
1979 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1980 idents.push(ident);
1981 } else {
1982 return parser_err!(
1983 format!("Expected identifier, found: {}", x),
1984 x.span().start
1985 );
1986 }
1987 }
1988 Ok(idents)
1989 } else {
1990 parser_err!(
1991 format!("Expected identifier, found: {}", root),
1992 root.span().start
1993 )
1994 }
1995 }
1996
1997 fn peek_outer_join_operator(&mut self) -> bool {
1999 if !self.dialect.supports_outer_join_operator() {
2000 return false;
2001 }
2002
2003 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2004 Token::LParen == maybe_lparen.token
2005 && Token::Plus == maybe_plus.token
2006 && Token::RParen == maybe_rparen.token
2007 }
2008
2009 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2012 self.dialect.supports_outer_join_operator()
2013 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2014 }
2015
2016 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2017 self.expect_token(&Token::LParen)?;
2018 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2019 self.expect_token(&Token::RParen)?;
2020
2021 Ok(options)
2022 }
2023
2024 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2025 let name = self.parse_identifier()?;
2026
2027 let next_token = self.peek_token();
2028 if next_token == Token::Comma || next_token == Token::RParen {
2029 return Ok(UtilityOption { name, arg: None });
2030 }
2031 let arg = self.parse_expr()?;
2032
2033 Ok(UtilityOption {
2034 name,
2035 arg: Some(arg),
2036 })
2037 }
2038
2039 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2040 if !self.peek_sub_query() {
2041 return Ok(None);
2042 }
2043
2044 Ok(Some(Expr::Subquery(self.parse_query()?)))
2045 }
2046
2047 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2048 if !self.dialect.supports_lambda_functions() {
2049 return Ok(None);
2050 }
2051 self.maybe_parse(|p| {
2052 let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2053 p.expect_token(&Token::RParen)?;
2054 p.expect_token(&Token::Arrow)?;
2055 let expr = p.parse_expr()?;
2056 Ok(Expr::Lambda(LambdaFunction {
2057 params: OneOrManyWithParens::Many(params),
2058 body: Box::new(expr),
2059 }))
2060 })
2061 }
2062
2063 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2070 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2072 return Ok(Some(expr));
2073 }
2074 self.maybe_parse_odbc_body_datetime()
2076 }
2077
2078 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2089 self.maybe_parse(|p| {
2090 let token = p.next_token().clone();
2091 let word_string = token.token.to_string();
2092 let data_type = match word_string.as_str() {
2093 "t" => DataType::Time(None, TimezoneInfo::None),
2094 "d" => DataType::Date,
2095 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2096 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2097 };
2098 let value = p.parse_value()?;
2099 Ok(Expr::TypedString(TypedString {
2100 data_type,
2101 value,
2102 uses_odbc_syntax: true,
2103 }))
2104 })
2105 }
2106
2107 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2116 self.maybe_parse(|p| {
2117 p.expect_keyword(Keyword::FN)?;
2118 let fn_name = p.parse_object_name(false)?;
2119 let mut fn_call = p.parse_function_call(fn_name)?;
2120 fn_call.uses_odbc_syntax = true;
2121 Ok(Expr::Function(fn_call))
2122 })
2123 }
2124
2125 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2126 self.parse_function_call(name).map(Expr::Function)
2127 }
2128
    /// Parses a function invocation for `name`, starting at the opening
    /// parenthesis: the argument list, an optional second list for
    /// ClickHouse-style parameterized functions, then the optional
    /// WITHIN GROUP, FILTER, null-treatment, and OVER clauses (in that order).
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // Snowflake permits a subquery directly as the sole "argument".
        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style parameterized functions: `func(params)(args)` —
        // the first list becomes the parameters, the second the arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // NOTE(review): if FILTER is parsed but not followed by `(` WHERE,
        // the short-circuit leaves the consumed keyword(s) unrolled-back —
        // confirm this is intended.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for IGNORE/RESPECT NULLS here when it was not already
        // consumed as part of the argument list.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // OVER takes either an inline window spec or a named window.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2216
2217 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2219 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2220 Some(keyword) => {
2221 self.expect_keyword_is(Keyword::NULLS)?;
2222
2223 Ok(match keyword {
2224 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2225 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2226 _ => None,
2227 })
2228 }
2229 None => Ok(None),
2230 }
2231 }
2232
2233 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2234 let args = if self.consume_token(&Token::LParen) {
2235 FunctionArguments::List(self.parse_function_argument_list()?)
2236 } else {
2237 FunctionArguments::None
2238 };
2239 Ok(Expr::Function(Function {
2240 name,
2241 uses_odbc_syntax: false,
2242 parameters: FunctionArguments::None,
2243 args,
2244 filter: None,
2245 over: None,
2246 null_treatment: None,
2247 within_group: vec![],
2248 }))
2249 }
2250
2251 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2252 let next_token = self.next_token();
2253 match &next_token.token {
2254 Token::Word(w) => match w.keyword {
2255 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2256 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2257 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2258 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2259 },
2260 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2261 }
2262 }
2263
2264 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2265 let units = self.parse_window_frame_units()?;
2266 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2267 let start_bound = self.parse_window_frame_bound()?;
2268 self.expect_keyword_is(Keyword::AND)?;
2269 let end_bound = Some(self.parse_window_frame_bound()?);
2270 (start_bound, end_bound)
2271 } else {
2272 (self.parse_window_frame_bound()?, None)
2273 };
2274 Ok(WindowFrame {
2275 units,
2276 start_bound,
2277 end_bound,
2278 })
2279 }
2280
2281 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2283 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2284 Ok(WindowFrameBound::CurrentRow)
2285 } else {
2286 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2287 None
2288 } else {
2289 Some(Box::new(match self.peek_token().token {
2290 Token::SingleQuotedString(_) => self.parse_interval()?,
2291 _ => self.parse_expr()?,
2292 }))
2293 };
2294 if self.parse_keyword(Keyword::PRECEDING) {
2295 Ok(WindowFrameBound::Preceding(rows))
2296 } else if self.parse_keyword(Keyword::FOLLOWING) {
2297 Ok(WindowFrameBound::Following(rows))
2298 } else {
2299 self.expected("PRECEDING or FOLLOWING", self.peek_token())
2300 }
2301 }
2302 }
2303
2304 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2306 if self.dialect.supports_group_by_expr() {
2307 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2308 self.expect_token(&Token::LParen)?;
2309 let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2310 self.expect_token(&Token::RParen)?;
2311 Ok(Expr::GroupingSets(result))
2312 } else if self.parse_keyword(Keyword::CUBE) {
2313 self.expect_token(&Token::LParen)?;
2314 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2315 self.expect_token(&Token::RParen)?;
2316 Ok(Expr::Cube(result))
2317 } else if self.parse_keyword(Keyword::ROLLUP) {
2318 self.expect_token(&Token::LParen)?;
2319 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2320 self.expect_token(&Token::RParen)?;
2321 Ok(Expr::Rollup(result))
2322 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2323 Ok(Expr::Tuple(vec![]))
2327 } else {
2328 self.parse_expr()
2329 }
2330 } else {
2331 self.parse_expr()
2333 }
2334 }
2335
2336 fn parse_tuple(
2340 &mut self,
2341 lift_singleton: bool,
2342 allow_empty: bool,
2343 ) -> Result<Vec<Expr>, ParserError> {
2344 if lift_singleton {
2345 if self.consume_token(&Token::LParen) {
2346 let result = if allow_empty && self.consume_token(&Token::RParen) {
2347 vec![]
2348 } else {
2349 let result = self.parse_comma_separated(Parser::parse_expr)?;
2350 self.expect_token(&Token::RParen)?;
2351 result
2352 };
2353 Ok(result)
2354 } else {
2355 Ok(vec![self.parse_expr()?])
2356 }
2357 } else {
2358 self.expect_token(&Token::LParen)?;
2359 let result = if allow_empty && self.consume_token(&Token::RParen) {
2360 vec![]
2361 } else {
2362 let result = self.parse_comma_separated(Parser::parse_expr)?;
2363 self.expect_token(&Token::RParen)?;
2364 result
2365 };
2366 Ok(result)
2367 }
2368 }
2369
    /// Parses a CASE expression after the CASE keyword: an optional operand,
    /// one or more WHEN/THEN arms, an optional ELSE, and the closing END.
    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
        // The CASE token itself was already consumed by the caller.
        let case_token = AttachedToken(self.get_current_token().clone());
        let mut operand = None;
        // Distinguish `CASE <operand> WHEN ...` from the searched form
        // `CASE WHEN ...`.
        if !self.parse_keyword(Keyword::WHEN) {
            operand = Some(Box::new(self.parse_expr()?));
            self.expect_keyword_is(Keyword::WHEN)?;
        }
        let mut conditions = vec![];
        loop {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::THEN)?;
            let result = self.parse_expr()?;
            conditions.push(CaseWhen { condition, result });
            if !self.parse_keyword(Keyword::WHEN) {
                break;
            }
        }
        let else_result = if self.parse_keyword(Keyword::ELSE) {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };
        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
        Ok(Expr::Case {
            case_token,
            end_token,
            operand,
            conditions,
            else_result,
        })
    }
2401
2402 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2403 if self.parse_keyword(Keyword::FORMAT) {
2404 let value = self.parse_value()?.value;
2405 match self.parse_optional_time_zone()? {
2406 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2407 None => Ok(Some(CastFormat::Value(value))),
2408 }
2409 } else {
2410 Ok(None)
2411 }
2412 }
2413
2414 pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2415 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2416 self.parse_value().map(|v| Some(v.value))
2417 } else {
2418 Ok(None)
2419 }
2420 }
2421
2422 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2424 self.expect_token(&Token::LParen)?;
2425 let data_type = self.parse_data_type()?;
2426 self.expect_token(&Token::Comma)?;
2427 let expr = self.parse_expr()?;
2428 let styles = if self.consume_token(&Token::Comma) {
2429 self.parse_comma_separated(Parser::parse_expr)?
2430 } else {
2431 Default::default()
2432 };
2433 self.expect_token(&Token::RParen)?;
2434 Ok(Expr::Convert {
2435 is_try,
2436 expr: Box::new(expr),
2437 data_type: Some(data_type),
2438 charset: None,
2439 target_before_value: true,
2440 styles,
2441 })
2442 }
2443
    /// Parses a CONVERT call after the keyword. Dispatches to the MSSQL form
    /// (type before value) when the dialect requires it; otherwise handles
    /// both `CONVERT(expr USING charset)` and
    /// `CONVERT(expr, type [CHARACTER SET cs])`.
    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
        if self.dialect.convert_type_before_value() {
            return self.parse_mssql_convert(is_try);
        }
        self.expect_token(&Token::LParen)?;
        let expr = self.parse_expr()?;
        // MySQL-style transcoding form: CONVERT(expr USING charset).
        if self.parse_keyword(Keyword::USING) {
            let charset = self.parse_object_name(false)?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::Convert {
                is_try,
                expr: Box::new(expr),
                data_type: None,
                charset: Some(charset),
                target_before_value: false,
                styles: vec![],
            });
        }
        // Standard form: CONVERT(expr, type [CHARACTER SET cs]).
        self.expect_token(&Token::Comma)?;
        let data_type = self.parse_data_type()?;
        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Convert {
            is_try,
            expr: Box::new(expr),
            data_type: Some(data_type),
            charset,
            target_before_value: false,
            styles: vec![],
        })
    }
2483
2484 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2486 self.expect_token(&Token::LParen)?;
2487 let expr = self.parse_expr()?;
2488 self.expect_keyword_is(Keyword::AS)?;
2489 let data_type = self.parse_data_type()?;
2490 let format = self.parse_optional_cast_format()?;
2491 self.expect_token(&Token::RParen)?;
2492 Ok(Expr::Cast {
2493 kind,
2494 expr: Box::new(expr),
2495 data_type,
2496 format,
2497 })
2498 }
2499
2500 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2502 self.expect_token(&Token::LParen)?;
2503 let exists_node = Expr::Exists {
2504 negated,
2505 subquery: self.parse_query()?,
2506 };
2507 self.expect_token(&Token::RParen)?;
2508 Ok(exists_node)
2509 }
2510
    /// Parses `(field FROM expr)` — or, in Snowflake/generic dialects,
    /// `(field, expr)` — after the EXTRACT keyword.
    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
        self.expect_token(&Token::LParen)?;
        let field = self.parse_date_time_field()?;

        let syntax = if self.parse_keyword(Keyword::FROM) {
            ExtractSyntax::From
        } else if self.consume_token(&Token::Comma)
            && dialect_of!(self is SnowflakeDialect | GenericDialect)
        {
            // NOTE(review): when a comma is consumed in a dialect without the
            // comma form, the token is not given back before erroring below —
            // confirm this is intended.
            ExtractSyntax::Comma
        } else {
            return Err(ParserError::ParserError(
                "Expected 'FROM' or ','".to_string(),
            ));
        };

        let expr = self.parse_expr()?;
        self.expect_token(&Token::RParen)?;
        Ok(Expr::Extract {
            field,
            expr: Box::new(expr),
            syntax,
        })
    }
2535
2536 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2537 self.expect_token(&Token::LParen)?;
2538 let expr = self.parse_expr()?;
2539 let field = if self.parse_keyword(Keyword::TO) {
2541 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2543 } else if self.consume_token(&Token::Comma) {
2544 match self.parse_value()?.value {
2546 Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2547 _ => {
2548 return Err(ParserError::ParserError(
2549 "Scale field can only be of number type".to_string(),
2550 ))
2551 }
2552 }
2553 } else {
2554 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2555 };
2556 self.expect_token(&Token::RParen)?;
2557 if is_ceil {
2558 Ok(Expr::Ceil {
2559 expr: Box::new(expr),
2560 field,
2561 })
2562 } else {
2563 Ok(Expr::Floor {
2564 expr: Box::new(expr),
2565 field,
2566 })
2567 }
2568 }
2569
2570 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2571 let between_prec = self.dialect.prec_value(Precedence::Between);
2572 let position_expr = self.maybe_parse(|p| {
2573 p.expect_token(&Token::LParen)?;
2575
2576 let expr = p.parse_subexpr(between_prec)?;
2578 p.expect_keyword_is(Keyword::IN)?;
2579 let from = p.parse_expr()?;
2580 p.expect_token(&Token::RParen)?;
2581 Ok(Expr::Position {
2582 expr: Box::new(expr),
2583 r#in: Box::new(from),
2584 })
2585 })?;
2586 match position_expr {
2587 Some(expr) => Ok(expr),
2588 None => self.parse_function(ObjectName::from(vec![ident])),
2591 }
2592 }
2593
2594 pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2596 let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2597 Keyword::SUBSTR => true,
2598 Keyword::SUBSTRING => false,
2599 _ => {
2600 self.prev_token();
2601 return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2602 }
2603 };
2604 self.expect_token(&Token::LParen)?;
2605 let expr = self.parse_expr()?;
2606 let mut from_expr = None;
2607 let special = self.consume_token(&Token::Comma);
2608 if special || self.parse_keyword(Keyword::FROM) {
2609 from_expr = Some(self.parse_expr()?);
2610 }
2611
2612 let mut to_expr = None;
2613 if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2614 to_expr = Some(self.parse_expr()?);
2615 }
2616 self.expect_token(&Token::RParen)?;
2617
2618 Ok(Expr::Substring {
2619 expr: Box::new(expr),
2620 substring_from: from_expr.map(Box::new),
2621 substring_for: to_expr.map(Box::new),
2622 special,
2623 shorthand,
2624 })
2625 }
2626
2627 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2628 self.expect_token(&Token::LParen)?;
2630 let expr = self.parse_expr()?;
2631 self.expect_keyword_is(Keyword::PLACING)?;
2632 let what_expr = self.parse_expr()?;
2633 self.expect_keyword_is(Keyword::FROM)?;
2634 let from_expr = self.parse_expr()?;
2635 let mut for_expr = None;
2636 if self.parse_keyword(Keyword::FOR) {
2637 for_expr = Some(self.parse_expr()?);
2638 }
2639 self.expect_token(&Token::RParen)?;
2640
2641 Ok(Expr::Overlay {
2642 expr: Box::new(expr),
2643 overlay_what: Box::new(what_expr),
2644 overlay_from: Box::new(from_expr),
2645 overlay_for: for_expr.map(Box::new),
2646 })
2647 }
2648
2649 pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2655 self.expect_token(&Token::LParen)?;
2656 let mut trim_where = None;
2657 if let Token::Word(word) = self.peek_token().token {
2658 if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2659 trim_where = Some(self.parse_trim_where()?);
2660 }
2661 }
2662 let expr = self.parse_expr()?;
2663 if self.parse_keyword(Keyword::FROM) {
2664 let trim_what = Box::new(expr);
2665 let expr = self.parse_expr()?;
2666 self.expect_token(&Token::RParen)?;
2667 Ok(Expr::Trim {
2668 expr: Box::new(expr),
2669 trim_where,
2670 trim_what: Some(trim_what),
2671 trim_characters: None,
2672 })
2673 } else if self.consume_token(&Token::Comma)
2674 && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2675 {
2676 let characters = self.parse_comma_separated(Parser::parse_expr)?;
2677 self.expect_token(&Token::RParen)?;
2678 Ok(Expr::Trim {
2679 expr: Box::new(expr),
2680 trim_where: None,
2681 trim_what: None,
2682 trim_characters: Some(characters),
2683 })
2684 } else {
2685 self.expect_token(&Token::RParen)?;
2686 Ok(Expr::Trim {
2687 expr: Box::new(expr),
2688 trim_where,
2689 trim_what: None,
2690 trim_characters: None,
2691 })
2692 }
2693 }
2694
2695 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2696 let next_token = self.next_token();
2697 match &next_token.token {
2698 Token::Word(w) => match w.keyword {
2699 Keyword::BOTH => Ok(TrimWhereField::Both),
2700 Keyword::LEADING => Ok(TrimWhereField::Leading),
2701 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2702 _ => self.expected("trim_where field", next_token)?,
2703 },
2704 _ => self.expected("trim_where field", next_token),
2705 }
2706 }
2707
2708 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2711 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2712 self.expect_token(&Token::RBracket)?;
2713 Ok(Expr::Array(Array { elem: exprs, named }))
2714 }
2715
2716 pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2717 if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2718 if self.parse_keyword(Keyword::ERROR) {
2719 Ok(Some(ListAggOnOverflow::Error))
2720 } else {
2721 self.expect_keyword_is(Keyword::TRUNCATE)?;
2722 let filler = match self.peek_token().token {
2723 Token::Word(w)
2724 if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2725 {
2726 None
2727 }
2728 Token::SingleQuotedString(_)
2729 | Token::EscapedStringLiteral(_)
2730 | Token::UnicodeStringLiteral(_)
2731 | Token::NationalStringLiteral(_)
2732 | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2733 _ => self.expected(
2734 "either filler, WITH, or WITHOUT in LISTAGG",
2735 self.peek_token(),
2736 )?,
2737 };
2738 let with_count = self.parse_keyword(Keyword::WITH);
2739 if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2740 self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2741 }
2742 self.expect_keyword_is(Keyword::COUNT)?;
2743 Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2744 }
2745 } else {
2746 Ok(None)
2747 }
2748 }
2749
2750 pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2755 let next_token = self.next_token();
2756 match &next_token.token {
2757 Token::Word(w) => match w.keyword {
2758 Keyword::YEAR => Ok(DateTimeField::Year),
2759 Keyword::YEARS => Ok(DateTimeField::Years),
2760 Keyword::MONTH => Ok(DateTimeField::Month),
2761 Keyword::MONTHS => Ok(DateTimeField::Months),
2762 Keyword::WEEK => {
2763 let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2764 && self.consume_token(&Token::LParen)
2765 {
2766 let week_day = self.parse_identifier()?;
2767 self.expect_token(&Token::RParen)?;
2768 Some(week_day)
2769 } else {
2770 None
2771 };
2772 Ok(DateTimeField::Week(week_day))
2773 }
2774 Keyword::WEEKS => Ok(DateTimeField::Weeks),
2775 Keyword::DAY => Ok(DateTimeField::Day),
2776 Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2777 Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2778 Keyword::DAYS => Ok(DateTimeField::Days),
2779 Keyword::DATE => Ok(DateTimeField::Date),
2780 Keyword::DATETIME => Ok(DateTimeField::Datetime),
2781 Keyword::HOUR => Ok(DateTimeField::Hour),
2782 Keyword::HOURS => Ok(DateTimeField::Hours),
2783 Keyword::MINUTE => Ok(DateTimeField::Minute),
2784 Keyword::MINUTES => Ok(DateTimeField::Minutes),
2785 Keyword::SECOND => Ok(DateTimeField::Second),
2786 Keyword::SECONDS => Ok(DateTimeField::Seconds),
2787 Keyword::CENTURY => Ok(DateTimeField::Century),
2788 Keyword::DECADE => Ok(DateTimeField::Decade),
2789 Keyword::DOY => Ok(DateTimeField::Doy),
2790 Keyword::DOW => Ok(DateTimeField::Dow),
2791 Keyword::EPOCH => Ok(DateTimeField::Epoch),
2792 Keyword::ISODOW => Ok(DateTimeField::Isodow),
2793 Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2794 Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2795 Keyword::JULIAN => Ok(DateTimeField::Julian),
2796 Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2797 Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2798 Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2799 Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2800 Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2801 Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2802 Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2803 Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2804 Keyword::QUARTER => Ok(DateTimeField::Quarter),
2805 Keyword::TIME => Ok(DateTimeField::Time),
2806 Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2807 Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2808 Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2809 Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2810 Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2811 _ if self.dialect.allow_extract_custom() => {
2812 self.prev_token();
2813 let custom = self.parse_identifier()?;
2814 Ok(DateTimeField::Custom(custom))
2815 }
2816 _ => self.expected("date/time field", next_token),
2817 },
2818 Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2819 self.prev_token();
2820 let custom = self.parse_identifier()?;
2821 Ok(DateTimeField::Custom(custom))
2822 }
2823 _ => self.expected("date/time field", next_token),
2824 }
2825 }
2826
2827 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2828 match self.peek_token().token {
2829 Token::Word(w) => match w.keyword {
2830 Keyword::EXISTS => {
2831 let negated = true;
2832 let _ = self.parse_keyword(Keyword::EXISTS);
2833 self.parse_exists_expr(negated)
2834 }
2835 _ => Ok(Expr::UnaryOp {
2836 op: UnaryOperator::Not,
2837 expr: Box::new(
2838 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2839 ),
2840 }),
2841 },
2842 _ => Ok(Expr::UnaryOp {
2843 op: UnaryOperator::Not,
2844 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2845 }),
2846 }
2847 }
2848
2849 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2859 let token = self.expect_token(&Token::LBrace)?;
2860
2861 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2862 self.expect_token(&Token::RBrace)?;
2863 return Ok(fn_expr);
2864 }
2865
2866 if self.dialect.supports_dictionary_syntax() {
2867 self.prev_token(); return self.parse_dictionary();
2869 }
2870
2871 self.expected("an expression", token)
2872 }
2873
2874 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2880 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2881
2882 self.expect_keyword_is(Keyword::AGAINST)?;
2883
2884 self.expect_token(&Token::LParen)?;
2885
2886 let match_value = self.parse_value()?.value;
2888
2889 let in_natural_language_mode_keywords = &[
2890 Keyword::IN,
2891 Keyword::NATURAL,
2892 Keyword::LANGUAGE,
2893 Keyword::MODE,
2894 ];
2895
2896 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2897
2898 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2899
2900 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2901 if self.parse_keywords(with_query_expansion_keywords) {
2902 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2903 } else {
2904 Some(SearchModifier::InNaturalLanguageMode)
2905 }
2906 } else if self.parse_keywords(in_boolean_mode_keywords) {
2907 Some(SearchModifier::InBooleanMode)
2908 } else if self.parse_keywords(with_query_expansion_keywords) {
2909 Some(SearchModifier::WithQueryExpansion)
2910 } else {
2911 None
2912 };
2913
2914 self.expect_token(&Token::RParen)?;
2915
2916 Ok(Expr::MatchAgainst {
2917 columns,
2918 match_value,
2919 opt_search_modifier,
2920 })
2921 }
2922
2923 pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2939 let value = if self.dialect.require_interval_qualifier() {
2948 self.parse_expr()?
2950 } else {
2951 self.parse_prefix()?
2954 };
2955
2956 let leading_field = if self.next_token_is_temporal_unit() {
2962 Some(self.parse_date_time_field()?)
2963 } else if self.dialect.require_interval_qualifier() {
2964 return parser_err!(
2965 "INTERVAL requires a unit after the literal value",
2966 self.peek_token().span.start
2967 );
2968 } else {
2969 None
2970 };
2971
2972 let (leading_precision, last_field, fsec_precision) =
2973 if leading_field == Some(DateTimeField::Second) {
2974 let last_field = None;
2980 let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
2981 (leading_precision, last_field, fsec_precision)
2982 } else {
2983 let leading_precision = self.parse_optional_precision()?;
2984 if self.parse_keyword(Keyword::TO) {
2985 let last_field = Some(self.parse_date_time_field()?);
2986 let fsec_precision = if last_field == Some(DateTimeField::Second) {
2987 self.parse_optional_precision()?
2988 } else {
2989 None
2990 };
2991 (leading_precision, last_field, fsec_precision)
2992 } else {
2993 (leading_precision, None, None)
2994 }
2995 };
2996
2997 Ok(Expr::Interval(Interval {
2998 value: Box::new(value),
2999 leading_field,
3000 leading_precision,
3001 last_field,
3002 fractional_seconds_precision: fsec_precision,
3003 }))
3004 }
3005
3006 pub fn next_token_is_temporal_unit(&mut self) -> bool {
3009 if let Token::Word(word) = self.peek_token().token {
3010 matches!(
3011 word.keyword,
3012 Keyword::YEAR
3013 | Keyword::YEARS
3014 | Keyword::MONTH
3015 | Keyword::MONTHS
3016 | Keyword::WEEK
3017 | Keyword::WEEKS
3018 | Keyword::DAY
3019 | Keyword::DAYS
3020 | Keyword::HOUR
3021 | Keyword::HOURS
3022 | Keyword::MINUTE
3023 | Keyword::MINUTES
3024 | Keyword::SECOND
3025 | Keyword::SECONDS
3026 | Keyword::CENTURY
3027 | Keyword::DECADE
3028 | Keyword::DOW
3029 | Keyword::DOY
3030 | Keyword::EPOCH
3031 | Keyword::ISODOW
3032 | Keyword::ISOYEAR
3033 | Keyword::JULIAN
3034 | Keyword::MICROSECOND
3035 | Keyword::MICROSECONDS
3036 | Keyword::MILLENIUM
3037 | Keyword::MILLENNIUM
3038 | Keyword::MILLISECOND
3039 | Keyword::MILLISECONDS
3040 | Keyword::NANOSECOND
3041 | Keyword::NANOSECONDS
3042 | Keyword::QUARTER
3043 | Keyword::TIMEZONE
3044 | Keyword::TIMEZONE_HOUR
3045 | Keyword::TIMEZONE_MINUTE
3046 )
3047 } else {
3048 false
3049 }
3050 }
3051
3052 fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3060 self.prev_token();
3062 let (fields, trailing_bracket) =
3063 self.parse_struct_type_def(Self::parse_struct_field_def)?;
3064 if trailing_bracket.0 {
3065 return parser_err!(
3066 "unmatched > in STRUCT literal",
3067 self.peek_token().span.start
3068 );
3069 }
3070
3071 self.expect_token(&Token::LParen)?;
3073 let values = self
3074 .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3075 self.expect_token(&Token::RParen)?;
3076
3077 Ok(Expr::Struct { values, fields })
3078 }
3079
3080 fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3094 let expr = self.parse_expr()?;
3095 if self.parse_keyword(Keyword::AS) {
3096 if typed_syntax {
3097 return parser_err!("Typed syntax does not allow AS", {
3098 self.prev_token();
3099 self.peek_token().span.start
3100 });
3101 }
3102 let field_name = self.parse_identifier()?;
3103 Ok(Expr::Named {
3104 expr: expr.into(),
3105 name: field_name,
3106 })
3107 } else {
3108 Ok(expr)
3109 }
3110 }
3111
3112 fn parse_struct_type_def<F>(
3125 &mut self,
3126 mut elem_parser: F,
3127 ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3128 where
3129 F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3130 {
3131 self.expect_keyword_is(Keyword::STRUCT)?;
3132
3133 if Token::Lt != self.peek_token() {
3135 return Ok((Default::default(), false.into()));
3136 }
3137 self.next_token();
3138
3139 let mut field_defs = vec![];
3140 let trailing_bracket = loop {
3141 let (def, trailing_bracket) = elem_parser(self)?;
3142 field_defs.push(def);
3143 if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3145 break trailing_bracket;
3146 }
3147 };
3148
3149 Ok((
3150 field_defs,
3151 self.expect_closing_angle_bracket(trailing_bracket)?,
3152 ))
3153 }
3154
3155 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3157 self.expect_keyword_is(Keyword::STRUCT)?;
3158 self.expect_token(&Token::LParen)?;
3159 let struct_body = self.parse_comma_separated(|parser| {
3160 let field_name = parser.parse_identifier()?;
3161 let field_type = parser.parse_data_type()?;
3162
3163 Ok(StructField {
3164 field_name: Some(field_name),
3165 field_type,
3166 options: None,
3167 })
3168 });
3169 self.expect_token(&Token::RParen)?;
3170 struct_body
3171 }
3172
3173 fn parse_struct_field_def(
3183 &mut self,
3184 ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3185 let is_anonymous_field = !matches!(
3188 (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3189 (Token::Word(_), Token::Word(_))
3190 );
3191
3192 let field_name = if is_anonymous_field {
3193 None
3194 } else {
3195 Some(self.parse_identifier()?)
3196 };
3197
3198 let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3199
3200 let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3201 Ok((
3202 StructField {
3203 field_name,
3204 field_type,
3205 options,
3206 },
3207 trailing_bracket,
3208 ))
3209 }
3210
3211 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3221 self.expect_keyword_is(Keyword::UNION)?;
3222
3223 self.expect_token(&Token::LParen)?;
3224
3225 let fields = self.parse_comma_separated(|p| {
3226 Ok(UnionField {
3227 field_name: p.parse_identifier()?,
3228 field_type: p.parse_data_type()?,
3229 })
3230 })?;
3231
3232 self.expect_token(&Token::RParen)?;
3233
3234 Ok(fields)
3235 }
3236
3237 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3248 self.expect_token(&Token::LBrace)?;
3249
3250 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3251
3252 self.expect_token(&Token::RBrace)?;
3253
3254 Ok(Expr::Dictionary(fields))
3255 }
3256
3257 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3268 let key = self.parse_identifier()?;
3269
3270 self.expect_token(&Token::Colon)?;
3271
3272 let expr = self.parse_expr()?;
3273
3274 Ok(DictionaryField {
3275 key,
3276 value: Box::new(expr),
3277 })
3278 }
3279
3280 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3290 self.expect_token(&Token::LBrace)?;
3291 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3292 self.expect_token(&Token::RBrace)?;
3293 Ok(Expr::Map(Map { entries: fields }))
3294 }
3295
3296 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3306 let key = self.parse_expr()?;
3307
3308 self.expect_token(&Token::Colon)?;
3309
3310 let value = self.parse_expr()?;
3311
3312 Ok(MapEntry {
3313 key: Box::new(key),
3314 value: Box::new(value),
3315 })
3316 }
3317
3318 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3328 self.expect_keyword_is(Keyword::MAP)?;
3329 self.expect_token(&Token::LParen)?;
3330 let key_data_type = self.parse_data_type()?;
3331 self.expect_token(&Token::Comma)?;
3332 let value_data_type = self.parse_data_type()?;
3333 self.expect_token(&Token::RParen)?;
3334
3335 Ok((key_data_type, value_data_type))
3336 }
3337
3338 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3348 self.expect_keyword_is(Keyword::TUPLE)?;
3349 self.expect_token(&Token::LParen)?;
3350 let mut field_defs = vec![];
3351 loop {
3352 let (def, _) = self.parse_struct_field_def()?;
3353 field_defs.push(def);
3354 if !self.consume_token(&Token::Comma) {
3355 break;
3356 }
3357 }
3358 self.expect_token(&Token::RParen)?;
3359
3360 Ok(field_defs)
3361 }
3362
3363 fn expect_closing_angle_bracket(
3368 &mut self,
3369 trailing_bracket: MatchedTrailingBracket,
3370 ) -> Result<MatchedTrailingBracket, ParserError> {
3371 let trailing_bracket = if !trailing_bracket.0 {
3372 match self.peek_token().token {
3373 Token::Gt => {
3374 self.next_token();
3375 false.into()
3376 }
3377 Token::ShiftRight => {
3378 self.next_token();
3379 true.into()
3380 }
3381 _ => return self.expected(">", self.peek_token()),
3382 }
3383 } else {
3384 false.into()
3385 };
3386
3387 Ok(trailing_bracket)
3388 }
3389
3390 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3392 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3394 return infix;
3395 }
3396
3397 let dialect = self.dialect;
3398
3399 self.advance_token();
3400 let tok = self.get_current_token();
3401 debug!("infix: {tok:?}");
3402 let tok_index = self.get_current_index();
3403 let span = tok.span;
3404 let regular_binary_operator = match &tok.token {
3405 Token::Spaceship => Some(BinaryOperator::Spaceship),
3406 Token::DoubleEq => Some(BinaryOperator::Eq),
3407 Token::Assignment => Some(BinaryOperator::Assignment),
3408 Token::Eq => Some(BinaryOperator::Eq),
3409 Token::Neq => Some(BinaryOperator::NotEq),
3410 Token::Gt => Some(BinaryOperator::Gt),
3411 Token::GtEq => Some(BinaryOperator::GtEq),
3412 Token::Lt => Some(BinaryOperator::Lt),
3413 Token::LtEq => Some(BinaryOperator::LtEq),
3414 Token::Plus => Some(BinaryOperator::Plus),
3415 Token::Minus => Some(BinaryOperator::Minus),
3416 Token::Mul => Some(BinaryOperator::Multiply),
3417 Token::Mod => Some(BinaryOperator::Modulo),
3418 Token::StringConcat => Some(BinaryOperator::StringConcat),
3419 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3420 Token::Caret => {
3421 if dialect_is!(dialect is PostgreSqlDialect) {
3424 Some(BinaryOperator::PGExp)
3425 } else {
3426 Some(BinaryOperator::BitwiseXor)
3427 }
3428 }
3429 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3430 Token::Div => Some(BinaryOperator::Divide),
3431 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3432 Some(BinaryOperator::DuckIntegerDivide)
3433 }
3434 Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3435 Some(BinaryOperator::PGBitwiseShiftLeft)
3436 }
3437 Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3438 Some(BinaryOperator::PGBitwiseShiftRight)
3439 }
3440 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3441 Some(BinaryOperator::PGBitwiseXor)
3442 }
3443 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3444 Some(BinaryOperator::PGOverlap)
3445 }
3446 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3447 Some(BinaryOperator::PGOverlap)
3448 }
3449 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3450 Some(BinaryOperator::PGStartsWith)
3451 }
3452 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3453 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3454 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3455 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3456 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3457 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3458 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3459 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3460 Token::Arrow => Some(BinaryOperator::Arrow),
3461 Token::LongArrow => Some(BinaryOperator::LongArrow),
3462 Token::HashArrow => Some(BinaryOperator::HashArrow),
3463 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3464 Token::AtArrow => Some(BinaryOperator::AtArrow),
3465 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3466 Token::HashMinus => Some(BinaryOperator::HashMinus),
3467 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3468 Token::AtAt => Some(BinaryOperator::AtAt),
3469 Token::Question => Some(BinaryOperator::Question),
3470 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3471 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3472 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3473 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3474 Some(BinaryOperator::DoubleHash)
3475 }
3476
3477 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3478 Some(BinaryOperator::AndLt)
3479 }
3480 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3481 Some(BinaryOperator::AndGt)
3482 }
3483 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3484 Some(BinaryOperator::QuestionDash)
3485 }
3486 Token::AmpersandLeftAngleBracketVerticalBar
3487 if self.dialect.supports_geometric_types() =>
3488 {
3489 Some(BinaryOperator::AndLtPipe)
3490 }
3491 Token::VerticalBarAmpersandRightAngleBracket
3492 if self.dialect.supports_geometric_types() =>
3493 {
3494 Some(BinaryOperator::PipeAndGt)
3495 }
3496 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3497 Some(BinaryOperator::LtDashGt)
3498 }
3499 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3500 Some(BinaryOperator::LtCaret)
3501 }
3502 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3503 Some(BinaryOperator::GtCaret)
3504 }
3505 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3506 Some(BinaryOperator::QuestionHash)
3507 }
3508 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3509 Some(BinaryOperator::QuestionDoublePipe)
3510 }
3511 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3512 Some(BinaryOperator::QuestionDashPipe)
3513 }
3514 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3515 Some(BinaryOperator::TildeEq)
3516 }
3517 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3518 Some(BinaryOperator::LtLtPipe)
3519 }
3520 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3521 Some(BinaryOperator::PipeGtGt)
3522 }
3523 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3524
3525 Token::Word(w) => match w.keyword {
3526 Keyword::AND => Some(BinaryOperator::And),
3527 Keyword::OR => Some(BinaryOperator::Or),
3528 Keyword::XOR => Some(BinaryOperator::Xor),
3529 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3530 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3531 self.expect_token(&Token::LParen)?;
3532 let mut idents = vec![];
3537 loop {
3538 self.advance_token();
3539 idents.push(self.get_current_token().to_string());
3540 if !self.consume_token(&Token::Period) {
3541 break;
3542 }
3543 }
3544 self.expect_token(&Token::RParen)?;
3545 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3546 }
3547 _ => None,
3548 },
3549 _ => None,
3550 };
3551
3552 let tok = self.token_at(tok_index);
3553 if let Some(op) = regular_binary_operator {
3554 if let Some(keyword) =
3555 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3556 {
3557 self.expect_token(&Token::LParen)?;
3558 let right = if self.peek_sub_query() {
3559 self.prev_token(); self.parse_subexpr(precedence)?
3563 } else {
3564 let right = self.parse_subexpr(precedence)?;
3566 self.expect_token(&Token::RParen)?;
3567 right
3568 };
3569
3570 if !matches!(
3571 op,
3572 BinaryOperator::Gt
3573 | BinaryOperator::Lt
3574 | BinaryOperator::GtEq
3575 | BinaryOperator::LtEq
3576 | BinaryOperator::Eq
3577 | BinaryOperator::NotEq
3578 | BinaryOperator::PGRegexMatch
3579 | BinaryOperator::PGRegexIMatch
3580 | BinaryOperator::PGRegexNotMatch
3581 | BinaryOperator::PGRegexNotIMatch
3582 | BinaryOperator::PGLikeMatch
3583 | BinaryOperator::PGILikeMatch
3584 | BinaryOperator::PGNotLikeMatch
3585 | BinaryOperator::PGNotILikeMatch
3586 ) {
3587 return parser_err!(
3588 format!(
3589 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3590 ),
3591 span.start
3592 );
3593 };
3594
3595 Ok(match keyword {
3596 Keyword::ALL => Expr::AllOp {
3597 left: Box::new(expr),
3598 compare_op: op,
3599 right: Box::new(right),
3600 },
3601 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3602 left: Box::new(expr),
3603 compare_op: op,
3604 right: Box::new(right),
3605 is_some: keyword == Keyword::SOME,
3606 },
3607 unexpected_keyword => return Err(ParserError::ParserError(
3608 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3609 )),
3610 })
3611 } else {
3612 Ok(Expr::BinaryOp {
3613 left: Box::new(expr),
3614 op,
3615 right: Box::new(self.parse_subexpr(precedence)?),
3616 })
3617 }
3618 } else if let Token::Word(w) = &tok.token {
3619 match w.keyword {
3620 Keyword::IS => {
3621 if self.parse_keyword(Keyword::NULL) {
3622 Ok(Expr::IsNull(Box::new(expr)))
3623 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3624 Ok(Expr::IsNotNull(Box::new(expr)))
3625 } else if self.parse_keywords(&[Keyword::TRUE]) {
3626 Ok(Expr::IsTrue(Box::new(expr)))
3627 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3628 Ok(Expr::IsNotTrue(Box::new(expr)))
3629 } else if self.parse_keywords(&[Keyword::FALSE]) {
3630 Ok(Expr::IsFalse(Box::new(expr)))
3631 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3632 Ok(Expr::IsNotFalse(Box::new(expr)))
3633 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3634 Ok(Expr::IsUnknown(Box::new(expr)))
3635 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3636 Ok(Expr::IsNotUnknown(Box::new(expr)))
3637 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3638 let expr2 = self.parse_expr()?;
3639 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3640 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3641 {
3642 let expr2 = self.parse_expr()?;
3643 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3644 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3645 Ok(is_normalized)
3646 } else {
3647 self.expected(
3648 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3649 self.peek_token(),
3650 )
3651 }
3652 }
3653 Keyword::AT => {
3654 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3655 Ok(Expr::AtTimeZone {
3656 timestamp: Box::new(expr),
3657 time_zone: Box::new(self.parse_subexpr(precedence)?),
3658 })
3659 }
3660 Keyword::NOT
3661 | Keyword::IN
3662 | Keyword::BETWEEN
3663 | Keyword::LIKE
3664 | Keyword::ILIKE
3665 | Keyword::SIMILAR
3666 | Keyword::REGEXP
3667 | Keyword::RLIKE => {
3668 self.prev_token();
3669 let negated = self.parse_keyword(Keyword::NOT);
3670 let regexp = self.parse_keyword(Keyword::REGEXP);
3671 let rlike = self.parse_keyword(Keyword::RLIKE);
3672 let null = if !self.in_column_definition_state() {
3673 self.parse_keyword(Keyword::NULL)
3674 } else {
3675 false
3676 };
3677 if regexp || rlike {
3678 Ok(Expr::RLike {
3679 negated,
3680 expr: Box::new(expr),
3681 pattern: Box::new(
3682 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3683 ),
3684 regexp,
3685 })
3686 } else if negated && null {
3687 Ok(Expr::IsNotNull(Box::new(expr)))
3688 } else if self.parse_keyword(Keyword::IN) {
3689 self.parse_in(expr, negated)
3690 } else if self.parse_keyword(Keyword::BETWEEN) {
3691 self.parse_between(expr, negated)
3692 } else if self.parse_keyword(Keyword::LIKE) {
3693 Ok(Expr::Like {
3694 negated,
3695 any: self.parse_keyword(Keyword::ANY),
3696 expr: Box::new(expr),
3697 pattern: Box::new(
3698 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3699 ),
3700 escape_char: self.parse_escape_char()?,
3701 })
3702 } else if self.parse_keyword(Keyword::ILIKE) {
3703 Ok(Expr::ILike {
3704 negated,
3705 any: self.parse_keyword(Keyword::ANY),
3706 expr: Box::new(expr),
3707 pattern: Box::new(
3708 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3709 ),
3710 escape_char: self.parse_escape_char()?,
3711 })
3712 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3713 Ok(Expr::SimilarTo {
3714 negated,
3715 expr: Box::new(expr),
3716 pattern: Box::new(
3717 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3718 ),
3719 escape_char: self.parse_escape_char()?,
3720 })
3721 } else {
3722 self.expected("IN or BETWEEN after NOT", self.peek_token())
3723 }
3724 }
3725 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3726 Ok(Expr::IsNotNull(Box::new(expr)))
3727 }
3728 Keyword::MEMBER => {
3729 if self.parse_keyword(Keyword::OF) {
3730 self.expect_token(&Token::LParen)?;
3731 let array = self.parse_expr()?;
3732 self.expect_token(&Token::RParen)?;
3733 Ok(Expr::MemberOf(MemberOf {
3734 value: Box::new(expr),
3735 array: Box::new(array),
3736 }))
3737 } else {
3738 self.expected("OF after MEMBER", self.peek_token())
3739 }
3740 }
3741 _ => parser_err!(
3743 format!("No infix parser for token {:?}", tok.token),
3744 tok.span.start
3745 ),
3746 }
3747 } else if Token::DoubleColon == *tok {
3748 Ok(Expr::Cast {
3749 kind: CastKind::DoubleColon,
3750 expr: Box::new(expr),
3751 data_type: self.parse_data_type()?,
3752 format: None,
3753 })
3754 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3755 Ok(Expr::UnaryOp {
3756 op: UnaryOperator::PGPostfixFactorial,
3757 expr: Box::new(expr),
3758 })
3759 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3760 || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3761 {
3762 self.prev_token();
3763 self.parse_json_access(expr)
3764 } else {
3765 parser_err!(
3767 format!("No infix parser for token {:?}", tok.token),
3768 tok.span.start
3769 )
3770 }
3771 }
3772
3773 pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3775 if self.parse_keyword(Keyword::ESCAPE) {
3776 Ok(Some(self.parse_value()?.into()))
3777 } else {
3778 Ok(None)
3779 }
3780 }
3781
    /// Parses the interior of a `[...]` subscript after the opening `[` has
    /// been consumed: either a plain index (`[i]`) or a slice with optional
    /// lower/upper bounds and stride (`[a:b:c]`, any part omissible).
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound is omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // `[expr]` is a plain index; `[:]` is a fully open slice.
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // When a lower bound was parsed the separating `:` has not been
        // consumed yet; require it now.
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `[a:]` / `[:]` — slice without an upper bound; otherwise parse it.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_expr()?)
        };

        // `[a:b]` — bounds only, no stride.
        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // A second `:` introduces the stride, which may itself be omitted
        // (`[a:b:]`).
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // If a stride expression was parsed, the closing `]` is still pending.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
3854
3855 pub fn parse_multi_dim_subscript(
3857 &mut self,
3858 chain: &mut Vec<AccessExpr>,
3859 ) -> Result<(), ParserError> {
3860 while self.consume_token(&Token::LBracket) {
3861 self.parse_subscript(chain)?;
3862 }
3863 Ok(())
3864 }
3865
3866 fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3870 let subscript = self.parse_subscript_inner()?;
3871 chain.push(AccessExpr::Subscript(subscript));
3872 Ok(())
3873 }
3874
3875 fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3876 let token = self.next_token();
3877 match token.token {
3878 Token::Word(Word {
3879 value,
3880 quote_style: quote_style @ (Some('"') | None),
3882 keyword: _,
3885 }) => Ok(JsonPathElem::Dot {
3886 key: value,
3887 quoted: quote_style.is_some(),
3888 }),
3889
3890 Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3894
3895 _ => self.expected("variant object key name", token),
3896 }
3897 }
3898
3899 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3900 let path = self.parse_json_path()?;
3901 Ok(Expr::JsonAccess {
3902 value: Box::new(expr),
3903 path,
3904 })
3905 }
3906
    /// Parses a JSON access path: an optional leading `:` key, then any mix
    /// of `.key` and `[expr]` elements until neither follows.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // `:` introduces only the very first element.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.` separates subsequent object keys.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `[expr]` indexes at any position in the path.
                Token::LBracket => {
                    let key = self.parse_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Anything else ends the path; push the token back.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers invoke this only after peeking `:` or `[`, so at least one
        // element must have been parsed.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
3933
    /// Parses the right-hand side of `[NOT] IN`: `UNNEST(expr)`, a
    /// parenthesized subquery, or a parenthesized expression list.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // BigQuery-style `IN UNNEST(array_expr)`.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; on failure `maybe_parse` rewinds the cursor
        // and the parenthesized contents are re-parsed as a value list.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects permit an empty `IN ()` list.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
3968
3969 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3971 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3974 self.expect_keyword_is(Keyword::AND)?;
3975 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3976 Ok(Expr::Between {
3977 expr: Box::new(expr),
3978 negated,
3979 low: Box::new(low),
3980 high: Box::new(high),
3981 })
3982 }
3983
3984 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3986 Ok(Expr::Cast {
3987 kind: CastKind::DoubleColon,
3988 expr: Box::new(expr),
3989 data_type: self.parse_data_type()?,
3990 format: None,
3991 })
3992 }
3993
    /// Returns the precedence of the next (lookahead) token, delegating to
    /// the dialect's default precedence table.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
3998
3999 pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4002 self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4003 }
4004
4005 pub fn peek_token(&self) -> TokenWithSpan {
4010 self.peek_nth_token(0)
4011 }
4012
    /// Returns a reference to the next significant (non-whitespace) token
    /// without advancing.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4018
4019 pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4042 self.peek_tokens_with_location()
4043 .map(|with_loc| with_loc.token)
4044 }
4045
    /// Peeks at the next `N` significant tokens (with spans) without
    /// advancing; positions past the end yield EOF tokens.
    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
        // Scan ahead with a local index so the parser cursor is untouched.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Whitespace tokens are transparent to the parser; skip them.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.cloned().unwrap_or(TokenWithSpan {
                token: Token::EOF,
                span: Span::empty(),
            });
        })
    }
4068
    /// Borrowing variant of [`Self::peek_tokens_with_location`]: peeks at
    /// the next `N` significant tokens without cloning or advancing.
    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
        // Scan ahead with a local index so the parser cursor is untouched.
        let mut index = self.index;
        core::array::from_fn(|_| loop {
            let token = self.tokens.get(index);
            index += 1;
            // Whitespace tokens are transparent to the parser; skip them.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = token
            {
                continue;
            }
            break token.unwrap_or(&EOF_TOKEN);
        })
    }
4088
    /// Clones the nth significant token ahead of the cursor (see
    /// [`Self::peek_nth_token_ref`]).
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4093
    /// Returns a reference to the nth significant (non-whitespace) token
    /// ahead of the cursor without advancing; EOF when out of range.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        let mut index = self.index;
        loop {
            // `index` is bumped before the lookup, so `index - 1` is the
            // slot being examined this iteration.
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace does not count toward `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                // Either a significant token or `None` (past the end, which
                // maps to the shared EOF token once `n` reaches 0).
                non_whitespace => {
                    if n == 0 {
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4113
    /// Peeks the very next token, whitespace included, without advancing.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4119
4120 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4122 self.tokens
4123 .get(self.index + n)
4124 .cloned()
4125 .unwrap_or(TokenWithSpan {
4126 token: Token::EOF,
4127 span: Span::empty(),
4128 })
4129 }
4130
4131 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4135 let index = self.index;
4136 let matched = self.parse_keywords(expected);
4137 self.index = index;
4138 matched
4139 }
4140
4141 pub fn next_token(&mut self) -> TokenWithSpan {
4146 self.advance_token();
4147 self.get_current_token().clone()
4148 }
4149
    /// Returns the index of the current token (the one most recently
    /// advanced past), saturating at 0 before any token was consumed.
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4157
4158 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4160 self.index += 1;
4161 self.tokens.get(self.index - 1)
4162 }
4163
    /// Moves the cursor forward to just past the next non-whitespace token
    /// (or past the end of the stream).
    pub fn advance_token(&mut self) {
        loop {
            self.index += 1;
            match self.tokens.get(self.index - 1) {
                // Whitespace is skipped transparently.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                // Significant token or end of stream: stop here.
                _ => break,
            }
        }
    }
4179
    /// Returns the token the cursor most recently advanced past (EOF token
    /// before the first advance or past the end).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4186
    /// Returns the token one raw position before the current one
    /// (whitespace included), saturating at the start of the stream.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4193
    /// Returns the token at the raw cursor position — the next unconsumed
    /// token, whitespace included (EOF token past the end).
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4200
    /// Moves the cursor back to the previous significant token, skipping
    /// over whitespace.
    ///
    /// Must be paired with a prior `next_token`/`advance_token`; the assert
    /// panics if called when the cursor is already at the start.
    pub fn prev_token(&mut self) {
        loop {
            assert!(self.index > 0);
            self.index -= 1;
            // Keep stepping back over whitespace tokens.
            if let Some(TokenWithSpan {
                token: Token::Whitespace(_),
                span: _,
            }) = self.tokens.get(self.index)
            {
                continue;
            }
            return;
        }
    }
4221
4222 pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4224 parser_err!(
4225 format!("Expected: {expected}, found: {found}"),
4226 found.span.start
4227 )
4228 }
4229
4230 pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4232 parser_err!(
4233 format!("Expected: {expected}, found: {found}"),
4234 found.span.start
4235 )
4236 }
4237
4238 pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4240 let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4241 parser_err!(
4242 format!("Expected: {expected}, found: {found}"),
4243 found.span.start
4244 )
4245 }
4246
4247 #[must_use]
4250 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4251 if self.peek_keyword(expected) {
4252 self.advance_token();
4253 true
4254 } else {
4255 false
4256 }
4257 }
4258
4259 #[must_use]
4260 pub fn peek_keyword(&self, expected: Keyword) -> bool {
4261 matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4262 }
4263
    /// Consumes `expected` and the immediately following `tokens` when the
    /// whole sequence matches; returns whether it matched.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4274
    /// Non-consuming variant of [`Self::parse_keyword_with_tokens`].
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4282
4283 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4284 match &self.peek_token_ref().token {
4285 Token::Word(w) if expected == w.keyword => {
4286 for (idx, token) in tokens.iter().enumerate() {
4287 if self.peek_nth_token_ref(idx + 1).token != *token {
4288 return false;
4289 }
4290 }
4291
4292 if consume {
4293 for _ in 0..(tokens.len() + 1) {
4294 self.advance_token();
4295 }
4296 }
4297
4298 true
4299 }
4300 _ => false,
4301 }
4302 }
4303
4304 #[must_use]
4308 pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4309 let index = self.index;
4310 for &keyword in keywords {
4311 if !self.parse_keyword(keyword) {
4312 self.index = index;
4315 return false;
4316 }
4317 }
4318 true
4319 }
4320
4321 #[must_use]
4324 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4325 for keyword in keywords {
4326 if self.peek_keyword(*keyword) {
4327 return Some(*keyword);
4328 }
4329 }
4330 None
4331 }
4332
4333 #[must_use]
4337 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4338 match &self.peek_token_ref().token {
4339 Token::Word(w) => {
4340 keywords
4341 .iter()
4342 .find(|keyword| **keyword == w.keyword)
4343 .map(|keyword| {
4344 self.advance_token();
4345 *keyword
4346 })
4347 }
4348 _ => None,
4349 }
4350 }
4351
4352 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4355 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4356 Ok(keyword)
4357 } else {
4358 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4359 self.expected_ref(
4360 &format!("one of {}", keywords.join(" or ")),
4361 self.peek_token_ref(),
4362 )
4363 }
4364 }
4365
4366 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4371 if self.parse_keyword(expected) {
4372 Ok(self.get_current_token().clone())
4373 } else {
4374 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4375 }
4376 }
4377
4378 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4384 if self.parse_keyword(expected) {
4385 Ok(())
4386 } else {
4387 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4388 }
4389 }
4390
4391 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4394 for &kw in expected {
4395 self.expect_keyword_is(kw)?;
4396 }
4397 Ok(())
4398 }
4399
4400 #[must_use]
4404 pub fn consume_token(&mut self, expected: &Token) -> bool {
4405 if self.peek_token_ref() == expected {
4406 self.advance_token();
4407 true
4408 } else {
4409 false
4410 }
4411 }
4412
4413 #[must_use]
4417 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4418 let index = self.index;
4419 for token in tokens {
4420 if !self.consume_token(token) {
4421 self.index = index;
4422 return false;
4423 }
4424 }
4425 true
4426 }
4427
4428 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4430 if self.peek_token_ref() == expected {
4431 Ok(self.next_token())
4432 } else {
4433 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4434 }
4435 }
4436
4437 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4438 where
4439 <T as FromStr>::Err: Display,
4440 {
4441 s.parse::<T>().map_err(|e| {
4442 ParserError::ParserError(format!(
4443 "Could not parse '{s}' as {}: {e}{loc}",
4444 core::any::type_name::<T>()
4445 ))
4446 })
4447 }
4448
4449 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4451 let trailing_commas =
4457 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4458
4459 self.parse_comma_separated_with_trailing_commas(
4460 |p| p.parse_select_item(),
4461 trailing_commas,
4462 Self::is_reserved_for_column_alias,
4463 )
4464 }
4465
    /// Parses the comma-separated privilege list of a `GRANT`/`REVOKE`,
    /// optionally tolerating a trailing comma before `ON` or a closer.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // A comma directly followed by `ON` or a closing token is
                // treated as a trailing comma that ends the list.
                match self.peek_token().token {
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4488
4489 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4491 let trailing_commas = self.dialect.supports_from_trailing_commas();
4492
4493 self.parse_comma_separated_with_trailing_commas(
4494 Parser::parse_table_and_joins,
4495 trailing_commas,
4496 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4497 )
4498 }
4499
    /// Decides whether a comma-separated list has ended.
    ///
    /// Returns true when there is no comma, or — with `trailing_commas`
    /// enabled — when the comma is followed by a closing token or a keyword
    /// that `is_reserved_keyword` flags. In the trailing-comma case the
    /// comma is consumed but the following token is pushed back.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Examine (and then restore) the token after the comma.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4532
    /// Whether the comma-separated list being parsed has ended, using the
    /// parser-level trailing-comma option and the dialect's column-alias
    /// keyword rules.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4541
    /// Parses one or more `f`-produced items separated by commas, using the
    /// parser's trailing-comma option and the dialect's column-alias rules
    /// to detect the end of the list.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4553
    /// Parses one or more `f`-produced items separated by commas, with
    /// configurable trailing-comma tolerance and a predicate identifying
    /// keywords that terminate the list when seen after a comma.
    fn parse_comma_separated_with_trailing_commas<T, F, R>(
        &mut self,
        mut f: F,
        trailing_commas: bool,
        is_reserved_keyword: R,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        let mut values = vec![];
        loop {
            values.push(f(self)?);
            // The helper consumes the separating comma and reports whether
            // the list has ended.
            if self.is_parse_comma_separated_end_with_trailing_commas(
                trailing_commas,
                &is_reserved_keyword,
            ) {
                break;
            }
        }
        Ok(values)
    }
4580
4581 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4583 where
4584 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4585 {
4586 let mut values = vec![];
4587 loop {
4588 values.push(f(self)?);
4589 if !self.consume_token(&Token::Period) {
4590 break;
4591 }
4592 }
4593 Ok(values)
4594 }
4595
4596 pub fn parse_keyword_separated<T, F>(
4598 &mut self,
4599 keyword: Keyword,
4600 mut f: F,
4601 ) -> Result<Vec<T>, ParserError>
4602 where
4603 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4604 {
4605 let mut values = vec![];
4606 loop {
4607 values.push(f(self)?);
4608 if !self.parse_keyword(keyword) {
4609 break;
4610 }
4611 }
4612 Ok(values)
4613 }
4614
4615 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4616 where
4617 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4618 {
4619 self.expect_token(&Token::LParen)?;
4620 let res = f(self)?;
4621 self.expect_token(&Token::RParen)?;
4622 Ok(res)
4623 }
4624
    /// Like [`Self::parse_comma_separated`] but allows an empty list: when
    /// `end_token` comes next (optionally preceded by a lone trailing
    /// comma), returns `vec![]` without consuming `end_token`.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        if self.peek_token().token == end_token {
            return Ok(vec![]);
        }

        // `(, )` — a bare trailing comma in an otherwise empty list.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
4646
    /// Parses semicolon-terminated statements until EOF or an unquoted
    /// keyword from `terminal_keywords` comes next (the terminal keyword is
    /// not consumed).
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // A quoted identifier never terminates the list, even if
                    // it spells a terminal keyword.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
4671
4672 fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4676 !parser.dialect.is_column_alias(kw, parser)
4677 }
4678
4679 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4683 where
4684 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4685 {
4686 match self.try_parse(f) {
4687 Ok(t) => Ok(Some(t)),
4688 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4689 _ => Ok(None),
4690 }
4691 }
4692
4693 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4695 where
4696 F: FnMut(&mut Parser) -> Result<T, ParserError>,
4697 {
4698 let index = self.index;
4699 match f(self) {
4700 Ok(t) => Ok(t),
4701 Err(e) => {
4702 self.index = index;
4704 Err(e)
4705 }
4706 }
4707 }
4708
    /// Parses an optional `ALL` / `DISTINCT [ON (...)]` quantifier.
    ///
    /// Returns `None` for `ALL` or nothing, `Some(Distinct::Distinct)` for
    /// bare `DISTINCT`, and `Some(Distinct::On(..))` for `DISTINCT ON`.
    /// Writing both `ALL` and `DISTINCT` is an error.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Capture the location up front so the ALL+DISTINCT error points at
        // the start of the quantifier.
        let loc = self.peek_token().span.start;
        let all = self.parse_keyword(Keyword::ALL);
        let distinct = self.parse_keyword(Keyword::DISTINCT);
        if !distinct {
            return Ok(None);
        }
        if all {
            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
        }
        let on = self.parse_keyword(Keyword::ON);
        if !on {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        // Handle `DISTINCT ON ()`: probe for an immediately-closing paren,
        // then rewind so the shared `expect_token` below consumes it.
        let col_names = if self.consume_token(&Token::RParen) {
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
4736
    /// Dispatches a `CREATE ...` statement (the `CREATE` keyword has
    /// already been consumed) to the object-specific sub-parser based on
    /// the modifiers and object-type keyword that follow.
    ///
    /// Modifier keywords (`OR REPLACE`, `OR ALTER`, `LOCAL`/`GLOBAL`,
    /// `TRANSIENT`, `TEMP`/`TEMPORARY`, `PERSISTENT`) are consumed up
    /// front and threaded into whichever sub-parser applies. Branch order
    /// matters: e.g. `CONSTRAINT TRIGGER` must be probed after `TRIGGER`
    /// alone fails, and the `or_replace` fallback error sits before the
    /// object types that do not support `OR REPLACE`.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Collapse LOCAL/GLOBAL into a tri-state flag.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // `PERSISTENT` secrets are DuckDB-specific.
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        let create_view_params = self.parse_create_view_params()?;
        if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy()
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain()
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            self.parse_create_user(or_replace)
        } else if or_replace {
            // `OR REPLACE` is only valid for the object types above.
            self.expected(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension()
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role()
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector()
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // `OPERATOR` covers plain operators plus FAMILY/CLASS variants.
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class()
            } else {
                self.parse_create_operator()
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else {
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
4825
    /// Parses `CREATE [OR REPLACE] USER [IF NOT EXISTS] <name> ...`:
    /// space-delimited key/value options followed by an optional
    /// `[WITH] TAG (...)` list.
    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_identifier()?;
        // The options run until `WITH` or `TAG`, which start the tag list.
        let options = self
            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
            .options;
        let with_tags = self.parse_keyword(Keyword::WITH);
        let tags = if self.parse_keyword(Keyword::TAG) {
            self.parse_key_value_options(true, &[])?.options
        } else {
            vec![]
        };
        Ok(Statement::CreateUser(CreateUser {
            or_replace,
            if_not_exists,
            name,
            options: KeyValueOptions {
                options,
                delimiter: KeyValueOptionsDelimiter::Space,
            },
            with_tags,
            tags: KeyValueOptions {
                options: tags,
                delimiter: KeyValueOptionsDelimiter::Comma,
            },
        }))
    }
4853
    /// Parses `CREATE SECRET`: an optional name and/or `IN <storage>`
    /// specifier (in either order) before a required parenthesized body of
    /// `TYPE <ident>` plus comma-separated key/value options.
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token() != Token::LParen {
            // First slot: either `IN <storage>` or a secret name.
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // A name may still be followed by `IN <storage>`.
            if storage_specifier.is_none()
                && self.peek_token() != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        // Remaining `, key value` pairs inside the parens are options.
        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive; neither leaves
        // the scope unspecified.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
4912
    /// Parses `CACHE [flag] TABLE <name> [OPTIONS(...)] [[AS] query]`.
    /// The optional flag is an object name that may precede `TABLE`.
    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
        if self.parse_keyword(Keyword::TABLE) {
            let table_name = self.parse_object_name(false)?;
            if self.peek_token().token != Token::EOF {
                // Optional OPTIONS(...) clause.
                if let Token::Word(word) = self.peek_token().token {
                    if word.keyword == Keyword::OPTIONS {
                        options = self.parse_options(Keyword::OPTIONS)?
                    }
                };

                // Anything remaining must be the `[AS] query` tail.
                if self.peek_token().token != Token::EOF {
                    let (a, q) = self.parse_as_query()?;
                    has_as = a;
                    query = Some(q);
                }

                Ok(Statement::Cache {
                    table_flag,
                    table_name,
                    has_as,
                    options,
                    query,
                })
            } else {
                Ok(Statement::Cache {
                    table_flag,
                    table_name,
                    has_as,
                    options,
                    query,
                })
            }
        } else {
            // No immediate TABLE keyword: what comes first is the flag.
            table_flag = Some(self.parse_object_name(false)?);
            if self.parse_keyword(Keyword::TABLE) {
                let table_name = self.parse_object_name(false)?;
                if self.peek_token() != Token::EOF {
                    // Optional OPTIONS(...) clause.
                    if let Token::Word(word) = self.peek_token().token {
                        if word.keyword == Keyword::OPTIONS {
                            options = self.parse_options(Keyword::OPTIONS)?
                        }
                    };

                    // Anything remaining must be the `[AS] query` tail.
                    if self.peek_token() != Token::EOF {
                        let (a, q) = self.parse_as_query()?;
                        has_as = a;
                        query = Some(q);
                    }

                    Ok(Statement::Cache {
                        table_flag,
                        table_name,
                        has_as,
                        options,
                        query,
                    })
                } else {
                    Ok(Statement::Cache {
                        table_flag,
                        table_name,
                        has_as,
                        options,
                        query,
                    })
                }
            } else {
                // Rewind so the error points at the offending token rather
                // than at EOF.
                if self.peek_token() == Token::EOF {
                    self.prev_token();
                }
                self.expected("a `TABLE` keyword", self.peek_token())
            }
        }
    }
4988
4989 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4991 match self.peek_token().token {
4992 Token::Word(word) => match word.keyword {
4993 Keyword::AS => {
4994 self.next_token();
4995 Ok((true, self.parse_query()?))
4996 }
4997 _ => Ok((false, self.parse_query()?)),
4998 },
4999 _ => self.expected("a QUERY statement", self.peek_token()),
5000 }
5001 }
5002
    /// Parses `UNCACHE TABLE [IF EXISTS] <name>` (the `UNCACHE` keyword has
    /// already been consumed).
    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let table_name = self.parse_object_name(false)?;
        Ok(Statement::UNCache {
            table_name,
            if_exists,
        })
    }
5013
    /// Parses `CREATE VIRTUAL TABLE [IF NOT EXISTS] <name> USING <module>
    /// [(args)]` (the `VIRTUAL` keyword has already been consumed).
    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(false)?;
        self.expect_keyword_is(Keyword::USING)?;
        let module_name = self.parse_identifier()?;
        // Module arguments are captured as an optional parenthesized
        // column list.
        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
        Ok(Statement::CreateVirtualTable {
            name: table_name,
            if_not_exists,
            module_name,
            module_args,
        })
    }
5033
5034 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5035 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5036
5037 let schema_name = self.parse_schema_name()?;
5038
5039 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5040 Some(self.parse_expr()?)
5041 } else {
5042 None
5043 };
5044
5045 let with = if self.peek_keyword(Keyword::WITH) {
5046 Some(self.parse_options(Keyword::WITH)?)
5047 } else {
5048 None
5049 };
5050
5051 let options = if self.peek_keyword(Keyword::OPTIONS) {
5052 Some(self.parse_options(Keyword::OPTIONS)?)
5053 } else {
5054 None
5055 };
5056
5057 let clone = if self.parse_keyword(Keyword::CLONE) {
5058 Some(self.parse_object_name(false)?)
5059 } else {
5060 None
5061 };
5062
5063 Ok(Statement::CreateSchema {
5064 schema_name,
5065 if_not_exists,
5066 with,
5067 options,
5068 default_collate_spec,
5069 clone,
5070 })
5071 }
5072
5073 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5074 if self.parse_keyword(Keyword::AUTHORIZATION) {
5075 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5076 } else {
5077 let name = self.parse_object_name(false)?;
5078
5079 if self.parse_keyword(Keyword::AUTHORIZATION) {
5080 Ok(SchemaName::NamedAuthorization(
5081 name,
5082 self.parse_identifier()?,
5083 ))
5084 } else {
5085 Ok(SchemaName::Simple(name))
5086 }
5087 }
5088 }
5089
5090 pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5091 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5092 let db_name = self.parse_object_name(false)?;
5093 let mut location = None;
5094 let mut managed_location = None;
5095 loop {
5096 match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5097 Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5098 Some(Keyword::MANAGEDLOCATION) => {
5099 managed_location = Some(self.parse_literal_string()?)
5100 }
5101 _ => break,
5102 }
5103 }
5104 let clone = if self.parse_keyword(Keyword::CLONE) {
5105 Some(self.parse_object_name(false)?)
5106 } else {
5107 None
5108 };
5109
5110 Ok(Statement::CreateDatabase {
5111 db_name,
5112 if_not_exists: ine,
5113 location,
5114 managed_location,
5115 or_replace: false,
5116 transient: false,
5117 clone,
5118 data_retention_time_in_days: None,
5119 max_data_extension_time_in_days: None,
5120 external_volume: None,
5121 catalog: None,
5122 replace_invalid_characters: None,
5123 default_ddl_collation: None,
5124 storage_serialization_policy: None,
5125 comment: None,
5126 catalog_sync: None,
5127 catalog_sync_namespace_mode: None,
5128 catalog_sync_namespace_flatten_delimiter: None,
5129 with_tags: None,
5130 with_contacts: None,
5131 })
5132 }
5133
5134 pub fn parse_optional_create_function_using(
5135 &mut self,
5136 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5137 if !self.parse_keyword(Keyword::USING) {
5138 return Ok(None);
5139 };
5140 let keyword =
5141 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5142
5143 let uri = self.parse_literal_string()?;
5144
5145 match keyword {
5146 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5147 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5148 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5149 _ => self.expected(
5150 "JAR, FILE or ARCHIVE, got {:?}",
5151 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5152 ),
5153 }
5154 }
5155
    /// Dispatch `CREATE [OR ALTER] [OR REPLACE] [TEMPORARY] FUNCTION` to the
    /// dialect-specific parser.
    ///
    /// Dialects without a `CREATE FUNCTION` parser fall through to an
    /// "object type" error reported at the `FUNCTION` keyword.
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB's `CREATE FUNCTION` shares the macro grammar.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
        } else {
            // Rewind so the error points at the `FUNCTION` keyword.
            self.prev_token();
            self.expected("an object type after CREATE", self.peek_token())
        }
    }
5177
    /// Parse a PostgreSQL-style `CREATE FUNCTION` (also used by the generic
    /// dialect): name, parenthesized argument list, optional `RETURNS` type,
    /// then a sequence of body/attribute clauses accepted in any order.
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        // An empty `()` argument list is allowed.
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        // Optional `RETURNS <type>` clause.
        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Accumulates the function attributes; each single-use clause may
        // appear at most once (SET clauses may repeat, see `set_params`).
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Reject a duplicate occurrence of a single-use clause.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                // `AS 'definition'` — the function body as a string.
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                // IMMUTABLE / STABLE / VOLATILE are mutually exclusive.
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                // The three NULL-handling forms share one slot.
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                // `PARALLEL { UNSAFE | RESTRICTED | SAFE }`.
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                // `SECURITY { DEFINER | INVOKER }`.
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected("DEFINER or INVOKER", self.peek_token());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // `SET name { = | TO } values` or `SET name FROM CURRENT`;
                // multiple SET clauses are collected, not deduplicated.
                let name = self.parse_identifier()?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected("= or TO", self.peek_token());
                    }
                    let values = self.parse_comma_separated(Parser::parse_expr)?;
                    FunctionSetValue::Values(values)
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // `RETURN <expr>` — alternative body form; shares the
                // function-body slot with `AS`.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No further clause recognized: end of the attribute list.
                break;
            }
        }

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        }))
    }
5327
5328 fn parse_hive_create_function(
5332 &mut self,
5333 or_replace: bool,
5334 temporary: bool,
5335 ) -> Result<Statement, ParserError> {
5336 let name = self.parse_object_name(false)?;
5337 self.expect_keyword_is(Keyword::AS)?;
5338
5339 let body = self.parse_create_function_body_string()?;
5340 let using = self.parse_optional_create_function_using()?;
5341
5342 Ok(Statement::CreateFunction(CreateFunction {
5343 or_alter: false,
5344 or_replace,
5345 temporary,
5346 name,
5347 function_body: Some(body),
5348 using,
5349 if_not_exists: false,
5350 args: None,
5351 return_type: None,
5352 behavior: None,
5353 called_on_null: None,
5354 parallel: None,
5355 security: None,
5356 set_params: vec![],
5357 language: None,
5358 determinism_specifier: None,
5359 options: None,
5360 remote_connection: None,
5361 }))
5362 }
5363
    /// Parse a BigQuery `CREATE FUNCTION`, including remote functions and an
    /// `OPTIONS(...)` list that may appear either before or after the `AS`
    /// body (the position is preserved via the `CreateFunctionBody` variant).
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        // Optional `RETURNS <type>`.
        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_data_type()?)
        } else {
            None
        };

        // Optional `[NOT] DETERMINISTIC`.
        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        // Remote functions: `REMOTE WITH CONNECTION <name>`.
        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // `OPTIONS(...)` may precede the `AS` body; try it here first.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // A remote function has no `AS` body; otherwise one is required.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS before the body — they may still follow it,
                // in which case the body came before the options.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                // OPTIONS already seen, so the body comes after them.
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        }))
    }
5443
    /// Parse a T-SQL `CREATE FUNCTION`: `name(params) RETURNS <type> [AS]`
    /// followed by either a `BEGIN ... END` block or a `RETURN` body.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Table-valued functions may name the returned table variable:
        // `RETURNS @result TABLE (...)`. Speculatively parse that form;
        // `maybe_parse` rewinds the token stream on failure.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            p.expect_keyword_is(Keyword::TABLE)?;
            // Step back so `parse_data_type` sees the TABLE keyword itself.
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        // Fall back to a plain return type when the named-table form failed.
        let return_type = if return_table.is_some() {
            return_table
        } else {
            Some(self.parse_data_type()?)
        };

        // `AS` before the body is optional.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            // Multi-statement body: `BEGIN <statements> END`, with the
            // delimiting tokens preserved for round-tripping.
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token() == Token::LParen {
                // `RETURN (<expr or subquery>)`.
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                // Inline table-valued function: `RETURN SELECT ...`.
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token().span.start)?
        };

        Ok(Statement::CreateFunction(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        }))
    }
5534
5535 fn parse_create_function_name_and_params(
5536 &mut self,
5537 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5538 let name = self.parse_object_name(false)?;
5539 let parse_function_param =
5540 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5541 let name = parser.parse_identifier()?;
5542 let data_type = parser.parse_data_type()?;
5543 let default_expr = if parser.consume_token(&Token::Eq) {
5544 Some(parser.parse_expr()?)
5545 } else {
5546 None
5547 };
5548
5549 Ok(OperateFunctionArg {
5550 mode: None,
5551 name: Some(name),
5552 data_type,
5553 default_expr,
5554 })
5555 };
5556 self.expect_token(&Token::LParen)?;
5557 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5558 self.expect_token(&Token::RParen)?;
5559 Ok((name, args))
5560 }
5561
    /// Parse a single function argument in the PostgreSQL-style grammar:
    /// `[IN | OUT | INOUT] [name] type [{DEFAULT | =} expr]`.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        // Optional argument mode.
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else {
            None
        };

        // The argument name is optional and a bare identifier is ambiguous
        // with a type name, so first parse a "data type" unconditionally...
        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Index of the token just consumed — the tail of what was parsed as
        // the data type (for a bare identifier, the identifier itself).
        let data_type_idx = self.get_current_index();

        // `DEFAULT` must not be mistaken for a second data type.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // ...then, if a second data type follows, the first parse actually
        // consumed the argument *name*; reinterpret accordingly.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // Only a plain word can be reinterpreted as an identifier.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Optional default value: `DEFAULT expr` or `= expr`.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
5620
5621 pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5627 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5628 {
5629 self.prev_token();
5630 return self.expected("an object type after DROP", self.peek_token());
5631 }
5632 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5633 let trigger_name = self.parse_object_name(false)?;
5634 let table_name = if self.parse_keyword(Keyword::ON) {
5635 Some(self.parse_object_name(false)?)
5636 } else {
5637 None
5638 };
5639 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5640 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5641 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5642 Some(unexpected_keyword) => return Err(ParserError::ParserError(
5643 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
5644 )),
5645 None => None,
5646 };
5647 Ok(Statement::DropTrigger(DropTrigger {
5648 if_exists,
5649 trigger_name,
5650 table_name,
5651 option,
5652 }))
5653 }
5654
5655 pub fn parse_create_trigger(
5656 &mut self,
5657 temporary: bool,
5658 or_alter: bool,
5659 or_replace: bool,
5660 is_constraint: bool,
5661 ) -> Result<Statement, ParserError> {
5662 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5663 {
5664 self.prev_token();
5665 return self.expected("an object type after CREATE", self.peek_token());
5666 }
5667
5668 let name = self.parse_object_name(false)?;
5669 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
5670
5671 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5672 self.expect_keyword_is(Keyword::ON)?;
5673 let table_name = self.parse_object_name(false)?;
5674
5675 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5676 self.parse_object_name(true).ok()
5677 } else {
5678 None
5679 };
5680
5681 let characteristics = self.parse_constraint_characteristics()?;
5682
5683 let mut referencing = vec![];
5684 if self.parse_keyword(Keyword::REFERENCING) {
5685 while let Some(refer) = self.parse_trigger_referencing()? {
5686 referencing.push(refer);
5687 }
5688 }
5689
5690 let trigger_object = if self.parse_keyword(Keyword::FOR) {
5691 let include_each = self.parse_keyword(Keyword::EACH);
5692 let trigger_object =
5693 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5694 Keyword::ROW => TriggerObject::Row,
5695 Keyword::STATEMENT => TriggerObject::Statement,
5696 unexpected_keyword => return Err(ParserError::ParserError(
5697 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
5698 )),
5699 };
5700
5701 Some(if include_each {
5702 TriggerObjectKind::ForEach(trigger_object)
5703 } else {
5704 TriggerObjectKind::For(trigger_object)
5705 })
5706 } else {
5707 let _ = self.parse_keyword(Keyword::FOR);
5708
5709 None
5710 };
5711
5712 let condition = self
5713 .parse_keyword(Keyword::WHEN)
5714 .then(|| self.parse_expr())
5715 .transpose()?;
5716
5717 let mut exec_body = None;
5718 let mut statements = None;
5719 if self.parse_keyword(Keyword::EXECUTE) {
5720 exec_body = Some(self.parse_trigger_exec_body()?);
5721 } else {
5722 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5723 }
5724
5725 Ok(CreateTrigger {
5726 or_alter,
5727 temporary,
5728 or_replace,
5729 is_constraint,
5730 name,
5731 period,
5732 period_before_table: true,
5733 events,
5734 table_name,
5735 referenced_table_name,
5736 referencing,
5737 trigger_object,
5738 condition,
5739 exec_body,
5740 statements_as: false,
5741 statements,
5742 characteristics,
5743 }
5744 .into())
5745 }
5746
5747 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5748 Ok(
5749 match self.expect_one_of_keywords(&[
5750 Keyword::FOR,
5751 Keyword::BEFORE,
5752 Keyword::AFTER,
5753 Keyword::INSTEAD,
5754 ])? {
5755 Keyword::FOR => TriggerPeriod::For,
5756 Keyword::BEFORE => TriggerPeriod::Before,
5757 Keyword::AFTER => TriggerPeriod::After,
5758 Keyword::INSTEAD => self
5759 .expect_keyword_is(Keyword::OF)
5760 .map(|_| TriggerPeriod::InsteadOf)?,
5761 unexpected_keyword => return Err(ParserError::ParserError(
5762 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
5763 )),
5764 },
5765 )
5766 }
5767
5768 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5769 Ok(
5770 match self.expect_one_of_keywords(&[
5771 Keyword::INSERT,
5772 Keyword::UPDATE,
5773 Keyword::DELETE,
5774 Keyword::TRUNCATE,
5775 ])? {
5776 Keyword::INSERT => TriggerEvent::Insert,
5777 Keyword::UPDATE => {
5778 if self.parse_keyword(Keyword::OF) {
5779 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5780 TriggerEvent::Update(cols)
5781 } else {
5782 TriggerEvent::Update(vec![])
5783 }
5784 }
5785 Keyword::DELETE => TriggerEvent::Delete,
5786 Keyword::TRUNCATE => TriggerEvent::Truncate,
5787 unexpected_keyword => return Err(ParserError::ParserError(
5788 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
5789 )),
5790 },
5791 )
5792 }
5793
5794 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5795 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5796 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5797 TriggerReferencingType::OldTable
5798 }
5799 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5800 TriggerReferencingType::NewTable
5801 }
5802 _ => {
5803 return Ok(None);
5804 }
5805 };
5806
5807 let is_as = self.parse_keyword(Keyword::AS);
5808 let transition_relation_name = self.parse_object_name(false)?;
5809 Ok(Some(TriggerReferencing {
5810 refer_type,
5811 is_as,
5812 transition_relation_name,
5813 }))
5814 }
5815
5816 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5817 Ok(TriggerExecBody {
5818 exec_type: match self
5819 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5820 {
5821 Keyword::FUNCTION => TriggerExecBodyType::Function,
5822 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5823 unexpected_keyword => return Err(ParserError::ParserError(
5824 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
5825 )),
5826 },
5827 func_desc: self.parse_function_desc()?,
5828 })
5829 }
5830
5831 pub fn parse_create_macro(
5832 &mut self,
5833 or_replace: bool,
5834 temporary: bool,
5835 ) -> Result<Statement, ParserError> {
5836 if dialect_of!(self is DuckDbDialect | GenericDialect) {
5837 let name = self.parse_object_name(false)?;
5838 self.expect_token(&Token::LParen)?;
5839 let args = if self.consume_token(&Token::RParen) {
5840 self.prev_token();
5841 None
5842 } else {
5843 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5844 };
5845
5846 self.expect_token(&Token::RParen)?;
5847 self.expect_keyword_is(Keyword::AS)?;
5848
5849 Ok(Statement::CreateMacro {
5850 or_replace,
5851 temporary,
5852 name,
5853 args,
5854 definition: if self.parse_keyword(Keyword::TABLE) {
5855 MacroDefinition::Table(self.parse_query()?)
5856 } else {
5857 MacroDefinition::Expr(self.parse_expr()?)
5858 },
5859 })
5860 } else {
5861 self.prev_token();
5862 self.expected("an object type after CREATE", self.peek_token())
5863 }
5864 }
5865
5866 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5867 let name = self.parse_identifier()?;
5868
5869 let default_expr =
5870 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5871 Some(self.parse_expr()?)
5872 } else {
5873 None
5874 };
5875 Ok(MacroArg { name, default_expr })
5876 }
5877
5878 pub fn parse_create_external_table(
5879 &mut self,
5880 or_replace: bool,
5881 ) -> Result<Statement, ParserError> {
5882 self.expect_keyword_is(Keyword::TABLE)?;
5883 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5884 let table_name = self.parse_object_name(false)?;
5885 let (columns, constraints) = self.parse_columns()?;
5886
5887 let hive_distribution = self.parse_hive_distribution()?;
5888 let hive_formats = self.parse_hive_formats()?;
5889
5890 let file_format = if let Some(ref hf) = hive_formats {
5891 if let Some(ref ff) = hf.storage {
5892 match ff {
5893 HiveIOFormat::FileFormat { format } => Some(*format),
5894 _ => None,
5895 }
5896 } else {
5897 None
5898 }
5899 } else {
5900 None
5901 };
5902 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
5903 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5904 let table_options = if !table_properties.is_empty() {
5905 CreateTableOptions::TableProperties(table_properties)
5906 } else {
5907 CreateTableOptions::None
5908 };
5909 Ok(CreateTableBuilder::new(table_name)
5910 .columns(columns)
5911 .constraints(constraints)
5912 .hive_distribution(hive_distribution)
5913 .hive_formats(hive_formats)
5914 .table_options(table_options)
5915 .or_replace(or_replace)
5916 .if_not_exists(if_not_exists)
5917 .external(true)
5918 .file_format(file_format)
5919 .location(location)
5920 .build())
5921 }
5922
5923 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5924 let next_token = self.next_token();
5925 match &next_token.token {
5926 Token::Word(w) => match w.keyword {
5927 Keyword::AVRO => Ok(FileFormat::AVRO),
5928 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5929 Keyword::ORC => Ok(FileFormat::ORC),
5930 Keyword::PARQUET => Ok(FileFormat::PARQUET),
5931 Keyword::RCFILE => Ok(FileFormat::RCFILE),
5932 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5933 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5934 _ => self.expected("fileformat", next_token),
5935 },
5936 _ => self.expected("fileformat", next_token),
5937 }
5938 }
5939
5940 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
5941 if self.consume_token(&Token::Eq) {
5942 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
5943 } else {
5944 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
5945 }
5946 }
5947
5948 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5949 let next_token = self.next_token();
5950 match &next_token.token {
5951 Token::Word(w) => match w.keyword {
5952 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5953 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5954 Keyword::JSON => Ok(AnalyzeFormat::JSON),
5955 _ => self.expected("fileformat", next_token),
5956 },
5957 _ => self.expected("fileformat", next_token),
5958 }
5959 }
5960
    /// Parse a `CREATE VIEW` statement, after any `OR ALTER` / `OR REPLACE`
    /// / `TEMPORARY` modifiers and optional MySQL view parameters have been
    /// consumed by the caller.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<Statement, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` may appear before or after the view name; record
        // which position was used so the statement can round-trip.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let columns = self.parse_view_columns()?;
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        // Optional `CLUSTER BY (cols)` clause.
        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style `OPTIONS(...)`; when non-empty it overrides any
        // `WITH` options parsed above.
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse-style target table: `TO <table>`.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // Snowflake-style `COMMENT = '<text>'` before `AS`.
        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift-style `WITH NO SCHEMA BINDING` after the query.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            to,
            params: create_view_params,
            name_before_not_exists,
        }
        .into())
    }
6053
    /// Parse the optional `CREATE VIEW` parameters `ALGORITHM = ...`,
    /// `DEFINER = ...` and `SQL SECURITY ...`.
    ///
    /// Returns `Ok(None)` when none of the three clauses are present.
    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
            self.expect_token(&Token::Eq)?;
            Some(
                match self.expect_one_of_keywords(&[
                    Keyword::UNDEFINED,
                    Keyword::MERGE,
                    Keyword::TEMPTABLE,
                ])? {
                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
                    Keyword::MERGE => CreateViewAlgorithm::Merge,
                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
                    // `expect_one_of_keywords` only yields the listed
                    // keywords; this arm re-reports the offending token for
                    // exhaustiveness.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self
                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
                    }
                },
            )
        } else {
            None
        };
        let definer = if self.parse_keyword(Keyword::DEFINER) {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_grantee_name()?)
        } else {
            None
        };
        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
            Some(
                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
                    Keyword::DEFINER => CreateViewSecurity::Definer,
                    Keyword::INVOKER => CreateViewSecurity::Invoker,
                    // Same exhaustiveness arm as above.
                    _ => {
                        self.prev_token();
                        let found = self.next_token();
                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
                    }
                },
            )
        } else {
            None
        };
        // Only build the params struct when at least one clause was given.
        if algorithm.is_some() || definer.is_some() || security.is_some() {
            Ok(Some(CreateViewParams {
                algorithm,
                definer,
                security,
            }))
        } else {
            Ok(None)
        }
    }
6111
    /// Parse a `CREATE ROLE` statement (the `CREATE ROLE` keywords have
    /// already been consumed by the caller).
    ///
    /// Accepts a comma-separated list of role names followed by dialect-
    /// specific options: `AUTHORIZATION` for MsSql, and the Postgres role
    /// attributes (`LOGIN`, `PASSWORD`, `VALID UNTIL`, `IN ROLE`, ...).
    /// Each option may appear at most once; a duplicate is reported as a
    /// parse error at the offending keyword's location.
    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // `WITH` is an optional noise word before the option list; only the
        // dialects below actually define any options.
        let _ = self.parse_keyword(Keyword::WITH); let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Option values collected by the loop below. `Option`/empty-`Vec`
        // doubles as the "already seen" marker for duplicate detection.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, used for duplicate-option
            // error messages (falls back to 0:0 if the index is out of range).
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN` must be followed by either `ROLE` or `GROUP`.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected("ROLE or GROUP after IN", self.peek_token())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Unreachable in practice: `parse_one_of_keywords` only
                // returns keywords from `optional_keywords`.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        }
        .into())
    }
6336
6337 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6338 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6339 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6340 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6341 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6342 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6343 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6344 )),
6345 None => {
6346 match self.parse_identifier() {
6347 Ok(ident) => Owner::Ident(ident),
6348 Err(e) => {
6349 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6350 }
6351 }
6352 }
6353 };
6354 Ok(owner)
6355 }
6356
6357 fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6359 let name = self.parse_object_name(false)?;
6360 self.expect_keyword_is(Keyword::AS)?;
6361 let data_type = self.parse_data_type()?;
6362 let collation = if self.parse_keyword(Keyword::COLLATE) {
6363 Some(self.parse_identifier()?)
6364 } else {
6365 None
6366 };
6367 let default = if self.parse_keyword(Keyword::DEFAULT) {
6368 Some(self.parse_expr()?)
6369 } else {
6370 None
6371 };
6372 let mut constraints = Vec::new();
6373 while let Some(constraint) = self.parse_optional_table_constraint()? {
6374 constraints.push(constraint);
6375 }
6376
6377 Ok(Statement::CreateDomain(CreateDomain {
6378 name,
6379 data_type,
6380 collation,
6381 default,
6382 constraints,
6383 }))
6384 }
6385
    /// Parse a Postgres `CREATE POLICY` statement:
    /// `CREATE POLICY name ON table [AS {PERMISSIVE | RESTRICTIVE}]
    ///  [FOR {ALL | SELECT | INSERT | UPDATE | DELETE}] [TO role, ...]
    ///  [USING (expr)] [WITH CHECK (expr)]`.
    /// The `CREATE POLICY` keywords have already been consumed.
    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::ON)?;
        let table_name = self.parse_object_name(false)?;

        // Optional `AS PERMISSIVE | RESTRICTIVE`.
        let policy_type = if self.parse_keyword(Keyword::AS) {
            let keyword =
                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
            Some(match keyword {
                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
                // Unreachable: `expect_one_of_keywords` only returns the
                // keywords listed above.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
                )),
            })
        } else {
            None
        };

        // Optional `FOR <command>`.
        let command = if self.parse_keyword(Keyword::FOR) {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::ALL,
                Keyword::SELECT,
                Keyword::INSERT,
                Keyword::UPDATE,
                Keyword::DELETE,
            ])?;
            Some(match keyword {
                Keyword::ALL => CreatePolicyCommand::All,
                Keyword::SELECT => CreatePolicyCommand::Select,
                Keyword::INSERT => CreatePolicyCommand::Insert,
                Keyword::UPDATE => CreatePolicyCommand::Update,
                Keyword::DELETE => CreatePolicyCommand::Delete,
                // Unreachable, same reasoning as above.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
                )),
            })
        } else {
            None
        };

        // Optional `TO role[, ...]` — each target parsed via `parse_owner`.
        let to = if self.parse_keyword(Keyword::TO) {
            Some(self.parse_comma_separated(|p| p.parse_owner())?)
        } else {
            None
        };

        // Optional `USING (expr)`.
        let using = if self.parse_keyword(Keyword::USING) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        // Optional `WITH CHECK (expr)`.
        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };

        Ok(CreatePolicy {
            name,
            table_name,
            policy_type,
            command,
            to,
            using,
            with_check,
        })
    }
6470
6471 pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6481 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6482 let name = self.parse_identifier()?;
6483
6484 let connector_type = if self.parse_keyword(Keyword::TYPE) {
6485 Some(self.parse_literal_string()?)
6486 } else {
6487 None
6488 };
6489
6490 let url = if self.parse_keyword(Keyword::URL) {
6491 Some(self.parse_literal_string()?)
6492 } else {
6493 None
6494 };
6495
6496 let comment = self.parse_optional_inline_comment()?;
6497
6498 let with_dcproperties =
6499 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6500 properties if !properties.is_empty() => Some(properties),
6501 _ => None,
6502 };
6503
6504 Ok(Statement::CreateConnector(CreateConnector {
6505 name,
6506 if_not_exists,
6507 connector_type,
6508 url,
6509 comment,
6510 with_dcproperties,
6511 }))
6512 }
6513
6514 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6520 let mut parts = vec![];
6521 loop {
6522 parts.push(ObjectNamePart::Identifier(Ident::new(
6523 self.next_token().to_string(),
6524 )));
6525 if !self.consume_token(&Token::Period) {
6526 break;
6527 }
6528 }
6529 Ok(ObjectName(parts))
6530 }
6531
    /// Parse a PostgreSQL `CREATE OPERATOR` statement:
    /// `CREATE OPERATOR name ( FUNCTION = fn [, LEFTARG = type]
    ///  [, RIGHTARG = type] [, COMMUTATOR = op] ... )`.
    ///
    /// Each option may appear at most once; `FUNCTION` (or its synonym
    /// `PROCEDURE`) is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            // Every arm below is guarded by a "not seen yet" condition, so a
            // repeated option falls through to the catch-all arm and is
            // rejected as a duplicate.
            match keyword {
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR / NEGATOR accept either a bare operator name or
                // the `OPERATOR(op)` wrapper syntax.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a guard above failed, i.e. the option was
                // already given.
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            // Options are comma-separated; a missing comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION (or PROCEDURE) is the only mandatory option.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(Statement::CreateOperator(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        }))
    }
6653
6654 pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
6658 let name = self.parse_object_name(false)?;
6659 self.expect_keyword(Keyword::USING)?;
6660 let using = self.parse_identifier()?;
6661
6662 Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
6663 name,
6664 using,
6665 }))
6666 }
6667
6668 pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
6672 let name = self.parse_object_name(false)?;
6673 let default = self.parse_keyword(Keyword::DEFAULT);
6674 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6675 let for_type = self.parse_data_type()?;
6676 self.expect_keyword(Keyword::USING)?;
6677 let using = self.parse_identifier()?;
6678
6679 let family = if self.parse_keyword(Keyword::FAMILY) {
6680 Some(self.parse_object_name(false)?)
6681 } else {
6682 None
6683 };
6684
6685 self.expect_keyword(Keyword::AS)?;
6686
6687 let mut items = vec![];
6688 loop {
6689 if self.parse_keyword(Keyword::OPERATOR) {
6690 let strategy_number = self.parse_literal_uint()? as u32;
6691 let operator_name = self.parse_operator_name()?;
6692
6693 let op_types = if self.consume_token(&Token::LParen) {
6695 let left = self.parse_data_type()?;
6696 self.expect_token(&Token::Comma)?;
6697 let right = self.parse_data_type()?;
6698 self.expect_token(&Token::RParen)?;
6699 Some(OperatorArgTypes { left, right })
6700 } else {
6701 None
6702 };
6703
6704 let purpose = if self.parse_keyword(Keyword::FOR) {
6706 if self.parse_keyword(Keyword::SEARCH) {
6707 Some(OperatorPurpose::ForSearch)
6708 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6709 let sort_family = self.parse_object_name(false)?;
6710 Some(OperatorPurpose::ForOrderBy { sort_family })
6711 } else {
6712 return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
6713 }
6714 } else {
6715 None
6716 };
6717
6718 items.push(OperatorClassItem::Operator {
6719 strategy_number,
6720 operator_name,
6721 op_types,
6722 purpose,
6723 });
6724 } else if self.parse_keyword(Keyword::FUNCTION) {
6725 let support_number = self.parse_literal_uint()? as u32;
6726
6727 let op_types =
6729 if self.consume_token(&Token::LParen) && self.peek_token() != Token::RParen {
6730 let mut types = vec![];
6731 loop {
6732 types.push(self.parse_data_type()?);
6733 if !self.consume_token(&Token::Comma) {
6734 break;
6735 }
6736 }
6737 self.expect_token(&Token::RParen)?;
6738 Some(types)
6739 } else if self.consume_token(&Token::LParen) {
6740 self.expect_token(&Token::RParen)?;
6741 Some(vec![])
6742 } else {
6743 None
6744 };
6745
6746 let function_name = self.parse_object_name(false)?;
6747
6748 let argument_types = if self.consume_token(&Token::LParen) {
6750 let mut types = vec![];
6751 loop {
6752 if self.peek_token() == Token::RParen {
6753 break;
6754 }
6755 types.push(self.parse_data_type()?);
6756 if !self.consume_token(&Token::Comma) {
6757 break;
6758 }
6759 }
6760 self.expect_token(&Token::RParen)?;
6761 types
6762 } else {
6763 vec![]
6764 };
6765
6766 items.push(OperatorClassItem::Function {
6767 support_number,
6768 op_types,
6769 function_name,
6770 argument_types,
6771 });
6772 } else if self.parse_keyword(Keyword::STORAGE) {
6773 let storage_type = self.parse_data_type()?;
6774 items.push(OperatorClassItem::Storage { storage_type });
6775 } else {
6776 break;
6777 }
6778
6779 if !self.consume_token(&Token::Comma) {
6781 break;
6782 }
6783 }
6784
6785 Ok(Statement::CreateOperatorClass(CreateOperatorClass {
6786 name,
6787 default,
6788 for_type,
6789 using,
6790 family,
6791 items,
6792 }))
6793 }
6794
    /// Parse a `DROP ...` statement. Many object kinds (FUNCTION, POLICY,
    /// SECRET, TRIGGER, ...) delegate to dedicated sub-parsers and return
    /// early; the remaining kinds share the generic
    /// `DROP <kind> [IF EXISTS] name[, ...] [CASCADE | RESTRICT] [PURGE]
    /// [ON table]` tail handled at the bottom of this function.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // TEMPORARY / PERSISTENT prefixes are only meaningful for some
        // dialects and are threaded through to `parse_drop_secret`.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            // The kinds below have their own grammar; bail out to the
            // dedicated sub-parsers.
            return self.parse_drop_function();
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy();
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain();
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger();
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // `DROP OPERATOR [FAMILY | CLASS] ...`
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected(
                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token(),
            );
        };
        // Generic tail shared by all object types handled above.
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Remember the location before the option keywords for error reporting.
        let loc = self.peek_token().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
6891
6892 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6893 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6894 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6895 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6896 _ => None,
6897 }
6898 }
6899
6900 fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6905 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6906 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6907 let drop_behavior = self.parse_optional_drop_behavior();
6908 Ok(Statement::DropFunction(DropFunction {
6909 if_exists,
6910 func_desc,
6911 drop_behavior,
6912 }))
6913 }
6914
6915 fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6921 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6922 let name = self.parse_identifier()?;
6923 self.expect_keyword_is(Keyword::ON)?;
6924 let table_name = self.parse_object_name(false)?;
6925 let drop_behavior = self.parse_optional_drop_behavior();
6926 Ok(Statement::DropPolicy {
6927 if_exists,
6928 name,
6929 table_name,
6930 drop_behavior,
6931 })
6932 }
6933 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6939 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6940 let name = self.parse_identifier()?;
6941 Ok(Statement::DropConnector { if_exists, name })
6942 }
6943
6944 fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6948 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6949 let name = self.parse_object_name(false)?;
6950 let drop_behavior = self.parse_optional_drop_behavior();
6951 Ok(Statement::DropDomain(DropDomain {
6952 if_exists,
6953 name,
6954 drop_behavior,
6955 }))
6956 }
6957
6958 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6963 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6964 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6965 let drop_behavior = self.parse_optional_drop_behavior();
6966 Ok(Statement::DropProcedure {
6967 if_exists,
6968 proc_desc,
6969 drop_behavior,
6970 })
6971 }
6972
6973 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6974 let name = self.parse_object_name(false)?;
6975
6976 let args = if self.consume_token(&Token::LParen) {
6977 if self.consume_token(&Token::RParen) {
6978 Some(vec![])
6979 } else {
6980 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6981 self.expect_token(&Token::RParen)?;
6982 Some(args)
6983 }
6984 } else {
6985 None
6986 };
6987
6988 Ok(FunctionDesc { name, args })
6989 }
6990
6991 fn parse_drop_secret(
6993 &mut self,
6994 temporary: bool,
6995 persistent: bool,
6996 ) -> Result<Statement, ParserError> {
6997 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6998 let name = self.parse_identifier()?;
6999 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7000 self.parse_identifier().ok()
7001 } else {
7002 None
7003 };
7004 let temp = match (temporary, persistent) {
7005 (true, false) => Some(true),
7006 (false, true) => Some(false),
7007 (false, false) => None,
7008 _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
7009 };
7010
7011 Ok(Statement::DropSecret {
7012 if_exists,
7013 temporary: temp,
7014 name,
7015 storage_specifier,
7016 })
7017 }
7018
    /// Parse a `DECLARE` statement, dispatching to dialect-specific parsers
    /// for BigQuery, Snowflake and MsSql. Otherwise parses the cursor form:
    /// `DECLARE name [BINARY] [ASENSITIVE | INSENSITIVE] [SCROLL | NO SCROLL]
    ///  CURSOR [WITH HOLD | WITHOUT HOLD] FOR query`.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // Cursor modifiers, each tri-state / optional.
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // Unreachable: `parse_one_of_keywords` only returns the
                    // two keywords listed above.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7093
7094 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7102 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7103
7104 let data_type = match self.peek_token().token {
7105 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7106 _ => Some(self.parse_data_type()?),
7107 };
7108
7109 let expr = if data_type.is_some() {
7110 if self.parse_keyword(Keyword::DEFAULT) {
7111 Some(self.parse_expr()?)
7112 } else {
7113 None
7114 }
7115 } else {
7116 self.expect_keyword_is(Keyword::DEFAULT)?;
7119 Some(self.parse_expr()?)
7120 };
7121
7122 Ok(Statement::Declare {
7123 stmts: vec![Declare {
7124 names,
7125 data_type,
7126 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7127 declare_type: None,
7128 binary: None,
7129 sensitive: None,
7130 scroll: None,
7131 hold: None,
7132 for_query: None,
7133 }],
7134 })
7135 }
7136
    /// Parse a Snowflake scripting `DECLARE` block containing one or more
    /// declarations (variables, `CURSOR`s, `RESULTSET`s and `EXCEPTION`s),
    /// separated by semicolons. The block ends when, after a `;`, the next
    /// word is a SQL keyword rather than another declaration name.
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            // Each declaration kind fills a different subset of the
            // (declare_type, for_query, assignment, data_type) tuple.
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` takes a query; anything else is
                    // treated as an expression (e.g. a variable reference).
                    match self.peek_token().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // A RESULTSET may have an optional initializer; a `;`
                    // right after the keyword means none was given.
                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // An EXCEPTION may carry a parenthesized definition.
                    let assigned_expr = if self.peek_token().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: `name [data_type] [DEFAULT | := expr]`.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = self.peek_token().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match self.peek_token().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        // Not a keyword: assume it names another declaration
                        // and keep looping.
                        continue;
                    }
                    _ => {
                        // A keyword (or non-word) follows the `;`, so the
                        // DECLARE block is finished; put the `;` back for the
                        // caller.
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
7252
7253 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7265 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7266
7267 Ok(Statement::Declare { stmts })
7268 }
7269
    /// Parse a single MsSql `DECLARE` item: either `@name [AS] data_type
    /// [= expr]` or `name CURSOR [FOR query]`.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            // MsSql local variables must start with `@`; the only exception
            // is a cursor declaration, whose name is a plain identifier
            // followed by the CURSOR keyword.
            if !ident.value.starts_with('@')
                && !matches!(
                    self.peek_token().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): this reports a `TokenizerError` even though
                // the failure occurs in the parser — confirm whether
                // `ParserError::ParserError` was intended here.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // `CURSOR` marks a cursor declaration; an optional `AS` may precede
        // the data type of a variable declaration.
        let (declare_type, data_type) = match self.peek_token().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // A cursor takes `FOR query`; a variable takes an optional `= expr`.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
7333
7334 pub fn parse_snowflake_variable_declaration_expression(
7342 &mut self,
7343 ) -> Result<Option<DeclareAssignment>, ParserError> {
7344 Ok(match self.peek_token().token {
7345 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7346 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7348 }
7349 Token::Assignment => {
7350 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
7352 self.parse_expr()?,
7353 )))
7354 }
7355 _ => None,
7356 })
7357 }
7358
7359 pub fn parse_mssql_variable_declaration_expression(
7366 &mut self,
7367 ) -> Result<Option<DeclareAssignment>, ParserError> {
7368 Ok(match self.peek_token().token {
7369 Token::Eq => {
7370 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
7372 self.parse_expr()?,
7373 )))
7374 }
7375 _ => None,
7376 })
7377 }
7378
7379 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7381 let direction = if self.parse_keyword(Keyword::NEXT) {
7382 FetchDirection::Next
7383 } else if self.parse_keyword(Keyword::PRIOR) {
7384 FetchDirection::Prior
7385 } else if self.parse_keyword(Keyword::FIRST) {
7386 FetchDirection::First
7387 } else if self.parse_keyword(Keyword::LAST) {
7388 FetchDirection::Last
7389 } else if self.parse_keyword(Keyword::ABSOLUTE) {
7390 FetchDirection::Absolute {
7391 limit: self.parse_number_value()?.value,
7392 }
7393 } else if self.parse_keyword(Keyword::RELATIVE) {
7394 FetchDirection::Relative {
7395 limit: self.parse_number_value()?.value,
7396 }
7397 } else if self.parse_keyword(Keyword::FORWARD) {
7398 if self.parse_keyword(Keyword::ALL) {
7399 FetchDirection::ForwardAll
7400 } else {
7401 FetchDirection::Forward {
7402 limit: Some(self.parse_number_value()?.value),
7404 }
7405 }
7406 } else if self.parse_keyword(Keyword::BACKWARD) {
7407 if self.parse_keyword(Keyword::ALL) {
7408 FetchDirection::BackwardAll
7409 } else {
7410 FetchDirection::Backward {
7411 limit: Some(self.parse_number_value()?.value),
7413 }
7414 }
7415 } else if self.parse_keyword(Keyword::ALL) {
7416 FetchDirection::All
7417 } else {
7418 FetchDirection::Count {
7419 limit: self.parse_number_value()?.value,
7420 }
7421 };
7422
7423 let position = if self.peek_keyword(Keyword::FROM) {
7424 self.expect_keyword(Keyword::FROM)?;
7425 FetchPosition::From
7426 } else if self.peek_keyword(Keyword::IN) {
7427 self.expect_keyword(Keyword::IN)?;
7428 FetchPosition::In
7429 } else {
7430 return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7431 };
7432
7433 let name = self.parse_identifier()?;
7434
7435 let into = if self.parse_keyword(Keyword::INTO) {
7436 Some(self.parse_object_name(false)?)
7437 } else {
7438 None
7439 };
7440
7441 Ok(Statement::Fetch {
7442 name,
7443 direction,
7444 position,
7445 into,
7446 })
7447 }
7448
7449 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7450 let object_type = if self.parse_keyword(Keyword::ALL) {
7451 DiscardObject::ALL
7452 } else if self.parse_keyword(Keyword::PLANS) {
7453 DiscardObject::PLANS
7454 } else if self.parse_keyword(Keyword::SEQUENCES) {
7455 DiscardObject::SEQUENCES
7456 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7457 DiscardObject::TEMP
7458 } else {
7459 return self.expected(
7460 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7461 self.peek_token(),
7462 );
7463 };
7464 Ok(Statement::Discard { object_type })
7465 }
7466
    /// Parse a `CREATE [UNIQUE] INDEX` statement; `unique` tells whether the
    /// caller already consumed the `UNIQUE` keyword.
    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional (`CREATE INDEX ON t (...)`), but it is
        // required when `IF NOT EXISTS` was given.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // `USING <method>` may appear before `ON`.
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // `USING` may also appear after the table name; a later occurrence
        // takes precedence over one parsed before `ON`.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Optional `INCLUDE (col, ...)` covering-column list.
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // `NULLS [NOT] DISTINCT`: Some(true) for DISTINCT, Some(false) for
        // NOT DISTINCT, None when the clause is absent.
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // `WITH (param, ...)` — only for dialects that support it.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate: `WHERE <expr>`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing `ALGORITHM`/`LOCK` options are parsed with the generic
        // alter-table operation parser.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(Statement::CreateIndex(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        }))
    }
7557
7558 pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7559 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7560 let name = self.parse_identifier()?;
7561
7562 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7563 let schema = if self.parse_keyword(Keyword::SCHEMA) {
7564 Some(self.parse_identifier()?)
7565 } else {
7566 None
7567 };
7568
7569 let version = if self.parse_keyword(Keyword::VERSION) {
7570 Some(self.parse_identifier()?)
7571 } else {
7572 None
7573 };
7574
7575 let cascade = self.parse_keyword(Keyword::CASCADE);
7576
7577 (schema, version, cascade)
7578 } else {
7579 (None, None, false)
7580 };
7581
7582 Ok(CreateExtension {
7583 name,
7584 if_not_exists,
7585 schema,
7586 version,
7587 cascade,
7588 }
7589 .into())
7590 }
7591
7592 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7594 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7595 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7596 let cascade_or_restrict =
7597 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7598 Ok(Statement::DropExtension(DropExtension {
7599 names,
7600 if_exists,
7601 cascade_or_restrict: cascade_or_restrict
7602 .map(|k| match k {
7603 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7604 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7605 _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7606 })
7607 .transpose()?,
7608 }))
7609 }
7610
7611 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7614 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7615 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7616 let drop_behavior = self.parse_optional_drop_behavior();
7617 Ok(Statement::DropOperator(DropOperator {
7618 if_exists,
7619 operators,
7620 drop_behavior,
7621 }))
7622 }
7623
    /// Parse one `name (left_type, right_type)` operator signature for
    /// `DROP OPERATOR`. `NONE` as the left type denotes a prefix operator.
    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        // `NONE` (no left operand) is only accepted on the left side.
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;

        // NOTE(review): the right operand does not accept NONE here, so
        // postfix-operator signatures cannot be expressed — confirm whether
        // that restriction is intentional.
        let right_type = self.parse_data_type()?;

        self.expect_token(&Token::RParen)?;

        Ok(DropOperatorSignature {
            name,
            left_type,
            right_type,
        })
    }
7650
7651 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7655 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7656 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7657 self.expect_keyword(Keyword::USING)?;
7658 let using = self.parse_identifier()?;
7659 let drop_behavior = self.parse_optional_drop_behavior();
7660 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7661 if_exists,
7662 names,
7663 using,
7664 drop_behavior,
7665 }))
7666 }
7667
7668 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7672 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7673 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7674 self.expect_keyword(Keyword::USING)?;
7675 let using = self.parse_identifier()?;
7676 let drop_behavior = self.parse_optional_drop_behavior();
7677 Ok(Statement::DropOperatorClass(DropOperatorClass {
7678 if_exists,
7679 names,
7680 using,
7681 drop_behavior,
7682 }))
7683 }
7684
7685 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7687 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7688 self.expect_token(&Token::LParen)?;
7689 let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7690 self.expect_token(&Token::RParen)?;
7691 Ok(HiveDistributionStyle::PARTITIONED { columns })
7692 } else {
7693 Ok(HiveDistributionStyle::NONE)
7694 }
7695 }
7696
7697 pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
7698 let mut hive_format: Option<HiveFormat> = None;
7699 loop {
7700 match self.parse_one_of_keywords(&[
7701 Keyword::ROW,
7702 Keyword::STORED,
7703 Keyword::LOCATION,
7704 Keyword::WITH,
7705 ]) {
7706 Some(Keyword::ROW) => {
7707 hive_format
7708 .get_or_insert_with(HiveFormat::default)
7709 .row_format = Some(self.parse_row_format()?);
7710 }
7711 Some(Keyword::STORED) => {
7712 self.expect_keyword_is(Keyword::AS)?;
7713 if self.parse_keyword(Keyword::INPUTFORMAT) {
7714 let input_format = self.parse_expr()?;
7715 self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7716 let output_format = self.parse_expr()?;
7717 hive_format.get_or_insert_with(HiveFormat::default).storage =
7718 Some(HiveIOFormat::IOF {
7719 input_format,
7720 output_format,
7721 });
7722 } else {
7723 let format = self.parse_file_format()?;
7724 hive_format.get_or_insert_with(HiveFormat::default).storage =
7725 Some(HiveIOFormat::FileFormat { format });
7726 }
7727 }
7728 Some(Keyword::LOCATION) => {
7729 hive_format.get_or_insert_with(HiveFormat::default).location =
7730 Some(self.parse_literal_string()?);
7731 }
7732 Some(Keyword::WITH) => {
7733 self.prev_token();
7734 let properties = self
7735 .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7736 if !properties.is_empty() {
7737 hive_format
7738 .get_or_insert_with(HiveFormat::default)
7739 .serde_properties = Some(properties);
7740 } else {
7741 break;
7742 }
7743 }
7744 None => break,
7745 _ => break,
7746 }
7747 }
7748
7749 Ok(hive_format)
7750 }
7751
    /// Parse a Hive `ROW FORMAT` clause: either `SERDE '<class>'` or
    /// `DELIMITED` followed by any number of delimiter specifications.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            _ => {
                // DELIMITED: collect delimiter clauses until the next tokens
                // no longer start one.
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            // FIELDS TERMINATED BY c [ESCAPED BY c]
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            // COLLECTION ITEMS TERMINATED BY c
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            // MAP KEYS TERMINATED BY c
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            // LINES TERMINATED BY c
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            // NULL DEFINED AS c
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
7847
7848 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7849 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7850 Ok(Some(self.parse_identifier()?))
7851 } else {
7852 Ok(None)
7853 }
7854 }
7855
    /// Parse the body of a `CREATE TABLE` statement after the caller has
    /// consumed the leading modifiers (`OR REPLACE`, `TEMPORARY`, `GLOBAL`,
    /// `TRANSIENT`). Clause order follows the union of the supported
    /// dialects' grammars, so most clauses are optional and dialect-gated.
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<Statement, ParserError> {
        // BigQuery table names may contain unquoted hyphens.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // `PARTITION OF parent` (PostgreSQL-style partition child tables).
        let partition_of = if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::OF])
        {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // `CLONE source` — a failed name parse is treated as "no clone
        // source" (`.ok()`) rather than an error.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        let (columns, constraints) = self.parse_columns()?;
        // Hive allows a table comment directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // Partition bounds are only parsed for `PARTITION OF` tables.
        let for_values = if partition_of.is_some() {
            Some(self.parse_partition_for_values()?)
        } else {
            None
        };

        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        let create_table_config = self.parse_optional_create_table_config()?;

        // Standalone `PRIMARY KEY <expr>` clause outside the column list
        // (ClickHouse-style).
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // `ORDER BY` takes either one bare expression or a (possibly empty)
        // parenthesized list.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token() != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        let strict = self.parse_keyword(Keyword::STRICT);

        // `AS <query>`, or a bare `SELECT` for dialects that accept
        // `CREATE TABLE t SELECT ...`.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            // Un-consume SELECT so `parse_query` sees the whole query.
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .build())
    }
7991
    /// Parse an optional `LIKE`/`ILIKE` clause of `CREATE TABLE`, in either
    /// the parenthesized form `(LIKE name [INCLUDING|EXCLUDING DEFAULTS])`
    /// (only for dialects that support it) or the plain `LIKE name` form.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // The `(` belonged to something else (e.g. a column list):
                // push it back and report no LIKE clause.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8029
8030 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8031 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8032 Ok(OnCommit::DeleteRows)
8033 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8034 Ok(OnCommit::PreserveRows)
8035 } else if self.parse_keywords(&[Keyword::DROP]) {
8036 Ok(OnCommit::Drop)
8037 } else {
8038 parser_err!(
8039 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8040 self.peek_token()
8041 )
8042 }
8043 }
8044
8045 fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
8051 if self.parse_keyword(Keyword::DEFAULT) {
8052 return Ok(ForValues::Default);
8053 }
8054
8055 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8056
8057 if self.parse_keyword(Keyword::IN) {
8058 self.expect_token(&Token::LParen)?;
8060 let values = self.parse_comma_separated(Parser::parse_expr)?;
8061 self.expect_token(&Token::RParen)?;
8062 Ok(ForValues::In(values))
8063 } else if self.parse_keyword(Keyword::FROM) {
8064 self.expect_token(&Token::LParen)?;
8066 let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8067 self.expect_token(&Token::RParen)?;
8068 self.expect_keyword(Keyword::TO)?;
8069 self.expect_token(&Token::LParen)?;
8070 let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8071 self.expect_token(&Token::RParen)?;
8072 Ok(ForValues::From { from, to })
8073 } else if self.parse_keyword(Keyword::WITH) {
8074 self.expect_token(&Token::LParen)?;
8076 self.expect_keyword(Keyword::MODULUS)?;
8077 let modulus = self.parse_literal_uint()?;
8078 self.expect_token(&Token::Comma)?;
8079 self.expect_keyword(Keyword::REMAINDER)?;
8080 let remainder = self.parse_literal_uint()?;
8081 self.expect_token(&Token::RParen)?;
8082 Ok(ForValues::With { modulus, remainder })
8083 } else {
8084 self.expected("IN, FROM, or WITH after FOR VALUES", self.peek_token())
8085 }
8086 }
8087
8088 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8090 if self.parse_keyword(Keyword::MINVALUE) {
8091 Ok(PartitionBoundValue::MinValue)
8092 } else if self.parse_keyword(Keyword::MAXVALUE) {
8093 Ok(PartitionBoundValue::MaxValue)
8094 } else {
8095 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8096 }
8097 }
8098
    /// Parse the optional configuration clauses of `CREATE TABLE`:
    /// `INHERITS (...)`, `WITH (...)`, `TBLPROPERTIES (...)`,
    /// `PARTITION BY`, `CLUSTER BY`, `OPTIONS (...)`, and plain options.
    /// Note: later option clauses overwrite `table_options` set by earlier
    /// ones (e.g. TBLPROPERTIES wins over WITH).
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // `WITH (...)` options — may be replaced below.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // BigQuery-style `CLUSTER BY` and `OPTIONS (...)`.
        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            if let Token::Word(word) = self.peek_token().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain key/value options only when nothing above
        // produced options; Hive is excluded from this fallback.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
8163
    /// Parse a single "plain" table option (MySQL-style `CREATE TABLE`
    /// options), returning `Ok(None)` when the next tokens do not start a
    /// recognized option.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // `START TRANSACTION` is accepted verbatim as an option identifier.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // COMMENT [=] 'text' — record whether the `=` was present.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // ENGINE [=] name [(param, ...)]
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    let parameters = if self.peek_token() == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // TABLESPACE [=] name [STORAGE [=] {DISK | MEMORY}]
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // UNION [=] (tbl, ...)
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // All remaining options are simple `KEY [=] value` pairs; map the
        // (possibly multi-word) keyword to its canonical identifier.
        // Multi-word forms must be tried before their single-word prefixes.
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option — signals the caller to stop.
            return Ok(None);
        };

        let _ = self.consume_token(&Token::Eq);

        // The value may be any literal value; otherwise fall back to a bare
        // identifier.
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
8356
8357 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8358 let mut options = Vec::new();
8359
8360 while let Some(option) = self.parse_plain_option()? {
8361 options.push(option);
8362 let _ = self.consume_token(&Token::Comma);
8365 }
8366
8367 Ok(options)
8368 }
8369
8370 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8371 let comment = if self.parse_keyword(Keyword::COMMENT) {
8372 let has_eq = self.consume_token(&Token::Eq);
8373 let comment = self.parse_comment_value()?;
8374 Some(if has_eq {
8375 CommentDef::WithEq(comment)
8376 } else {
8377 CommentDef::WithoutEq(comment)
8378 })
8379 } else {
8380 None
8381 };
8382 Ok(comment)
8383 }
8384
8385 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8386 let next_token = self.next_token();
8387 let value = match next_token.token {
8388 Token::SingleQuotedString(str) => str,
8389 Token::DollarQuotedString(str) => str.value,
8390 _ => self.expected("string literal", next_token)?,
8391 };
8392 Ok(value)
8393 }
8394
    /// Parse an optional parenthesized procedure parameter list. Returns
    /// `Some(vec![])` when the list is absent or written as `()`.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        // No opening paren, or an immediately-closed `()`, both yield an
        // empty parameter list.
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            if let Token::Word(_) = self.peek_token().token {
                params.push(self.parse_procedure_param()?)
            }
            // A comma followed directly by `)` is tolerated (trailing comma).
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                break;
            } else if !comma {
                return self.expected("',' or ')' after parameter definition", self.peek_token());
            }
        }
        Ok(Some(params))
    }
8416
    /// Parse the parenthesized column/constraint list of a `CREATE TABLE`.
    /// Returns empty vectors when the list is absent or written as `()`.
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Table constraints are tried first; anything else starting with
            // a word is treated as a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = self.peek_token().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected("column name or constraint definition", self.peek_token());
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token().token == Token::RParen;

            if !comma && !rparen {
                return self.expected("',' or ')' after column definition", self.peek_token());
            };

            // Stop at `)`. A trailing comma before `)` is only accepted when
            // the dialect or the parser options allow trailing commas;
            // otherwise the loop continues and the next iteration errors out.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
8452
8453 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8454 let mode = if self.parse_keyword(Keyword::IN) {
8455 Some(ArgMode::In)
8456 } else if self.parse_keyword(Keyword::OUT) {
8457 Some(ArgMode::Out)
8458 } else if self.parse_keyword(Keyword::INOUT) {
8459 Some(ArgMode::InOut)
8460 } else {
8461 None
8462 };
8463 let name = self.parse_identifier()?;
8464 let data_type = self.parse_data_type()?;
8465 let default = if self.consume_token(&Token::Eq) {
8466 Some(self.parse_expr()?)
8467 } else {
8468 None
8469 };
8470
8471 Ok(ProcedureParam {
8472 name,
8473 data_type,
8474 mode,
8475 default,
8476 })
8477 }
8478
8479 pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8480 let col_name = self.parse_identifier()?;
8481 let data_type = if self.is_column_type_sqlite_unspecified() {
8482 DataType::Unspecified
8483 } else {
8484 self.parse_data_type()?
8485 };
8486 let mut options = vec![];
8487 loop {
8488 if self.parse_keyword(Keyword::CONSTRAINT) {
8489 let name = Some(self.parse_identifier()?);
8490 if let Some(option) = self.parse_optional_column_option()? {
8491 options.push(ColumnOptionDef { name, option });
8492 } else {
8493 return self.expected(
8494 "constraint details after CONSTRAINT <name>",
8495 self.peek_token(),
8496 );
8497 }
8498 } else if let Some(option) = self.parse_optional_column_option()? {
8499 options.push(ColumnOptionDef { name: None, option });
8500 } else {
8501 break;
8502 };
8503 }
8504 Ok(ColumnDef {
8505 name: col_name,
8506 data_type,
8507 options,
8508 })
8509 }
8510
8511 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8512 if dialect_of!(self is SQLiteDialect) {
8513 match self.peek_token().token {
8514 Token::Word(word) => matches!(
8515 word.keyword,
8516 Keyword::CONSTRAINT
8517 | Keyword::PRIMARY
8518 | Keyword::NOT
8519 | Keyword::UNIQUE
8520 | Keyword::CHECK
8521 | Keyword::DEFAULT
8522 | Keyword::COLLATE
8523 | Keyword::REFERENCES
8524 | Keyword::GENERATED
8525 | Keyword::AS
8526 ),
8527 _ => true, }
8529 } else {
8530 false
8531 }
8532 }
8533
8534 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8535 if let Some(option) = self.dialect.parse_column_option(self)? {
8536 return option;
8537 }
8538
8539 self.with_state(
8540 ColumnDefinition,
8541 |parser| -> Result<Option<ColumnOption>, ParserError> {
8542 parser.parse_optional_column_option_inner()
8543 },
8544 )
8545 }
8546
    /// Generic (dialect-agnostic) parsing of a single column option.
    ///
    /// Branch order matters: multi-keyword forms (`NOT NULL`, `PRIMARY KEY`,
    /// `ON UPDATE`, ...) must be tried before shorter forms that share a
    /// leading keyword, and several branches are gated on the active dialect.
    /// Returns `Ok(None)` when the next tokens do not start a column option.
    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ok(Some(ColumnOption::CharacterSet(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::COLLATE]) {
            Ok(Some(ColumnOption::Collation(
                self.parse_object_name(false)?,
            )))
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
            Ok(Some(ColumnOption::NotNull))
        } else if self.parse_keywords(&[Keyword::COMMENT]) {
            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
        } else if self.parse_keyword(Keyword::NULL) {
            Ok(Some(ColumnOption::Null))
        } else if self.parse_keyword(Keyword::DEFAULT) {
            Ok(Some(ColumnOption::Default(
                self.parse_column_option_expr()?,
            )))
        // ClickHouse column expression kinds: MATERIALIZED / ALIAS / EPHEMERAL.
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::MATERIALIZED)
        {
            Ok(Some(ColumnOption::Materialized(
                self.parse_column_option_expr()?,
            )))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::ALIAS)
        {
            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
            && self.parse_keyword(Keyword::EPHEMERAL)
        {
            // EPHEMERAL's expression is optional: a following `,` or `)`
            // means the column def ends right here.
            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
                Ok(Some(ColumnOption::Ephemeral(None)))
            } else {
                Ok(Some(ColumnOption::Ephemeral(Some(
                    self.parse_column_option_expr()?,
                ))))
            }
        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
            let characteristics = self.parse_constraint_characteristics()?;
            // Column-level PRIMARY KEY: no name/index/columns of its own.
            Ok(Some(
                PrimaryKeyConstraint {
                    name: None,
                    index_name: None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::UNIQUE) {
            let characteristics = self.parse_constraint_characteristics()?;
            Ok(Some(
                UniqueConstraint {
                    name: None,
                    index_name: None,
                    index_type_display: KeyOrIndexDisplay::None,
                    index_type: None,
                    columns: vec![],
                    index_options: vec![],
                    characteristics,
                    nulls_distinct: NullsDistinctOption::None,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::REFERENCES) {
            // Column-level foreign key:
            // REFERENCES <table> [(cols)] [MATCH ...] [ON DELETE ...] [ON UPDATE ...]
            let foreign_table = self.parse_object_name(false)?;
            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
            let mut match_kind = None;
            let mut on_delete = None;
            let mut on_update = None;
            // The three clauses may appear in any order, each at most once.
            loop {
                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                    match_kind = Some(self.parse_match_kind()?);
                } else if on_delete.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                {
                    on_delete = Some(self.parse_referential_action()?);
                } else if on_update.is_none()
                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                {
                    on_update = Some(self.parse_referential_action()?);
                } else {
                    break;
                }
            }
            let characteristics = self.parse_constraint_characteristics()?;

            Ok(Some(
                ForeignKeyConstraint {
                    name: None, index_name: None, columns: vec![], foreign_table,
                    referred_columns,
                    on_delete,
                    on_update,
                    match_kind,
                    characteristics,
                }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::CHECK) {
            self.expect_token(&Token::LParen)?;
            // The CHECK expression is a normal expression, so leave the
            // column-definition parser state while parsing it.
            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Ok(Some(
                CheckConstraint {
                    name: None, expr: Box::new(expr),
                    enforced: None, }
                .into(),
            ))
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // MySQL spelling.
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTO_INCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
            && dialect_of!(self is SQLiteDialect | GenericDialect)
        {
            // SQLite spelling.
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("AUTOINCREMENT"),
            ])))
        } else if self.parse_keyword(Keyword::ASC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("ASC"),
            ])))
        } else if self.parse_keyword(Keyword::DESC)
            && self.dialect.supports_asc_desc_in_column_definition()
        {
            Ok(Some(ColumnOption::DialectSpecific(vec![
                Token::make_keyword("DESC"),
            ])))
        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            let expr = self.parse_column_option_expr()?;
            Ok(Some(ColumnOption::OnUpdate(expr)))
        } else if self.parse_keyword(Keyword::GENERATED) {
            self.parse_optional_column_option_generated()
        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
            && self.parse_keyword(Keyword::OPTIONS)
        {
            // parse_options re-consumes the OPTIONS keyword, so back up.
            self.prev_token();
            Ok(Some(ColumnOption::Options(
                self.parse_options(Keyword::OPTIONS)?,
            )))
        } else if self.parse_keyword(Keyword::AS)
            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
        {
            self.parse_optional_column_option_as()
        } else if self.parse_keyword(Keyword::SRID)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Some(ColumnOption::Srid(Box::new(
                self.parse_column_option_expr()?,
            ))))
        } else if self.parse_keyword(Keyword::IDENTITY)
            && dialect_of!(self is MsSqlDialect | GenericDialect)
        {
            // MSSQL `IDENTITY[(seed, increment)]`.
            let parameters = if self.consume_token(&Token::LParen) {
                let seed = self.parse_number()?;
                self.expect_token(&Token::Comma)?;
                let increment = self.parse_number()?;
                self.expect_token(&Token::RParen)?;

                Some(IdentityPropertyFormatKind::FunctionCall(
                    IdentityParameters { seed, increment },
                ))
            } else {
                None
            };
            Ok(Some(ColumnOption::Identity(
                IdentityPropertyKind::Identity(IdentityProperty {
                    parameters,
                    order: None,
                }),
            )))
        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
        {
            // SQLite `ON CONFLICT <resolution>` column clause.
            Ok(Some(ColumnOption::OnConflict(
                self.expect_one_of_keywords(&[
                    Keyword::ROLLBACK,
                    Keyword::ABORT,
                    Keyword::FAIL,
                    Keyword::IGNORE,
                    Keyword::REPLACE,
                ])?,
            )))
        } else if self.parse_keyword(Keyword::INVISIBLE) {
            Ok(Some(ColumnOption::Invisible))
        } else {
            Ok(None)
        }
    }
8760
8761 fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8778 if self.peek_token_ref().token == Token::LParen {
8779 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8780 Ok(expr)
8781 } else {
8782 Ok(self.parse_expr()?)
8783 }
8784 }
8785
8786 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8787 let name = self.parse_object_name(false)?;
8788 self.expect_token(&Token::Eq)?;
8789 let value = self.parse_literal_string()?;
8790
8791 Ok(Tag::new(name, value))
8792 }
8793
    /// Parses the remainder of a `GENERATED ...` column option (the
    /// `GENERATED` keyword has already been consumed by the caller).
    ///
    /// Handles `ALWAYS AS IDENTITY [(seq opts)]`,
    /// `BY DEFAULT AS IDENTITY [(seq opts)]`, and
    /// `ALWAYS AS (<expr>) [STORED | VIRTUAL]`.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            let mut sequence_options = vec![];
            // `.is_ok()` treats the `(` as optional: expect_token only
            // consumes on a match, so a miss leaves the stream untouched.
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            let mut sequence_options = vec![];
            // Same optional-parenthesis idiom as above.
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // The generation expression is a normal expression, so leave
                // the column-definition parser state while parsing it.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // Postgres requires STORED for expression-generated columns.
                    self.expected("STORED", self.peek_token())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // `GENERATED ALWAYS AS` without `(` is not a generated column.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
8860
8861 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8862 self.expect_token(&Token::LParen)?;
8864 let expr = self.parse_expr()?;
8865 self.expect_token(&Token::RParen)?;
8866
8867 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8868 (
8869 GeneratedAs::ExpStored,
8870 Some(GeneratedExpressionMode::Stored),
8871 )
8872 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8873 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8874 } else {
8875 (GeneratedAs::Always, None)
8876 };
8877
8878 Ok(Some(ColumnOption::Generated {
8879 generated_as: gen_as,
8880 sequence_options: None,
8881 generation_expr: Some(expr),
8882 generation_expr_mode: expr_mode,
8883 generated_keyword: false,
8884 }))
8885 }
8886
8887 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8888 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8889 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8890 {
8891 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8892
8893 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8894 self.expect_token(&Token::LParen)?;
8895 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8896 self.expect_token(&Token::RParen)?;
8897 Some(sorted_by_columns)
8898 } else {
8899 None
8900 };
8901
8902 self.expect_keyword_is(Keyword::INTO)?;
8903 let num_buckets = self.parse_number_value()?.value;
8904 self.expect_keyword_is(Keyword::BUCKETS)?;
8905 Some(ClusteredBy {
8906 columns,
8907 sorted_by,
8908 num_buckets,
8909 })
8910 } else {
8911 None
8912 };
8913 Ok(clustered_by)
8914 }
8915
8916 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8917 if self.parse_keyword(Keyword::RESTRICT) {
8918 Ok(ReferentialAction::Restrict)
8919 } else if self.parse_keyword(Keyword::CASCADE) {
8920 Ok(ReferentialAction::Cascade)
8921 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8922 Ok(ReferentialAction::SetNull)
8923 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8924 Ok(ReferentialAction::NoAction)
8925 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8926 Ok(ReferentialAction::SetDefault)
8927 } else {
8928 self.expected(
8929 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8930 self.peek_token(),
8931 )
8932 }
8933 }
8934
8935 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
8936 if self.parse_keyword(Keyword::FULL) {
8937 Ok(ConstraintReferenceMatchKind::Full)
8938 } else if self.parse_keyword(Keyword::PARTIAL) {
8939 Ok(ConstraintReferenceMatchKind::Partial)
8940 } else if self.parse_keyword(Keyword::SIMPLE) {
8941 Ok(ConstraintReferenceMatchKind::Simple)
8942 } else {
8943 self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
8944 }
8945 }
8946
    /// Parses optional constraint characteristics: `[NOT] DEFERRABLE`,
    /// `INITIALLY { DEFERRED | IMMEDIATE }`, and `[NOT] ENFORCED`, in any
    /// order, each at most once.
    ///
    /// Returns `Ok(None)` when none of the characteristics were present.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // `NOT DEFERRABLE` must be tried before bare `DEFERRABLE` so the
            // leading `NOT` is not left behind in the token stream.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
8983
    /// Parses a table-level constraint if one starts at the current position.
    ///
    /// Recognizes `UNIQUE`, `PRIMARY KEY`, `FOREIGN KEY`, `CHECK`, and the
    /// MySQL-specific `INDEX`/`KEY` and `FULLTEXT`/`SPATIAL` forms, each
    /// optionally preceded by `CONSTRAINT <name>`. Returns `Ok(None)` — after
    /// pushing the inspected token back — when no constraint is present.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        // The token deciding the constraint kind is consumed here; the
        // default arm puts it back via prev_token().
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE [KEY|INDEX]` display is MySQL-only syntax.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self
                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
                }

                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // After `PRIMARY`, the `KEY` keyword is mandatory.
                self.expect_keyword_is(Keyword::KEY)?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // MATCH / ON DELETE / ON UPDATE may appear in any order,
                // each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            // MySQL `INDEX`/`KEY` table constraint — only without a
            // preceding `CONSTRAINT <name>`.
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // `USING` here starts the index type, not an index name.
                let name = match self.peek_token().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // FULLTEXT/SPATIAL constraints cannot carry a CONSTRAINT name.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            _ => {
                if name.is_some() {
                    // `CONSTRAINT <name>` must be followed by a constraint.
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    // Not a constraint: undo the token consumed above.
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
9184
9185 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9186 Ok(if self.parse_keyword(Keyword::NULLS) {
9187 let not = self.parse_keyword(Keyword::NOT);
9188 self.expect_keyword_is(Keyword::DISTINCT)?;
9189 if not {
9190 NullsDistinctOption::NotDistinct
9191 } else {
9192 NullsDistinctOption::Distinct
9193 }
9194 } else {
9195 NullsDistinctOption::None
9196 })
9197 }
9198
9199 pub fn maybe_parse_options(
9200 &mut self,
9201 keyword: Keyword,
9202 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9203 if let Token::Word(word) = self.peek_token().token {
9204 if word.keyword == keyword {
9205 return Ok(Some(self.parse_options(keyword)?));
9206 }
9207 };
9208 Ok(None)
9209 }
9210
9211 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9212 if self.parse_keyword(keyword) {
9213 self.expect_token(&Token::LParen)?;
9214 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9215 self.expect_token(&Token::RParen)?;
9216 Ok(options)
9217 } else {
9218 Ok(vec![])
9219 }
9220 }
9221
9222 pub fn parse_options_with_keywords(
9223 &mut self,
9224 keywords: &[Keyword],
9225 ) -> Result<Vec<SqlOption>, ParserError> {
9226 if self.parse_keywords(keywords) {
9227 self.expect_token(&Token::LParen)?;
9228 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9229 self.expect_token(&Token::RParen)?;
9230 Ok(options)
9231 } else {
9232 Ok(vec![])
9233 }
9234 }
9235
9236 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9237 Ok(if self.parse_keyword(Keyword::BTREE) {
9238 IndexType::BTree
9239 } else if self.parse_keyword(Keyword::HASH) {
9240 IndexType::Hash
9241 } else if self.parse_keyword(Keyword::GIN) {
9242 IndexType::GIN
9243 } else if self.parse_keyword(Keyword::GIST) {
9244 IndexType::GiST
9245 } else if self.parse_keyword(Keyword::SPGIST) {
9246 IndexType::SPGiST
9247 } else if self.parse_keyword(Keyword::BRIN) {
9248 IndexType::BRIN
9249 } else if self.parse_keyword(Keyword::BLOOM) {
9250 IndexType::Bloom
9251 } else {
9252 IndexType::Custom(self.parse_identifier()?)
9253 })
9254 }
9255
9256 pub fn parse_optional_using_then_index_type(
9262 &mut self,
9263 ) -> Result<Option<IndexType>, ParserError> {
9264 if self.parse_keyword(Keyword::USING) {
9265 Ok(Some(self.parse_index_type()?))
9266 } else {
9267 Ok(None)
9268 }
9269 }
9270
9271 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9274 self.maybe_parse(|parser| parser.parse_identifier())
9275 }
9276
9277 #[must_use]
9278 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9279 if self.parse_keyword(Keyword::KEY) {
9280 KeyOrIndexDisplay::Key
9281 } else if self.parse_keyword(Keyword::INDEX) {
9282 KeyOrIndexDisplay::Index
9283 } else {
9284 KeyOrIndexDisplay::None
9285 }
9286 }
9287
9288 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9289 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9290 Ok(Some(IndexOption::Using(index_type)))
9291 } else if self.parse_keyword(Keyword::COMMENT) {
9292 let s = self.parse_literal_string()?;
9293 Ok(Some(IndexOption::Comment(s)))
9294 } else {
9295 Ok(None)
9296 }
9297 }
9298
9299 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9300 let mut options = Vec::new();
9301
9302 loop {
9303 match self.parse_optional_index_option()? {
9304 Some(index_option) => options.push(index_option),
9305 None => return Ok(options),
9306 }
9307 }
9308 }
9309
9310 pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9311 let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9312
9313 match self.peek_token().token {
9314 Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9315 Ok(SqlOption::Ident(self.parse_identifier()?))
9316 }
9317 Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9318 self.parse_option_partition()
9319 }
9320 Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9321 self.parse_option_clustered()
9322 }
9323 _ => {
9324 let name = self.parse_identifier()?;
9325 self.expect_token(&Token::Eq)?;
9326 let value = self.parse_expr()?;
9327
9328 Ok(SqlOption::KeyValue { key: name, value })
9329 }
9330 }
9331 }
9332
    /// Parses MSSQL `CLUSTERED ...` table options.
    ///
    /// The longest keyword sequence is attempted first so that
    /// `CLUSTERED COLUMNSTORE INDEX ORDER (...)` is not swallowed by the
    /// shorter `CLUSTERED COLUMNSTORE INDEX` form.
    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
        if self.parse_keywords(&[
            Keyword::CLUSTERED,
            Keyword::COLUMNSTORE,
            Keyword::INDEX,
            Keyword::ORDER,
        ]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndexOrder(
                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                ),
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndex,
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
            self.expect_token(&Token::LParen)?;

            // `CLUSTERED INDEX (col [ASC|DESC], ...)`.
            let columns = self.parse_comma_separated(|p| {
                let name = p.parse_identifier()?;
                let asc = p.parse_asc_desc();

                Ok(ClusteredIndex { name, asc })
            })?;

            self.expect_token(&Token::RParen)?;

            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
        } else {
            Err(ParserError::ParserError(
                "invalid CLUSTERED sequence".to_string(),
            ))
        }
    }
9368
9369 pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9370 self.expect_keyword_is(Keyword::PARTITION)?;
9371 self.expect_token(&Token::LParen)?;
9372 let column_name = self.parse_identifier()?;
9373
9374 self.expect_keyword_is(Keyword::RANGE)?;
9375 let range_direction = if self.parse_keyword(Keyword::LEFT) {
9376 Some(PartitionRangeDirection::Left)
9377 } else if self.parse_keyword(Keyword::RIGHT) {
9378 Some(PartitionRangeDirection::Right)
9379 } else {
9380 None
9381 };
9382
9383 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9384 self.expect_token(&Token::LParen)?;
9385
9386 let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9387
9388 self.expect_token(&Token::RParen)?;
9389 self.expect_token(&Token::RParen)?;
9390
9391 Ok(SqlOption::Partition {
9392 column_name,
9393 range_direction,
9394 for_values,
9395 })
9396 }
9397
9398 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9399 self.expect_token(&Token::LParen)?;
9400 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9401 self.expect_token(&Token::RParen)?;
9402 Ok(Partition::Partitions(partitions))
9403 }
9404
9405 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9406 self.expect_token(&Token::LParen)?;
9407 self.expect_keyword_is(Keyword::SELECT)?;
9408 let projection = self.parse_projection()?;
9409 let group_by = self.parse_optional_group_by()?;
9410 let order_by = self.parse_optional_order_by()?;
9411 self.expect_token(&Token::RParen)?;
9412 Ok(ProjectionSelect {
9413 projection,
9414 group_by,
9415 order_by,
9416 })
9417 }
9418 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9419 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9420 let name = self.parse_identifier()?;
9421 let query = self.parse_projection_select()?;
9422 Ok(AlterTableOperation::AddProjection {
9423 if_not_exists,
9424 name,
9425 select: query,
9426 })
9427 }
9428
    /// Parse one operation of an `ALTER TABLE` statement (the part after the
    /// table name), e.g. `ADD [COLUMN] ...`, `RENAME TO ...`, `DROP CONSTRAINT ...`.
    ///
    /// Several branches are gated on the active dialect (ClickHouse, PostgreSQL,
    /// MySQL-style `ALGORITHM`/`LOCK`, Snowflake-style clustering, ...). Falls
    /// through to `SET TBLPROPERTIES` / `SET (...)` parsing before erroring.
    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
        let operation = if self.parse_keyword(Keyword::ADD) {
            // ADD <table constraint> | ADD PROJECTION | ADD PARTITION ... | ADD [COLUMN] ...
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterTableOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
                && self.parse_keyword(Keyword::PROJECTION)
            {
                return self.parse_alter_table_add_projection();
            } else {
                let if_not_exists =
                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
                // Collect any number of `PARTITION ...` clauses.
                let mut new_partitions = vec![];
                loop {
                    if self.parse_keyword(Keyword::PARTITION) {
                        new_partitions.push(self.parse_partition()?);
                    } else {
                        break;
                    }
                }
                if !new_partitions.is_empty() {
                    AlterTableOperation::AddPartitions {
                        if_not_exists,
                        new_partitions,
                    }
                } else {
                    // No PARTITION clause: this is `ADD [COLUMN] <column_def>`.
                    let column_keyword = self.parse_keyword(Keyword::COLUMN);

                    // These dialects also accept IF NOT EXISTS after COLUMN;
                    // either position sets the flag.
                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
                    {
                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
                            || if_not_exists
                    } else {
                        false
                    };

                    let column_def = self.parse_column_def()?;

                    let column_position = self.parse_column_position()?;

                    AlterTableOperation::AddColumn {
                        column_keyword,
                        if_not_exists,
                        column_def,
                        column_position,
                    }
                }
            }
        } else if self.parse_keyword(Keyword::RENAME) {
            // RENAME CONSTRAINT | RENAME TO | RENAME AS | RENAME [COLUMN]
            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
                let old_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_name = self.parse_identifier()?;
                AlterTableOperation::RenameConstraint { old_name, new_name }
            } else if self.parse_keyword(Keyword::TO) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::To(table_name),
                }
            } else if self.parse_keyword(Keyword::AS) {
                let table_name = self.parse_object_name(false)?;
                AlterTableOperation::RenameTable {
                    table_name: RenameTableNameKind::As(table_name),
                }
            } else {
                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
                let old_column_name = self.parse_identifier()?;
                self.expect_keyword_is(Keyword::TO)?;
                let new_column_name = self.parse_identifier()?;
                AlterTableOperation::RenameColumn {
                    old_column_name,
                    new_column_name,
                }
            }
        } else if self.parse_keyword(Keyword::DISABLE) {
            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::DisableRowLevelSecurity {}
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DisableTrigger { name }
            } else {
                return self.expected(
                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keyword(Keyword::ENABLE) {
            // Two-keyword forms (ALWAYS/REPLICA + RULE/TRIGGER) are tried
            // before the single-keyword RULE/TRIGGER forms.
            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysRule { name }
            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableAlwaysTrigger { name }
            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
                AlterTableOperation::EnableRowLevelSecurity {}
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaRule { name }
            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableReplicaTrigger { name }
            } else if self.parse_keyword(Keyword::RULE) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableRule { name }
            } else if self.parse_keyword(Keyword::TRIGGER) {
                let name = self.parse_identifier()?;
                AlterTableOperation::EnableTrigger { name }
            } else {
                return self.expected(
                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
                    self.peek_token(),
                );
            }
        } else if self.parse_keywords(&[Keyword::FORCE, Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
            AlterTableOperation::ForceRowLevelSecurity
        } else if self.parse_keywords(&[Keyword::NO, Keyword::FORCE, Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
            AlterTableOperation::NoForceRowLevelSecurity
        // NOTE(review): in this branch and the MATERIALIZE/DROP PROJECTION ones
        // below, parse_keywords runs BEFORE the dialect_of! check, so the
        // keywords are consumed even when the dialect test then fails — unlike
        // the ADD PROJECTION branch above, which checks the dialect first.
        // Verify this ordering is intentional.
        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::ClearProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
            && dialect_of!(self is ClickHouseDialect|GenericDialect)
        {
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            let name = self.parse_identifier()?;
            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::MaterializeProjection {
                if_exists,
                name,
                partition,
            }
        } else if self.parse_keyword(Keyword::DROP) {
            // DROP PARTITION | CONSTRAINT | PRIMARY/FOREIGN KEY | INDEX |
            // PROJECTION | CLUSTERING KEY | [COLUMN]
            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: true,
                }
            } else if self.parse_keyword(Keyword::PARTITION) {
                self.expect_token(&Token::LParen)?;
                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
                self.expect_token(&Token::RParen)?;
                AlterTableOperation::DropPartitions {
                    partitions,
                    if_exists: false,
                }
            } else if self.parse_keyword(Keyword::CONSTRAINT) {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropConstraint {
                    if_exists,
                    name,
                    drop_behavior,
                }
            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropPrimaryKey { drop_behavior }
            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
                let name = self.parse_identifier()?;
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropForeignKey {
                    name,
                    drop_behavior,
                }
            } else if self.parse_keyword(Keyword::INDEX) {
                let name = self.parse_identifier()?;
                AlterTableOperation::DropIndex { name }
            } else if self.parse_keyword(Keyword::PROJECTION)
                && dialect_of!(self is ClickHouseDialect|GenericDialect)
            {
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let name = self.parse_identifier()?;
                AlterTableOperation::DropProjection { if_exists, name }
            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
                AlterTableOperation::DropClusteringKey
            } else {
                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
                    self.parse_comma_separated(Parser::parse_identifier)?
                } else {
                    vec![self.parse_identifier()?]
                };
                let drop_behavior = self.parse_optional_drop_behavior();
                AlterTableOperation::DropColumn {
                    has_column_keyword,
                    column_names,
                    if_exists,
                    drop_behavior,
                }
            }
        } else if self.parse_keyword(Keyword::PARTITION) {
            // PARTITION (..) RENAME TO PARTITION (..)
            self.expect_token(&Token::LParen)?;
            let before = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            self.expect_keyword_is(Keyword::RENAME)?;
            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
            self.expect_token(&Token::LParen)?;
            let renames = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::RenamePartitions {
                old_partitions: before,
                new_partitions: renames,
            }
        } else if self.parse_keyword(Keyword::CHANGE) {
            // MySQL: CHANGE [COLUMN] old_name new_name data_type [options...]
            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
            let old_name = self.parse_identifier()?;
            let new_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ChangeColumn {
                old_name,
                new_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::MODIFY) {
            // MySQL: MODIFY [COLUMN] col_name data_type [options...]
            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
            let col_name = self.parse_identifier()?;
            let data_type = self.parse_data_type()?;
            let mut options = vec![];
            while let Some(option) = self.parse_optional_column_option()? {
                options.push(option);
            }

            let column_position = self.parse_column_position()?;

            AlterTableOperation::ModifyColumn {
                col_name,
                data_type,
                options,
                column_position,
            }
        } else if self.parse_keyword(Keyword::ALTER) {
            // ALTER [COLUMN] col_name <alter column operation>
            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
            let column_name = self.parse_identifier()?;
            let is_postgresql = dialect_of!(self is PostgreSqlDialect);

            let op: AlterColumnOperation = if self.parse_keywords(&[
                Keyword::SET,
                Keyword::NOT,
                Keyword::NULL,
            ]) {
                AlterColumnOperation::SetNotNull {}
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
                AlterColumnOperation::DropNotNull {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
                AlterColumnOperation::SetDefault {
                    value: self.parse_expr()?,
                }
            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
                AlterColumnOperation::DropDefault {}
            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
                self.parse_set_data_type(true)?
            } else if self.parse_keyword(Keyword::TYPE) {
                self.parse_set_data_type(false)?
            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
                // ADD GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [(seq options)]
                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
                    Some(GeneratedAs::Always)
                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
                    Some(GeneratedAs::ByDefault)
                } else {
                    None
                };

                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;

                let mut sequence_options: Option<Vec<SequenceOptions>> = None;

                if self.peek_token().token == Token::LParen {
                    self.expect_token(&Token::LParen)?;
                    sequence_options = Some(self.parse_create_sequence_options()?);
                    self.expect_token(&Token::RParen)?;
                }

                AlterColumnOperation::AddGenerated {
                    generated_as,
                    sequence_options,
                }
            } else {
                // ADD GENERATED is only advertised for PostgreSQL.
                let message = if is_postgresql {
                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
                } else {
                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
                };

                return self.expected(message, self.peek_token());
            };
            AlterTableOperation::AlterColumn { column_name, op }
        } else if self.parse_keyword(Keyword::SWAP) {
            self.expect_keyword_is(Keyword::WITH)?;
            let table_name = self.parse_object_name(false)?;
            AlterTableOperation::SwapWith { table_name }
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
        {
            let new_owner = self.parse_owner()?;
            AlterTableOperation::OwnerTo { new_owner }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::ATTACH)
        {
            AlterTableOperation::AttachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::DETACH)
        {
            AlterTableOperation::DetachPartition {
                partition: self.parse_part_or_partition()?,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::FREEZE)
        {
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::FreezePartition {
                partition,
                with_name,
            }
        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::UNFREEZE)
        {
            let partition = self.parse_part_or_partition()?;
            let with_name = if self.parse_keyword(Keyword::WITH) {
                self.expect_keyword_is(Keyword::NAME)?;
                Some(self.parse_identifier()?)
            } else {
                None
            };
            AlterTableOperation::UnfreezePartition {
                partition,
                with_name,
            }
        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            AlterTableOperation::ClusterBy { exprs }
        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
            AlterTableOperation::SuspendRecluster
        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
            AlterTableOperation::ResumeRecluster
        } else if self.parse_keyword(Keyword::LOCK) {
            // MySQL: LOCK [=] { DEFAULT | EXCLUSIVE | NONE | SHARED }
            let equals = self.consume_token(&Token::Eq);
            let lock = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::EXCLUSIVE,
                Keyword::NONE,
                Keyword::SHARED,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableLock::Default,
                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
                Some(Keyword::NONE) => AlterTableLock::None,
                Some(Keyword::SHARED) => AlterTableLock::Shared,
                _ => self.expected(
                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Lock { equals, lock }
        } else if self.parse_keyword(Keyword::ALGORITHM) {
            // MySQL: ALGORITHM [=] { DEFAULT | INSTANT | INPLACE | COPY }
            let equals = self.consume_token(&Token::Eq);
            let algorithm = match self.parse_one_of_keywords(&[
                Keyword::DEFAULT,
                Keyword::INSTANT,
                Keyword::INPLACE,
                Keyword::COPY,
            ]) {
                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
                _ => self.expected(
                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
                    self.peek_token(),
                )?,
            };
            AlterTableOperation::Algorithm { equals, algorithm }
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            // MySQL: AUTO_INCREMENT [=] <number>
            let equals = self.consume_token(&Token::Eq);
            let value = self.parse_number_value()?;
            AlterTableOperation::AutoIncrement { equals, value }
        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
            let identity = if self.parse_keyword(Keyword::NONE) {
                ReplicaIdentity::None
            } else if self.parse_keyword(Keyword::FULL) {
                ReplicaIdentity::Full
            } else if self.parse_keyword(Keyword::DEFAULT) {
                ReplicaIdentity::Default
            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                ReplicaIdentity::Index(self.parse_identifier()?)
            } else {
                return self.expected(
                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
                    self.peek_token(),
                );
            };

            AlterTableOperation::ReplicaIdentity { identity }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterTableOperation::ValidateConstraint { name }
        } else {
            // Last resort: try `SET TBLPROPERTIES (...)`, then a bare
            // `SET (...)` options list, before reporting an error.
            let mut options =
                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
            if !options.is_empty() {
                AlterTableOperation::SetTblProperties {
                    table_properties: options,
                }
            } else {
                options = self.parse_options(Keyword::SET)?;
                if !options.is_empty() {
                    AlterTableOperation::SetOptionsParens { options }
                } else {
                    return self.expected(
                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
                        self.peek_token(),
                    );
                }
            }
        };
        Ok(operation)
    }
9889
9890 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9891 let data_type = self.parse_data_type()?;
9892 let using = if self.dialect.supports_alter_column_type_using()
9893 && self.parse_keyword(Keyword::USING)
9894 {
9895 Some(self.parse_expr()?)
9896 } else {
9897 None
9898 };
9899 Ok(AlterColumnOperation::SetDataType {
9900 data_type,
9901 using,
9902 had_set,
9903 })
9904 }
9905
9906 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9907 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9908 match keyword {
9909 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9910 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9911 unexpected_keyword => Err(ParserError::ParserError(
9913 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
9914 )),
9915 }
9916 }
9917
    /// Parse an `ALTER ...` statement, dispatching on the object kind that
    /// follows (VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR,
    /// ICEBERG TABLE, SCHEMA, USER, OPERATOR). The `ALTER` keyword itself has
    /// already been consumed by the caller.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // Rewind both SCHEMA and the preceding ALTER token:
                // parse_alter_schema re-consumes the whole `ALTER SCHEMA` prefix.
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // `ALTER ICEBERG TABLE ...` — TABLE is mandatory after ICEBERG.
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::INDEX => {
                // Only `ALTER INDEX <name> RENAME TO <new_name>` is supported.
                let index_name = self.parse_object_name(false)?;
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected("TO after RENAME", self.peek_token());
                    }
                } else {
                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::OPERATOR => self.parse_alter_operator(),
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy(),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user(),
            // Unreachable: expect_one_of_keywords only yields listed keywords.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
            )),
        }
    }
9974
    /// Parse the remainder of an `ALTER TABLE` (or, when `iceberg` is true,
    /// `ALTER ICEBERG TABLE`) statement: table name, optional `ON CLUSTER`,
    /// a comma-separated list of operations, and an optional Hive-style
    /// `[SET] LOCATION` suffix.
    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
        let table_name = self.parse_object_name(false)?;
        let on_cluster = self.parse_optional_on_cluster()?;
        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;

        // Optional trailing `LOCATION ...` / `SET LOCATION ...`; `has_set`
        // records which spelling was used.
        let mut location = None;
        if self.parse_keyword(Keyword::LOCATION) {
            location = Some(HiveSetLocation {
                has_set: false,
                location: self.parse_identifier()?,
            });
        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
            location = Some(HiveSetLocation {
                has_set: true,
                location: self.parse_identifier()?,
            });
        }

        // Capture the statement's final token: the terminating semicolon if one
        // follows (peeked, not consumed), otherwise the last consumed token.
        // NOTE(review): presumably used for span bookkeeping via AttachedToken.
        let end_token = if self.peek_token_ref().token == Token::SemiColon {
            self.peek_token_ref().clone()
        } else {
            self.get_current_token().clone()
        };

        Ok(AlterTable {
            name: table_name,
            if_exists,
            only,
            operations,
            location,
            on_cluster,
            table_type: if iceberg {
                Some(AlterTableType::Iceberg)
            } else {
                None
            },
            end_token: AttachedToken(end_token),
        }
        .into())
    }
10018
10019 pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
10020 let name = self.parse_object_name(false)?;
10021 let columns = self.parse_parenthesized_column_list(Optional, false)?;
10022
10023 let with_options = self.parse_options(Keyword::WITH)?;
10024
10025 self.expect_keyword_is(Keyword::AS)?;
10026 let query = self.parse_query()?;
10027
10028 Ok(Statement::AlterView {
10029 name,
10030 columns,
10031 query,
10032 with_options,
10033 })
10034 }
10035
10036 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10038 let name = self.parse_object_name(false)?;
10039
10040 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10041 let new_name = self.parse_identifier()?;
10042 Ok(Statement::AlterType(AlterType {
10043 name,
10044 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10045 }))
10046 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10047 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10048 let new_enum_value = self.parse_identifier()?;
10049 let position = if self.parse_keyword(Keyword::BEFORE) {
10050 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10051 } else if self.parse_keyword(Keyword::AFTER) {
10052 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10053 } else {
10054 None
10055 };
10056
10057 Ok(Statement::AlterType(AlterType {
10058 name,
10059 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10060 if_not_exists,
10061 value: new_enum_value,
10062 position,
10063 }),
10064 }))
10065 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10066 let existing_enum_value = self.parse_identifier()?;
10067 self.expect_keyword(Keyword::TO)?;
10068 let new_enum_value = self.parse_identifier()?;
10069
10070 Ok(Statement::AlterType(AlterType {
10071 name,
10072 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10073 from: existing_enum_value,
10074 to: new_enum_value,
10075 }),
10076 }))
10077 } else {
10078 self.expected_ref(
10079 "{RENAME TO | { RENAME | ADD } VALUE}",
10080 self.peek_token_ref(),
10081 )
10082 }
10083 }
10084
    /// Parse the remainder of an `ALTER OPERATOR` statement:
    /// `ALTER OPERATOR name (left_type, right_type)
    ///      { OWNER TO ... | SET SCHEMA ... | SET (option, ...) }`.
    pub fn parse_alter_operator(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_operator_name()?;

        self.expect_token(&Token::LParen)?;

        // The left operand type may be spelled NONE (parsed as no type).
        let left_type = if self.parse_keyword(Keyword::NONE) {
            None
        } else {
            Some(self.parse_data_type()?)
        };

        self.expect_token(&Token::Comma)?;
        let right_type = self.parse_data_type()?;
        self.expect_token(&Token::RParen)?;

        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            // OWNER TO { CURRENT_ROLE | CURRENT_USER | SESSION_USER | <ident> }
            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
                Owner::CurrentRole
            } else if self.parse_keyword(Keyword::CURRENT_USER) {
                Owner::CurrentUser
            } else if self.parse_keyword(Keyword::SESSION_USER) {
                Owner::SessionUser
            } else {
                Owner::Ident(self.parse_identifier()?)
            };
            AlterOperatorOperation::OwnerTo(owner)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            let schema_name = self.parse_object_name(false)?;
            AlterOperatorOperation::SetSchema { schema_name }
        } else if self.parse_keyword(Keyword::SET) {
            self.expect_token(&Token::LParen)?;

            // Parse a comma-separated list of operator options; the loop exits
            // when no comma follows an option, then the `)` is expected.
            let mut options = Vec::new();
            loop {
                let keyword = self.expect_one_of_keywords(&[
                    Keyword::RESTRICT,
                    Keyword::JOIN,
                    Keyword::COMMUTATOR,
                    Keyword::NEGATOR,
                    Keyword::HASHES,
                    Keyword::MERGES,
                ])?;

                match keyword {
                    Keyword::RESTRICT => {
                        // RESTRICT = { NONE | <proc_name> }; NONE maps to None.
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Restrict(proc_name));
                    }
                    Keyword::JOIN => {
                        // JOIN = { NONE | <proc_name> }; NONE maps to None.
                        self.expect_token(&Token::Eq)?;
                        let proc_name = if self.parse_keyword(Keyword::NONE) {
                            None
                        } else {
                            Some(self.parse_object_name(false)?)
                        };
                        options.push(OperatorOption::Join(proc_name));
                    }
                    Keyword::COMMUTATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Commutator(op_name));
                    }
                    Keyword::NEGATOR => {
                        self.expect_token(&Token::Eq)?;
                        let op_name = self.parse_operator_name()?;
                        options.push(OperatorOption::Negator(op_name));
                    }
                    Keyword::HASHES => {
                        // Bare flag options — no `= value` part.
                        options.push(OperatorOption::Hashes);
                    }
                    Keyword::MERGES => {
                        options.push(OperatorOption::Merges);
                    }
                    // Unreachable: expect_one_of_keywords only yields listed keywords.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
                    )),
                }

                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }

            self.expect_token(&Token::RParen)?;
            AlterOperatorOperation::Set { options }
        } else {
            return self.expected_ref(
                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterOperator(AlterOperator {
            name,
            left_type,
            right_type,
            operation,
        }))
    }
10194
10195 pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
10198 self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
10199 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10200 let name = self.parse_object_name(false)?;
10201 let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
10202 self.prev_token();
10203 let options = self.parse_options(Keyword::OPTIONS)?;
10204 AlterSchemaOperation::SetOptionsParens { options }
10205 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
10206 let collate = self.parse_expr()?;
10207 AlterSchemaOperation::SetDefaultCollate { collate }
10208 } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
10209 let replica = self.parse_identifier()?;
10210 let options = if self.peek_keyword(Keyword::OPTIONS) {
10211 Some(self.parse_options(Keyword::OPTIONS)?)
10212 } else {
10213 None
10214 };
10215 AlterSchemaOperation::AddReplica { replica, options }
10216 } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
10217 let replica = self.parse_identifier()?;
10218 AlterSchemaOperation::DropReplica { replica }
10219 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10220 let new_name = self.parse_object_name(false)?;
10221 AlterSchemaOperation::Rename { name: new_name }
10222 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10223 let owner = self.parse_owner()?;
10224 AlterSchemaOperation::OwnerTo { owner }
10225 } else {
10226 return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
10227 };
10228 Ok(Statement::AlterSchema(AlterSchema {
10229 name,
10230 if_exists,
10231 operations: vec![operation],
10232 }))
10233 }
10234
10235 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10238 let object_name = self.parse_object_name(false)?;
10239 if self.peek_token().token == Token::LParen {
10240 match self.parse_function(object_name)? {
10241 Expr::Function(f) => Ok(Statement::Call(f)),
10242 other => parser_err!(
10243 format!("Expected a simple procedure call but found: {other}"),
10244 self.peek_token().span.start
10245 ),
10246 }
10247 } else {
10248 Ok(Statement::Call(Function {
10249 name: object_name,
10250 uses_odbc_syntax: false,
10251 parameters: FunctionArguments::None,
10252 args: FunctionArguments::None,
10253 over: None,
10254 filter: None,
10255 null_treatment: None,
10256 within_group: vec![],
10257 }))
10258 }
10259 }
10260
10261 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10263 let source;
10264 if self.consume_token(&Token::LParen) {
10265 source = CopySource::Query(self.parse_query()?);
10266 self.expect_token(&Token::RParen)?;
10267 } else {
10268 let table_name = self.parse_object_name(false)?;
10269 let columns = self.parse_parenthesized_column_list(Optional, false)?;
10270 source = CopySource::Table {
10271 table_name,
10272 columns,
10273 };
10274 }
10275 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10276 Some(Keyword::FROM) => false,
10277 Some(Keyword::TO) => true,
10278 _ => self.expected("FROM or TO", self.peek_token())?,
10279 };
10280 if !to {
10281 if let CopySource::Query(_) = source {
10284 return Err(ParserError::ParserError(
10285 "COPY ... FROM does not support query as a source".to_string(),
10286 ));
10287 }
10288 }
10289 let target = if self.parse_keyword(Keyword::STDIN) {
10290 CopyTarget::Stdin
10291 } else if self.parse_keyword(Keyword::STDOUT) {
10292 CopyTarget::Stdout
10293 } else if self.parse_keyword(Keyword::PROGRAM) {
10294 CopyTarget::Program {
10295 command: self.parse_literal_string()?,
10296 }
10297 } else {
10298 CopyTarget::File {
10299 filename: self.parse_literal_string()?,
10300 }
10301 };
10302 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
10304 if self.consume_token(&Token::LParen) {
10305 options = self.parse_comma_separated(Parser::parse_copy_option)?;
10306 self.expect_token(&Token::RParen)?;
10307 }
10308 let mut legacy_options = vec![];
10309 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10310 legacy_options.push(opt);
10311 }
10312 let values = if let CopyTarget::Stdin = target {
10313 self.expect_token(&Token::SemiColon)?;
10314 self.parse_tsv()
10315 } else {
10316 vec![]
10317 };
10318 Ok(Statement::Copy {
10319 source,
10320 to,
10321 target,
10322 options,
10323 legacy_options,
10324 values,
10325 })
10326 }
10327
10328 fn parse_open(&mut self) -> Result<Statement, ParserError> {
10330 self.expect_keyword(Keyword::OPEN)?;
10331 Ok(Statement::Open(OpenStatement {
10332 cursor_name: self.parse_identifier()?,
10333 }))
10334 }
10335
10336 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10337 let cursor = if self.parse_keyword(Keyword::ALL) {
10338 CloseCursor::All
10339 } else {
10340 let name = self.parse_identifier()?;
10341
10342 CloseCursor::Specific { name }
10343 };
10344
10345 Ok(Statement::Close { cursor })
10346 }
10347
10348 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
10349 let ret = match self.parse_one_of_keywords(&[
10350 Keyword::FORMAT,
10351 Keyword::FREEZE,
10352 Keyword::DELIMITER,
10353 Keyword::NULL,
10354 Keyword::HEADER,
10355 Keyword::QUOTE,
10356 Keyword::ESCAPE,
10357 Keyword::FORCE_QUOTE,
10358 Keyword::FORCE_NOT_NULL,
10359 Keyword::FORCE_NULL,
10360 Keyword::ENCODING,
10361 ]) {
10362 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
10363 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
10364 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10365 Some(Keyword::FALSE)
10366 )),
10367 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
10368 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
10369 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
10370 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10371 Some(Keyword::FALSE)
10372 )),
10373 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
10374 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
10375 Some(Keyword::FORCE_QUOTE) => {
10376 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
10377 }
10378 Some(Keyword::FORCE_NOT_NULL) => {
10379 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10380 }
10381 Some(Keyword::FORCE_NULL) => {
10382 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10383 }
10384 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
10385 _ => self.expected("option", self.peek_token())?,
10386 };
10387 Ok(ret)
10388 }
10389
10390 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10391 if self.parse_keyword(Keyword::FORMAT) {
10393 let _ = self.parse_keyword(Keyword::AS);
10394 }
10395
10396 let ret = match self.parse_one_of_keywords(&[
10397 Keyword::ACCEPTANYDATE,
10398 Keyword::ACCEPTINVCHARS,
10399 Keyword::ADDQUOTES,
10400 Keyword::ALLOWOVERWRITE,
10401 Keyword::BINARY,
10402 Keyword::BLANKSASNULL,
10403 Keyword::BZIP2,
10404 Keyword::CLEANPATH,
10405 Keyword::COMPUPDATE,
10406 Keyword::CSV,
10407 Keyword::DATEFORMAT,
10408 Keyword::DELIMITER,
10409 Keyword::EMPTYASNULL,
10410 Keyword::ENCRYPTED,
10411 Keyword::ESCAPE,
10412 Keyword::EXTENSION,
10413 Keyword::FIXEDWIDTH,
10414 Keyword::GZIP,
10415 Keyword::HEADER,
10416 Keyword::IAM_ROLE,
10417 Keyword::IGNOREHEADER,
10418 Keyword::JSON,
10419 Keyword::MANIFEST,
10420 Keyword::MAXFILESIZE,
10421 Keyword::NULL,
10422 Keyword::PARALLEL,
10423 Keyword::PARQUET,
10424 Keyword::PARTITION,
10425 Keyword::REGION,
10426 Keyword::REMOVEQUOTES,
10427 Keyword::ROWGROUPSIZE,
10428 Keyword::STATUPDATE,
10429 Keyword::TIMEFORMAT,
10430 Keyword::TRUNCATECOLUMNS,
10431 Keyword::ZSTD,
10432 ]) {
10433 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10434 Some(Keyword::ACCEPTINVCHARS) => {
10435 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10437 Some(self.parse_literal_string()?)
10438 } else {
10439 None
10440 };
10441 CopyLegacyOption::AcceptInvChars(ch)
10442 }
10443 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10444 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10445 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10446 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10447 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10448 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10449 Some(Keyword::COMPUPDATE) => {
10450 let preset = self.parse_keyword(Keyword::PRESET);
10451 let enabled = match self.parse_one_of_keywords(&[
10452 Keyword::TRUE,
10453 Keyword::FALSE,
10454 Keyword::ON,
10455 Keyword::OFF,
10456 ]) {
10457 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10458 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10459 _ => None,
10460 };
10461 CopyLegacyOption::CompUpdate { preset, enabled }
10462 }
10463 Some(Keyword::CSV) => CopyLegacyOption::Csv({
10464 let mut opts = vec![];
10465 while let Some(opt) =
10466 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10467 {
10468 opts.push(opt);
10469 }
10470 opts
10471 }),
10472 Some(Keyword::DATEFORMAT) => {
10473 let _ = self.parse_keyword(Keyword::AS);
10474 let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10475 Some(self.parse_literal_string()?)
10476 } else {
10477 None
10478 };
10479 CopyLegacyOption::DateFormat(fmt)
10480 }
10481 Some(Keyword::DELIMITER) => {
10482 let _ = self.parse_keyword(Keyword::AS);
10483 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10484 }
10485 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10486 Some(Keyword::ENCRYPTED) => {
10487 let auto = self.parse_keyword(Keyword::AUTO);
10488 CopyLegacyOption::Encrypted { auto }
10489 }
10490 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10491 Some(Keyword::EXTENSION) => {
10492 let ext = self.parse_literal_string()?;
10493 CopyLegacyOption::Extension(ext)
10494 }
10495 Some(Keyword::FIXEDWIDTH) => {
10496 let spec = self.parse_literal_string()?;
10497 CopyLegacyOption::FixedWidth(spec)
10498 }
10499 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10500 Some(Keyword::HEADER) => CopyLegacyOption::Header,
10501 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10502 Some(Keyword::IGNOREHEADER) => {
10503 let _ = self.parse_keyword(Keyword::AS);
10504 let num_rows = self.parse_literal_uint()?;
10505 CopyLegacyOption::IgnoreHeader(num_rows)
10506 }
10507 Some(Keyword::JSON) => CopyLegacyOption::Json,
10508 Some(Keyword::MANIFEST) => {
10509 let verbose = self.parse_keyword(Keyword::VERBOSE);
10510 CopyLegacyOption::Manifest { verbose }
10511 }
10512 Some(Keyword::MAXFILESIZE) => {
10513 let _ = self.parse_keyword(Keyword::AS);
10514 let size = self.parse_number_value()?.value;
10515 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10516 Some(Keyword::MB) => Some(FileSizeUnit::MB),
10517 Some(Keyword::GB) => Some(FileSizeUnit::GB),
10518 _ => None,
10519 };
10520 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10521 }
10522 Some(Keyword::NULL) => {
10523 let _ = self.parse_keyword(Keyword::AS);
10524 CopyLegacyOption::Null(self.parse_literal_string()?)
10525 }
10526 Some(Keyword::PARALLEL) => {
10527 let enabled = match self.parse_one_of_keywords(&[
10528 Keyword::TRUE,
10529 Keyword::FALSE,
10530 Keyword::ON,
10531 Keyword::OFF,
10532 ]) {
10533 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10534 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10535 _ => None,
10536 };
10537 CopyLegacyOption::Parallel(enabled)
10538 }
10539 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10540 Some(Keyword::PARTITION) => {
10541 self.expect_keyword(Keyword::BY)?;
10542 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10543 let include = self.parse_keyword(Keyword::INCLUDE);
10544 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10545 }
10546 Some(Keyword::REGION) => {
10547 let _ = self.parse_keyword(Keyword::AS);
10548 let region = self.parse_literal_string()?;
10549 CopyLegacyOption::Region(region)
10550 }
10551 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
10552 Some(Keyword::ROWGROUPSIZE) => {
10553 let _ = self.parse_keyword(Keyword::AS);
10554 let file_size = self.parse_file_size()?;
10555 CopyLegacyOption::RowGroupSize(file_size)
10556 }
10557 Some(Keyword::STATUPDATE) => {
10558 let enabled = match self.parse_one_of_keywords(&[
10559 Keyword::TRUE,
10560 Keyword::FALSE,
10561 Keyword::ON,
10562 Keyword::OFF,
10563 ]) {
10564 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10565 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10566 _ => None,
10567 };
10568 CopyLegacyOption::StatUpdate(enabled)
10569 }
10570 Some(Keyword::TIMEFORMAT) => {
10571 let _ = self.parse_keyword(Keyword::AS);
10572 let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10573 Some(self.parse_literal_string()?)
10574 } else {
10575 None
10576 };
10577 CopyLegacyOption::TimeFormat(fmt)
10578 }
10579 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10580 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10581 _ => self.expected("option", self.peek_token())?,
10582 };
10583 Ok(ret)
10584 }
10585
10586 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10587 let size = self.parse_number_value()?.value;
10588 let unit = self.maybe_parse_file_size_unit();
10589 Ok(FileSize { size, unit })
10590 }
10591
10592 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10593 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10594 Some(Keyword::MB) => Some(FileSizeUnit::MB),
10595 Some(Keyword::GB) => Some(FileSizeUnit::GB),
10596 _ => None,
10597 }
10598 }
10599
10600 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10601 if self.parse_keyword(Keyword::DEFAULT) {
10602 Ok(IamRoleKind::Default)
10603 } else {
10604 let arn = self.parse_literal_string()?;
10605 Ok(IamRoleKind::Arn(arn))
10606 }
10607 }
10608
    /// Parses a single legacy `COPY ... CSV` sub-option.
    ///
    /// The two `FORCE` arms use side-effectful match guards: each guard tries
    /// to consume the keyword(s) following `FORCE`, so `FORCE NOT NULL` is
    /// attempted before `FORCE QUOTE`. If `FORCE` is not followed by either
    /// sequence, control falls through to the error arm.
    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
        let ret = match self.parse_one_of_keywords(&[
            Keyword::HEADER,
            Keyword::QUOTE,
            Keyword::ESCAPE,
            Keyword::FORCE,
        ]) {
            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
            Some(Keyword::QUOTE) => {
                // `QUOTE [AS] 'c'` — the AS is an optional noise word.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
            }
            Some(Keyword::ESCAPE) => {
                // `ESCAPE [AS] 'c'` — the AS is an optional noise word.
                let _ = self.parse_keyword(Keyword::AS);
                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
                CopyLegacyCsvOption::ForceNotNull(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
                CopyLegacyCsvOption::ForceQuote(
                    self.parse_comma_separated(|p| p.parse_identifier())?,
                )
            }
            _ => self.expected("csv option", self.peek_token())?,
        };
        Ok(ret)
    }
10639
10640 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10641 let s = self.parse_literal_string()?;
10642 if s.len() != 1 {
10643 let loc = self
10644 .tokens
10645 .get(self.index - 1)
10646 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10647 return parser_err!(format!("Expect a char, found {s:?}"), loc);
10648 }
10649 Ok(s.chars().next().unwrap())
10650 }
10651
    /// Parses tab-separated values, e.g. the data section following a
    /// `COPY ... FROM STDIN` statement. Thin alias for
    /// [`Self::parse_tab_value`].
    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
        self.parse_tab_value()
    }
10657
10658 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10659 let mut values = vec![];
10660 let mut content = String::from("");
10661 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10662 match t {
10663 Token::Whitespace(Whitespace::Tab) => {
10664 values.push(Some(content.to_string()));
10665 content.clear();
10666 }
10667 Token::Whitespace(Whitespace::Newline) => {
10668 values.push(Some(content.to_string()));
10669 content.clear();
10670 }
10671 Token::Backslash => {
10672 if self.consume_token(&Token::Period) {
10673 return values;
10674 }
10675 if let Token::Word(w) = self.next_token().token {
10676 if w.value == "N" {
10677 values.push(None);
10678 }
10679 }
10680 }
10681 _ => {
10682 content.push_str(&t.to_string());
10683 }
10684 }
10685 }
10686 values
10687 }
10688
    /// Parses a literal value (number, string, boolean, `NULL`, placeholder,
    /// byte/raw/dollar-quoted string, ...) and returns it with its source span.
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper that attaches the consumed token's span to a parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are values only in dialects with boolean literals.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is accepted as a quoted string value.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Single-/double-quoted strings may absorb adjacent string
            // literals in dialects that support implicit concatenation.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name` / `@name` placeholders: the name must follow immediately
            // (next_token_no_skip does not skip whitespace, so `: name` fails).
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    // Numeric placeholder names, e.g. `:1` (non-long numbers only).
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The placeholder's span covers the sigil and the name.
                Ok(Value::Placeholder(tok.to_string() + &ident.value)
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
10800
10801 fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
10802 if self.dialect.supports_string_literal_concatenation() {
10803 while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
10804 self.peek_token_ref().token
10805 {
10806 str.push_str(s.clone().as_str());
10807 self.advance_token();
10808 }
10809 }
10810 str
10811 }
10812
10813 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10815 let value_wrapper = self.parse_value()?;
10816 match &value_wrapper.value {
10817 Value::Number(_, _) => Ok(value_wrapper),
10818 Value::Placeholder(_) => Ok(value_wrapper),
10819 _ => {
10820 self.prev_token();
10821 self.expected("literal number", self.peek_token())
10822 }
10823 }
10824 }
10825
10826 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
10829 let next_token = self.next_token();
10830 match next_token.token {
10831 Token::Plus => Ok(Expr::UnaryOp {
10832 op: UnaryOperator::Plus,
10833 expr: Box::new(Expr::Value(self.parse_number_value()?)),
10834 }),
10835 Token::Minus => Ok(Expr::UnaryOp {
10836 op: UnaryOperator::Minus,
10837 expr: Box::new(Expr::Value(self.parse_number_value()?)),
10838 }),
10839 _ => {
10840 self.prev_token();
10841 Ok(Expr::Value(self.parse_number_value()?))
10842 }
10843 }
10844 }
10845
10846 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
10847 let next_token = self.next_token();
10848 let span = next_token.span;
10849 match next_token.token {
10850 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
10851 Value::SingleQuotedString(s.to_string()).with_span(span),
10852 )),
10853 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
10854 Value::DoubleQuotedString(s.to_string()).with_span(span),
10855 )),
10856 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
10857 Value::HexStringLiteral(s.to_string()).with_span(span),
10858 )),
10859 unexpected => self.expected(
10860 "a string value",
10861 TokenWithSpan {
10862 token: unexpected,
10863 span,
10864 },
10865 ),
10866 }
10867 }
10868
10869 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
10871 let next_token = self.next_token();
10872 match next_token.token {
10873 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
10874 _ => self.expected("literal int", next_token),
10875 }
10876 }
10877
    /// Parses the string body of `CREATE FUNCTION ... AS 'body' [, 'obj']`
    /// (the form where `AS` precedes the options).
    ///
    /// For Postgres-like dialects the body may be a dollar-quoted string
    /// (`$tag$...$tag$`); otherwise a plain string literal is required.
    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
        // Shared by both the body and the optional second string.
        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
            let peek_token = parser.peek_token();
            let span = peek_token.span;
            match peek_token.token {
                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
                {
                    // Only consume the token once we know it matches.
                    parser.next_token();
                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
                }
                _ => Ok(Expr::Value(
                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
                )),
            }
        };

        Ok(CreateFunctionBody::AsBeforeOptions {
            body: parse_string_expr(self)?,
            // A comma introduces an optional second string (e.g. the link
            // symbol for externally-implemented functions).
            link_symbol: if self.consume_token(&Token::Comma) {
                Some(parse_string_expr(self)?)
            } else {
                None
            },
        })
    }
10905
    /// Parses a literal string and returns its contents without quotes.
    ///
    /// Accepts a bare non-keyword word, single-/double-quoted strings,
    /// Unicode string literals, and — in Postgres-like dialects only —
    /// escaped string literals (`E'...'`).
    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
        let next_token = self.next_token();
        match next_token.token {
            // An unquoted, non-keyword identifier is accepted as a string.
            Token::Word(Word {
                value,
                keyword: Keyword::NoKeyword,
                ..
            }) => Ok(value),
            Token::SingleQuotedString(s) => Ok(s),
            Token::DoubleQuotedString(s) => Ok(s),
            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
                Ok(s)
            }
            Token::UnicodeStringLiteral(s) => Ok(s),
            _ => self.expected("literal string", next_token),
        }
    }
10924
10925 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
10927 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
10928 Some(Keyword::TRUE) => Ok(true),
10929 Some(Keyword::FALSE) => Ok(false),
10930 _ => self.expected("TRUE or FALSE", self.peek_token()),
10931 }
10932 }
10933
10934 pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
10936 let neg = self.parse_keyword(Keyword::NOT);
10937 let normalized_form = self.maybe_parse(|parser| {
10938 match parser.parse_one_of_keywords(&[
10939 Keyword::NFC,
10940 Keyword::NFD,
10941 Keyword::NFKC,
10942 Keyword::NFKD,
10943 ]) {
10944 Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
10945 Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
10946 Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
10947 Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
10948 _ => parser.expected("unicode normalization form", parser.peek_token()),
10949 }
10950 })?;
10951 if self.parse_keyword(Keyword::NORMALIZED) {
10952 return Ok(Expr::IsNormalized {
10953 expr: Box::new(expr),
10954 form: normalized_form,
10955 negated: neg,
10956 });
10957 }
10958 self.expected("unicode normalization form", self.peek_token())
10959 }
10960
10961 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
10962 self.expect_token(&Token::LParen)?;
10963 let values = self.parse_comma_separated(|parser| {
10964 let name = parser.parse_literal_string()?;
10965 let e = if parser.consume_token(&Token::Eq) {
10966 let value = parser.parse_number()?;
10967 EnumMember::NamedValue(name, value)
10968 } else {
10969 EnumMember::Name(name)
10970 };
10971 Ok(e)
10972 })?;
10973 self.expect_token(&Token::RParen)?;
10974
10975 Ok(values)
10976 }
10977
10978 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
10980 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
10981 if trailing_bracket.0 {
10982 return parser_err!(
10983 format!("unmatched > after parsing data type {ty}"),
10984 self.peek_token()
10985 );
10986 }
10987
10988 Ok(ty)
10989 }
10990
10991 fn parse_data_type_helper(
10992 &mut self,
10993 ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
10994 let dialect = self.dialect;
10995 self.advance_token();
10996 let next_token = self.get_current_token();
10997 let next_token_index = self.get_current_index();
10998
10999 let mut trailing_bracket: MatchedTrailingBracket = false.into();
11000 let mut data = match &next_token.token {
11001 Token::Word(w) => match w.keyword {
11002 Keyword::BOOLEAN => Ok(DataType::Boolean),
11003 Keyword::BOOL => Ok(DataType::Bool),
11004 Keyword::FLOAT => {
11005 let precision = self.parse_exact_number_optional_precision_scale()?;
11006
11007 if self.parse_keyword(Keyword::UNSIGNED) {
11008 Ok(DataType::FloatUnsigned(precision))
11009 } else {
11010 Ok(DataType::Float(precision))
11011 }
11012 }
11013 Keyword::REAL => {
11014 if self.parse_keyword(Keyword::UNSIGNED) {
11015 Ok(DataType::RealUnsigned)
11016 } else {
11017 Ok(DataType::Real)
11018 }
11019 }
11020 Keyword::FLOAT4 => Ok(DataType::Float4),
11021 Keyword::FLOAT32 => Ok(DataType::Float32),
11022 Keyword::FLOAT64 => Ok(DataType::Float64),
11023 Keyword::FLOAT8 => Ok(DataType::Float8),
11024 Keyword::DOUBLE => {
11025 if self.parse_keyword(Keyword::PRECISION) {
11026 if self.parse_keyword(Keyword::UNSIGNED) {
11027 Ok(DataType::DoublePrecisionUnsigned)
11028 } else {
11029 Ok(DataType::DoublePrecision)
11030 }
11031 } else {
11032 let precision = self.parse_exact_number_optional_precision_scale()?;
11033
11034 if self.parse_keyword(Keyword::UNSIGNED) {
11035 Ok(DataType::DoubleUnsigned(precision))
11036 } else {
11037 Ok(DataType::Double(precision))
11038 }
11039 }
11040 }
11041 Keyword::TINYINT => {
11042 let optional_precision = self.parse_optional_precision();
11043 if self.parse_keyword(Keyword::UNSIGNED) {
11044 Ok(DataType::TinyIntUnsigned(optional_precision?))
11045 } else {
11046 if dialect.supports_data_type_signed_suffix() {
11047 let _ = self.parse_keyword(Keyword::SIGNED);
11048 }
11049 Ok(DataType::TinyInt(optional_precision?))
11050 }
11051 }
11052 Keyword::INT2 => {
11053 let optional_precision = self.parse_optional_precision();
11054 if self.parse_keyword(Keyword::UNSIGNED) {
11055 Ok(DataType::Int2Unsigned(optional_precision?))
11056 } else {
11057 Ok(DataType::Int2(optional_precision?))
11058 }
11059 }
11060 Keyword::SMALLINT => {
11061 let optional_precision = self.parse_optional_precision();
11062 if self.parse_keyword(Keyword::UNSIGNED) {
11063 Ok(DataType::SmallIntUnsigned(optional_precision?))
11064 } else {
11065 if dialect.supports_data_type_signed_suffix() {
11066 let _ = self.parse_keyword(Keyword::SIGNED);
11067 }
11068 Ok(DataType::SmallInt(optional_precision?))
11069 }
11070 }
11071 Keyword::MEDIUMINT => {
11072 let optional_precision = self.parse_optional_precision();
11073 if self.parse_keyword(Keyword::UNSIGNED) {
11074 Ok(DataType::MediumIntUnsigned(optional_precision?))
11075 } else {
11076 if dialect.supports_data_type_signed_suffix() {
11077 let _ = self.parse_keyword(Keyword::SIGNED);
11078 }
11079 Ok(DataType::MediumInt(optional_precision?))
11080 }
11081 }
11082 Keyword::INT => {
11083 let optional_precision = self.parse_optional_precision();
11084 if self.parse_keyword(Keyword::UNSIGNED) {
11085 Ok(DataType::IntUnsigned(optional_precision?))
11086 } else {
11087 if dialect.supports_data_type_signed_suffix() {
11088 let _ = self.parse_keyword(Keyword::SIGNED);
11089 }
11090 Ok(DataType::Int(optional_precision?))
11091 }
11092 }
11093 Keyword::INT4 => {
11094 let optional_precision = self.parse_optional_precision();
11095 if self.parse_keyword(Keyword::UNSIGNED) {
11096 Ok(DataType::Int4Unsigned(optional_precision?))
11097 } else {
11098 Ok(DataType::Int4(optional_precision?))
11099 }
11100 }
11101 Keyword::INT8 => {
11102 let optional_precision = self.parse_optional_precision();
11103 if self.parse_keyword(Keyword::UNSIGNED) {
11104 Ok(DataType::Int8Unsigned(optional_precision?))
11105 } else {
11106 Ok(DataType::Int8(optional_precision?))
11107 }
11108 }
11109 Keyword::INT16 => Ok(DataType::Int16),
11110 Keyword::INT32 => Ok(DataType::Int32),
11111 Keyword::INT64 => Ok(DataType::Int64),
11112 Keyword::INT128 => Ok(DataType::Int128),
11113 Keyword::INT256 => Ok(DataType::Int256),
11114 Keyword::INTEGER => {
11115 let optional_precision = self.parse_optional_precision();
11116 if self.parse_keyword(Keyword::UNSIGNED) {
11117 Ok(DataType::IntegerUnsigned(optional_precision?))
11118 } else {
11119 if dialect.supports_data_type_signed_suffix() {
11120 let _ = self.parse_keyword(Keyword::SIGNED);
11121 }
11122 Ok(DataType::Integer(optional_precision?))
11123 }
11124 }
11125 Keyword::BIGINT => {
11126 let optional_precision = self.parse_optional_precision();
11127 if self.parse_keyword(Keyword::UNSIGNED) {
11128 Ok(DataType::BigIntUnsigned(optional_precision?))
11129 } else {
11130 if dialect.supports_data_type_signed_suffix() {
11131 let _ = self.parse_keyword(Keyword::SIGNED);
11132 }
11133 Ok(DataType::BigInt(optional_precision?))
11134 }
11135 }
11136 Keyword::HUGEINT => Ok(DataType::HugeInt),
11137 Keyword::UBIGINT => Ok(DataType::UBigInt),
11138 Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11139 Keyword::USMALLINT => Ok(DataType::USmallInt),
11140 Keyword::UTINYINT => Ok(DataType::UTinyInt),
11141 Keyword::UINT8 => Ok(DataType::UInt8),
11142 Keyword::UINT16 => Ok(DataType::UInt16),
11143 Keyword::UINT32 => Ok(DataType::UInt32),
11144 Keyword::UINT64 => Ok(DataType::UInt64),
11145 Keyword::UINT128 => Ok(DataType::UInt128),
11146 Keyword::UINT256 => Ok(DataType::UInt256),
11147 Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11148 Keyword::NVARCHAR => {
11149 Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11150 }
11151 Keyword::CHARACTER => {
11152 if self.parse_keyword(Keyword::VARYING) {
11153 Ok(DataType::CharacterVarying(
11154 self.parse_optional_character_length()?,
11155 ))
11156 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11157 Ok(DataType::CharacterLargeObject(
11158 self.parse_optional_precision()?,
11159 ))
11160 } else {
11161 Ok(DataType::Character(self.parse_optional_character_length()?))
11162 }
11163 }
11164 Keyword::CHAR => {
11165 if self.parse_keyword(Keyword::VARYING) {
11166 Ok(DataType::CharVarying(
11167 self.parse_optional_character_length()?,
11168 ))
11169 } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11170 Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11171 } else {
11172 Ok(DataType::Char(self.parse_optional_character_length()?))
11173 }
11174 }
11175 Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11176 Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11177 Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11178 Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11179 Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11180 Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11181 Keyword::LONGBLOB => Ok(DataType::LongBlob),
11182 Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11183 Keyword::BIT => {
11184 if self.parse_keyword(Keyword::VARYING) {
11185 Ok(DataType::BitVarying(self.parse_optional_precision()?))
11186 } else {
11187 Ok(DataType::Bit(self.parse_optional_precision()?))
11188 }
11189 }
11190 Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11191 Keyword::UUID => Ok(DataType::Uuid),
11192 Keyword::DATE => Ok(DataType::Date),
11193 Keyword::DATE32 => Ok(DataType::Date32),
11194 Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11195 Keyword::DATETIME64 => {
11196 self.prev_token();
11197 let (precision, time_zone) = self.parse_datetime_64()?;
11198 Ok(DataType::Datetime64(precision, time_zone))
11199 }
11200 Keyword::TIMESTAMP => {
11201 let precision = self.parse_optional_precision()?;
11202 let tz = if self.parse_keyword(Keyword::WITH) {
11203 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11204 TimezoneInfo::WithTimeZone
11205 } else if self.parse_keyword(Keyword::WITHOUT) {
11206 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11207 TimezoneInfo::WithoutTimeZone
11208 } else {
11209 TimezoneInfo::None
11210 };
11211 Ok(DataType::Timestamp(precision, tz))
11212 }
11213 Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11214 self.parse_optional_precision()?,
11215 TimezoneInfo::Tz,
11216 )),
11217 Keyword::TIMESTAMP_NTZ => {
11218 Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
11219 }
11220 Keyword::TIME => {
11221 let precision = self.parse_optional_precision()?;
11222 let tz = if self.parse_keyword(Keyword::WITH) {
11223 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11224 TimezoneInfo::WithTimeZone
11225 } else if self.parse_keyword(Keyword::WITHOUT) {
11226 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11227 TimezoneInfo::WithoutTimeZone
11228 } else {
11229 TimezoneInfo::None
11230 };
11231 Ok(DataType::Time(precision, tz))
11232 }
11233 Keyword::TIMETZ => Ok(DataType::Time(
11234 self.parse_optional_precision()?,
11235 TimezoneInfo::Tz,
11236 )),
11237 Keyword::INTERVAL => {
11238 if self.dialect.supports_interval_options() {
11239 let fields = self.maybe_parse_optional_interval_fields()?;
11240 let precision = self.parse_optional_precision()?;
11241 Ok(DataType::Interval { fields, precision })
11242 } else {
11243 Ok(DataType::Interval {
11244 fields: None,
11245 precision: None,
11246 })
11247 }
11248 }
11249 Keyword::JSON => Ok(DataType::JSON),
11250 Keyword::JSONB => Ok(DataType::JSONB),
11251 Keyword::REGCLASS => Ok(DataType::Regclass),
11252 Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11253 Keyword::FIXEDSTRING => {
11254 self.expect_token(&Token::LParen)?;
11255 let character_length = self.parse_literal_uint()?;
11256 self.expect_token(&Token::RParen)?;
11257 Ok(DataType::FixedString(character_length))
11258 }
11259 Keyword::TEXT => Ok(DataType::Text),
11260 Keyword::TINYTEXT => Ok(DataType::TinyText),
11261 Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11262 Keyword::LONGTEXT => Ok(DataType::LongText),
11263 Keyword::BYTEA => Ok(DataType::Bytea),
11264 Keyword::NUMERIC => Ok(DataType::Numeric(
11265 self.parse_exact_number_optional_precision_scale()?,
11266 )),
11267 Keyword::DECIMAL => {
11268 let precision = self.parse_exact_number_optional_precision_scale()?;
11269
11270 if self.parse_keyword(Keyword::UNSIGNED) {
11271 Ok(DataType::DecimalUnsigned(precision))
11272 } else {
11273 Ok(DataType::Decimal(precision))
11274 }
11275 }
11276 Keyword::DEC => {
11277 let precision = self.parse_exact_number_optional_precision_scale()?;
11278
11279 if self.parse_keyword(Keyword::UNSIGNED) {
11280 Ok(DataType::DecUnsigned(precision))
11281 } else {
11282 Ok(DataType::Dec(precision))
11283 }
11284 }
11285 Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11286 self.parse_exact_number_optional_precision_scale()?,
11287 )),
11288 Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11289 self.parse_exact_number_optional_precision_scale()?,
11290 )),
11291 Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11292 Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11293 Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11294 Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11295 Keyword::ARRAY => {
11296 if dialect_of!(self is SnowflakeDialect) {
11297 Ok(DataType::Array(ArrayElemTypeDef::None))
11298 } else if dialect_of!(self is ClickHouseDialect) {
11299 Ok(self.parse_sub_type(|internal_type| {
11300 DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11301 })?)
11302 } else {
11303 self.expect_token(&Token::Lt)?;
11304 let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11305 trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11306 Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11307 inside_type,
11308 ))))
11309 }
11310 }
11311 Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11312 self.prev_token();
11313 let field_defs = self.parse_duckdb_struct_type_def()?;
11314 Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11315 }
11316 Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11317 self.prev_token();
11318 let (field_defs, _trailing_bracket) =
11319 self.parse_struct_type_def(Self::parse_struct_field_def)?;
11320 trailing_bracket = _trailing_bracket;
11321 Ok(DataType::Struct(
11322 field_defs,
11323 StructBracketKind::AngleBrackets,
11324 ))
11325 }
11326 Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11327 self.prev_token();
11328 let fields = self.parse_union_type_def()?;
11329 Ok(DataType::Union(fields))
11330 }
11331 Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11332 Ok(self.parse_sub_type(DataType::Nullable)?)
11333 }
11334 Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11335 Ok(self.parse_sub_type(DataType::LowCardinality)?)
11336 }
11337 Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11338 self.prev_token();
11339 let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11340 Ok(DataType::Map(
11341 Box::new(key_data_type),
11342 Box::new(value_data_type),
11343 ))
11344 }
11345 Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11346 self.expect_token(&Token::LParen)?;
11347 let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11348 self.expect_token(&Token::RParen)?;
11349 Ok(DataType::Nested(field_defs))
11350 }
11351 Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11352 self.prev_token();
11353 let field_defs = self.parse_click_house_tuple_def()?;
11354 Ok(DataType::Tuple(field_defs))
11355 }
11356 Keyword::TRIGGER => Ok(DataType::Trigger),
11357 Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11358 let _ = self.parse_keyword(Keyword::TYPE);
11359 Ok(DataType::AnyType)
11360 }
11361 Keyword::TABLE => {
11362 if self.peek_token() == Token::LParen {
11365 let columns = self.parse_returns_table_columns()?;
11366 Ok(DataType::Table(Some(columns)))
11367 } else {
11368 Ok(DataType::Table(None))
11369 }
11370 }
11371 Keyword::SIGNED => {
11372 if self.parse_keyword(Keyword::INTEGER) {
11373 Ok(DataType::SignedInteger)
11374 } else {
11375 Ok(DataType::Signed)
11376 }
11377 }
11378 Keyword::UNSIGNED => {
11379 if self.parse_keyword(Keyword::INTEGER) {
11380 Ok(DataType::UnsignedInteger)
11381 } else {
11382 Ok(DataType::Unsigned)
11383 }
11384 }
11385 Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11386 Ok(DataType::TsVector)
11387 }
11388 Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11389 Ok(DataType::TsQuery)
11390 }
11391 _ => {
11392 self.prev_token();
11393 let type_name = self.parse_object_name(false)?;
11394 if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11395 Ok(DataType::Custom(type_name, modifiers))
11396 } else {
11397 Ok(DataType::Custom(type_name, vec![]))
11398 }
11399 }
11400 },
11401 _ => self.expected_at("a data type name", next_token_index),
11402 }?;
11403
11404 if self.dialect.supports_array_typedef_with_brackets() {
11405 while self.consume_token(&Token::LBracket) {
11406 let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11408 self.expect_token(&Token::RBracket)?;
11409 data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11410 }
11411 }
11412 Ok((data, trailing_bracket))
11413 }
11414
    /// Parses a single column of a `RETURNS TABLE (...)` clause.
    ///
    /// A RETURNS TABLE column is syntactically identical to an ordinary
    /// column definition, so this simply delegates.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
11418
11419 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11420 self.expect_token(&Token::LParen)?;
11421 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11422 self.expect_token(&Token::RParen)?;
11423 Ok(columns)
11424 }
11425
11426 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11427 self.expect_token(&Token::LParen)?;
11428 let mut values = Vec::new();
11429 loop {
11430 let next_token = self.next_token();
11431 match next_token.token {
11432 Token::SingleQuotedString(value) => values.push(value),
11433 _ => self.expected("a string", next_token)?,
11434 }
11435 let next_token = self.next_token();
11436 match next_token.token {
11437 Token::Comma => (),
11438 Token::RParen => break,
11439 _ => self.expected(", or }", next_token)?,
11440 }
11441 }
11442 Ok(values)
11443 }
11444
11445 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11447 let ident = self.parse_identifier()?;
11448 self.expect_keyword_is(Keyword::AS)?;
11449 let alias = self.parse_identifier()?;
11450 Ok(IdentWithAlias { ident, alias })
11451 }
11452
11453 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11455 let ident = self.parse_identifier()?;
11456 let _after_as = self.parse_keyword(Keyword::AS);
11457 let alias = self.parse_identifier()?;
11458 Ok(IdentWithAlias { ident, alias })
11459 }
11460
11461 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11463 self.parse_comma_separated(|parser| {
11464 parser.expect_token(&Token::LParen)?;
11465 let query = parser.parse_query()?;
11466 parser.expect_token(&Token::RParen)?;
11467 Ok(*query)
11468 })
11469 }
11470
    /// Parses the set quantifier for a pipe operator that requires an
    /// explicit `DISTINCT` (or `DISTINCT BY NAME`) modifier; any other
    /// quantifier is an error mentioning `operator_name`.
    fn parse_distinct_required_set_quantifier(
        &mut self,
        operator_name: &str,
    ) -> Result<SetQuantifier, ParserError> {
        // NOTE(review): SetOperator::Intersect is passed regardless of which
        // pipe operator is being parsed — presumably any operator accepting
        // the full quantifier grammar works here; confirm against
        // parse_set_quantifier.
        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
        match quantifier {
            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
            _ => Err(ParserError::ParserError(format!(
                "{operator_name} pipe operator requires DISTINCT modifier",
            ))),
        }
    }
11484
11485 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11487 if self.parse_keyword(Keyword::AS) {
11488 Ok(Some(self.parse_identifier()?))
11489 } else {
11490 self.maybe_parse(|parser| parser.parse_identifier())
11492 }
11493 }
11494
    /// Optionally parses an alias for a select-list item, deferring to the
    /// dialect's `is_select_item_alias` to decide which words may serve as
    /// an (implicit or explicit) alias.
    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
            parser.dialect.is_select_item_alias(explicit, kw, parser)
        }
        self.parse_optional_alias_inner(None, validator)
    }
11502
    /// Optionally parses a table-factor alias, including any parenthesized
    /// alias column definitions (e.g. `t (a, b)`).
    ///
    /// The dialect's `is_table_factor_alias` decides which words may act as
    /// an alias; `explicit` records whether `AS` introduced it.
    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
            parser.dialect.is_table_factor_alias(explicit, kw, parser)
        }
        // Peek for AS before parse_optional_alias_inner consumes it, so we
        // can record whether the alias was explicitly introduced.
        let explicit = self.peek_keyword(Keyword::AS);
        match self.parse_optional_alias_inner(None, validator)? {
            Some(name) => {
                let columns = self.parse_table_alias_column_defs()?;
                Ok(Some(TableAlias {
                    explicit,
                    name,
                    columns,
                }))
            }
            None => Ok(None),
        }
    }
11523
    /// Parses MySQL-style table index hints:
    /// `USE|IGNORE|FORCE INDEX|KEY [FOR JOIN|ORDER BY|GROUP BY] (idx, ...)`.
    ///
    /// Returns every hint found, in order; an empty vec when none are
    /// present. The index-name list may be empty (`USE INDEX ()`).
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                // Unreachable: parse_one_of_keywords only returns the
                // keywords listed above.
                _ => {
                    return self.expected(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token(),
                    )
                }
            };
            // INDEX and KEY are interchangeable in MySQL; remember which one
            // was used so it can round-trip.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
                }
            };
            // Optional `FOR {JOIN | ORDER BY | GROUP BY}` scope.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // Mandatory parenthesized index-name list, possibly empty.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
11581
    /// Parses an optional alias where `AS` is not required, but any word in
    /// `reserved_kwds` cannot serve as an implicit alias.
    pub fn parse_optional_alias(
        &mut self,
        reserved_kwds: &[Keyword],
    ) -> Result<Option<Ident>, ParserError> {
        // No dialect-specific fallback: a reserved word is never accepted as
        // an alias through the validator path here.
        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
            false
        }
        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
    }
11594
    /// Core of optional-alias parsing shared by the alias helpers above.
    ///
    /// Accepts a word, single-quoted string, or double-quoted string as the
    /// alias. A word is accepted when: `AS` preceded it, or it is not in
    /// `reserved_kwds`, or the dialect `validator` approves it. With no
    /// alias present the consumed token is pushed back and `None` returned;
    /// `AS` followed by a non-identifier is an error.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Accept the word outright after an explicit AS, or when the
            // caller supplied a reserved-keyword list that doesn't contain it.
            Token::Word(w)
                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Otherwise ask the dialect-specific validator.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                // An explicit AS demands an identifier after it.
                if after_as {
                    return self.expected("an identifier after AS", next_token);
                }
                // No alias: undo the token we consumed.
                self.prev_token();
                Ok(None) }
        }
    }
11638
    /// Parses an optional `GROUP BY` clause.
    ///
    /// Supports `GROUP BY ALL` (expressions = `None`), a comma-separated
    /// expression list, dialect-specific `WITH ROLLUP/CUBE/TOTALS`
    /// modifiers, and a trailing `GROUPING SETS (...)` modifier. Returns
    /// `Ok(None)` when no `GROUP BY` keywords are present.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` has no explicit expression list.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Any number of `WITH ROLLUP|CUBE|TOTALS` modifiers may follow.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords already limited
                        // the match to the three keywords above.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token().span.start
                            )
                        }
                    });
                }
            }
            // Optional `GROUPING SETS ((a, b), c, ...)`; each element is
            // either a parenthesized tuple or a single expression.
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
11694
    /// Parses an optional `ORDER BY` clause.
    ///
    /// Supports `ORDER BY ALL [options]` for dialects that allow it, and a
    /// comma-separated expression list with a ClickHouse-style `INTERPOLATE`
    /// suffix. Returns `Ok(None)` when no `ORDER BY` keywords are present.
    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            let order_by =
                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
                    let order_by_options = self.parse_order_by_options()?;
                    OrderBy {
                        kind: OrderByKind::All(order_by_options),
                        interpolate: None,
                    }
                } else {
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    // INTERPOLATE is only recognized for ClickHouse (and the
                    // generic dialect, which accepts a superset of syntax).
                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
                        self.parse_interpolations()?
                    } else {
                        None
                    };
                    OrderBy {
                        kind: OrderByKind::Expressions(exprs),
                        interpolate,
                    }
                };
            Ok(Some(order_by))
        } else {
            Ok(None)
        }
    }
11721
    /// Parses an optional LIMIT/OFFSET clause in any of its accepted shapes:
    /// `OFFSET n LIMIT m`, `LIMIT m OFFSET n`, MySQL's `LIMIT offset, limit`
    /// comma form, and ClickHouse's `LIMIT n BY exprs`.
    ///
    /// Returns `Ok(None)` when none of the keywords are present.
    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
        // OFFSET may legally appear before LIMIT.
        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_offset()?)
        } else {
            None
        };

        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
            let expr = self.parse_limit()?;

            // MySQL comma form `LIMIT <offset>, <limit>` — only when no
            // standalone OFFSET was already parsed.
            if self.dialect.supports_limit_comma()
                && offset.is_none()
                && expr.is_some() && self.consume_token(&Token::Comma)
            {
                // expr.is_some() was just checked, so this cannot fail; the
                // error path is kept as a defensive guard.
                let offset = expr.ok_or_else(|| {
                    ParserError::ParserError(
                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
                    )
                })?;
                return Ok(Some(LimitClause::OffsetCommaLimit {
                    offset,
                    limit: self.parse_expr()?,
                }));
            }

            // ClickHouse `LIMIT n BY a, b`.
            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
                && self.parse_keyword(Keyword::BY)
            {
                Some(self.parse_comma_separated(Parser::parse_expr)?)
            } else {
                None
            };

            (Some(expr), limit_by)
        } else {
            (None, None)
        };

        // OFFSET may also follow LIMIT (but not appear twice).
        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
            offset = Some(self.parse_offset()?);
        }

        // `limit == Some(None)` means the LIMIT keyword was seen but carried
        // no expression; on its own that does not produce a clause.
        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
            Ok(Some(LimitClause::LimitOffset {
                limit: limit.unwrap_or_default(),
                offset,
                limit_by: limit_by.unwrap_or_default(),
            }))
        } else {
            Ok(None)
        }
    }
11775
11776 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
11779 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
11780 let fn_name = self.parse_object_name(false)?;
11781 self.parse_function_call(fn_name)
11782 .map(TableObject::TableFunction)
11783 } else {
11784 self.parse_object_name(false).map(TableObject::TableName)
11785 }
11786 }
11787
    /// Parses a possibly qualified, possibly quoted object name such as
    /// `db.schema.table`. `in_table_clause` enables BigQuery's unquoted
    /// hyphenated table names; wildcard parts are not allowed here.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
11797
    /// Core object-name parser behind [`Parser::parse_object_name`].
    ///
    /// Handles BigQuery hyphenated identifiers in table clauses, optional
    /// `*` wildcard parts (`allow_wildcards`), Snowflake-style double-dot
    /// notation (`db..table` — the empty middle part is materialized), and
    /// dialect-specific function-call parts (e.g. `schema.fn(args).attr`).
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            // BigQuery table clause: parts may contain unquoted hyphens, and
            // a part ending in `.` (a number like `1.`) also continues the name.
            loop {
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token().token == Token::Mul {
                    // A `*` part, e.g. GRANT ... ON schema.*
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token().token, Token::Period)
                {
                    // `db..table`: insert an empty part for the omitted schema.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a function call as a name part.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery allows a quoted part to embed dots (`"a.b.c"`); split such
        // parts so every element of the ObjectName is a single path segment.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
11894
11895 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
11897 let mut idents = vec![];
11898 loop {
11899 match &self.peek_token_ref().token {
11900 Token::Word(w) => {
11901 idents.push(w.clone().into_ident(self.peek_token_ref().span));
11902 }
11903 Token::EOF | Token::Eq => break,
11904 _ => {}
11905 }
11906 self.advance_token();
11907 }
11908 Ok(idents)
11909 }
11910
11911 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
11951 let mut idents = vec![];
11952
11953 let next_token = self.next_token();
11955 match next_token.token {
11956 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11957 Token::EOF => {
11958 return Err(ParserError::ParserError(
11959 "Empty input when parsing identifier".to_string(),
11960 ))?
11961 }
11962 token => {
11963 return Err(ParserError::ParserError(format!(
11964 "Unexpected token in identifier: {token}"
11965 )))?
11966 }
11967 };
11968
11969 loop {
11971 match self.next_token().token {
11972 Token::Period => {
11974 let next_token = self.next_token();
11975 match next_token.token {
11976 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11977 Token::EOF => {
11978 return Err(ParserError::ParserError(
11979 "Trailing period in identifier".to_string(),
11980 ))?
11981 }
11982 token => {
11983 return Err(ParserError::ParserError(format!(
11984 "Unexpected token following period in identifier: {token}"
11985 )))?
11986 }
11987 }
11988 }
11989 Token::EOF => break,
11990 token => {
11991 return Err(ParserError::ParserError(format!(
11992 "Unexpected token in identifier: {token}"
11993 )))?;
11994 }
11995 }
11996 }
11997
11998 Ok(idents)
11999 }
12000
12001 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
12003 let next_token = self.next_token();
12004 match next_token.token {
12005 Token::Word(w) => Ok(w.into_ident(next_token.span)),
12006 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
12007 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
12008 _ => self.expected("identifier", next_token),
12009 }
12010 }
12011
    /// Parses a BigQuery-style identifier that may contain unquoted hyphens
    /// (e.g. `my-project.dataset.table`).
    ///
    /// Returns the assembled identifier and a flag that is `true` when the
    /// identifier ended with a period folded into a trailing number token
    /// (e.g. `proj-1.dataset` tokenizes the `1.` as a number), meaning the
    /// caller should treat the name as continuing past a period.
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted words may absorb following `-word` segments.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // `proj-1.` tokenizes as Number("1."): keep the
                                // digits, report the trailing period to the
                                // caller via the bool return.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A trailing number segment must be followed by
                                // whitespace unless a period continues the name.
                                !matches!(self.peek_token().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    // Enforce the whitespace requirement recorded above.
                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            // Not a word (e.g. a quoted string): fall back to the regular path.
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
12089
    /// Parses the optional parenthesized column list of a CREATE VIEW
    /// statement. Returns an empty vec when the list is absent or `()`.
    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
        if self.consume_token(&Token::LParen) {
            if self.peek_token().token == Token::RParen {
                // Empty column list `()`.
                self.next_token();
                Ok(vec![])
            } else {
                // Trailing commas are permitted only if the dialect says so.
                let cols = self.parse_comma_separated_with_trailing_commas(
                    Parser::parse_view_column,
                    self.dialect.supports_column_definition_trailing_commas(),
                    Self::is_reserved_for_column_alias,
                )?;
                self.expect_token(&Token::RParen)?;
                Ok(cols)
            }
        } else {
            Ok(vec![])
        }
    }
12109
12110 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12112 let name = self.parse_identifier()?;
12113 let options = self.parse_view_column_options()?;
12114 let data_type = if dialect_of!(self is ClickHouseDialect) {
12115 Some(self.parse_data_type()?)
12116 } else {
12117 None
12118 };
12119 Ok(ViewColumnDef {
12120 name,
12121 data_type,
12122 options,
12123 })
12124 }
12125
12126 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12127 let mut options = Vec::new();
12128 loop {
12129 let option = self.parse_optional_column_option()?;
12130 if let Some(option) = option {
12131 options.push(option);
12132 } else {
12133 break;
12134 }
12135 }
12136 if options.is_empty() {
12137 Ok(None)
12138 } else if self.dialect.supports_space_separated_column_options() {
12139 Ok(Some(ColumnOptions::SpaceSeparated(options)))
12140 } else {
12141 Ok(Some(ColumnOptions::CommaSeparated(options)))
12142 }
12143 }
12144
    /// Parses a parenthesized list of simple identifiers, e.g. `(a, b, c)`.
    /// `optional` controls whether a missing list is an error; `allow_empty`
    /// permits `()`.
    pub fn parse_parenthesized_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Ident>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
    }
12154
    /// Parses a parenthesized list of period-separated compound identifiers,
    /// e.g. `(t.a, t.b)`, each returned as `Expr::CompoundIdentifier`.
    pub fn parse_parenthesized_compound_identifier_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Expr>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            Ok(Expr::CompoundIdentifier(
                p.parse_period_separated(|p| p.parse_identifier())?,
            ))
        })
    }
12166
    /// Parses a mandatory, non-empty parenthesized list of index column
    /// expressions, as used by CREATE INDEX.
    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
            p.parse_create_index_expr()
        })
    }
12174
    /// Parses a parenthesized list of possibly qualified column names,
    /// e.g. `(schema.t.a, b)`.
    pub fn parse_parenthesized_qualified_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<ObjectName>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            p.parse_object_name(true)
        })
    }
12186
    /// Shared implementation for the `parse_parenthesized_*_list` helpers:
    /// parses `( f, f, ... )` using `f` for each element.
    ///
    /// With no opening `(`, returns an empty vec when `optional == Optional`
    /// and an error otherwise; `allow_empty` permits `()`.
    fn parse_parenthesized_column_list_inner<F, T>(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
        mut f: F,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser) -> Result<T, ParserError>,
    {
        if self.consume_token(&Token::LParen) {
            if allow_empty && self.peek_token().token == Token::RParen {
                self.next_token();
                Ok(vec![])
            } else {
                let cols = self.parse_comma_separated(|p| f(p))?;
                self.expect_token(&Token::RParen)?;
                Ok(cols)
            }
        } else if optional == Optional {
            Ok(vec![])
        } else {
            self.expected("a list of columns in parentheses", self.peek_token())
        }
    }
12213
12214 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12216 if self.consume_token(&Token::LParen) {
12217 let cols = self.parse_comma_separated(|p| {
12218 let name = p.parse_identifier()?;
12219 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12220 Ok(TableAliasColumnDef { name, data_type })
12221 })?;
12222 self.expect_token(&Token::RParen)?;
12223 Ok(cols)
12224 } else {
12225 Ok(vec![])
12226 }
12227 }
12228
12229 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12230 self.expect_token(&Token::LParen)?;
12231 let n = self.parse_literal_uint()?;
12232 self.expect_token(&Token::RParen)?;
12233 Ok(n)
12234 }
12235
12236 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12237 if self.consume_token(&Token::LParen) {
12238 let n = self.parse_literal_uint()?;
12239 self.expect_token(&Token::RParen)?;
12240 Ok(Some(n))
12241 } else {
12242 Ok(None)
12243 }
12244 }
12245
    /// Optionally parses the field qualifier of an INTERVAL type, e.g.
    /// `YEAR`, `DAY TO SECOND`, `MINUTE TO SECOND`.
    ///
    /// Returns `Ok(None)` when no interval-field keyword follows; otherwise
    /// the parsed [`IntervalFields`] variant, including the `X TO Y` ranges.
    fn maybe_parse_optional_interval_fields(
        &mut self,
    ) -> Result<Option<IntervalFields>, ParserError> {
        match self.parse_one_of_keywords(&[
            Keyword::YEAR,
            Keyword::DAY,
            Keyword::HOUR,
            Keyword::MINUTE,
            Keyword::MONTH,
            Keyword::SECOND,
        ]) {
            Some(Keyword::YEAR) => {
                // `YEAR TO MONTH` is the only valid YEAR range.
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::MONTH)?;
                    Ok(Some(IntervalFields::YearToMonth))
                } else {
                    Ok(Some(IntervalFields::Year))
                }
            }
            Some(Keyword::DAY) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[
                        Keyword::HOUR,
                        Keyword::MINUTE,
                        Keyword::SECOND,
                    ])? {
                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
                        // Unreachable: expect_one_of_keywords limits the match.
                        _ => {
                            self.prev_token();
                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Day))
                }
            }
            Some(Keyword::HOUR) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
                        _ => {
                            self.prev_token();
                            self.expected("MINUTE or SECOND", self.peek_token())
                        }
                    }
                } else {
                    Ok(Some(IntervalFields::Hour))
                }
            }
            Some(Keyword::MINUTE) => {
                if self.peek_keyword(Keyword::TO) {
                    self.expect_keyword(Keyword::TO)?;
                    self.expect_keyword(Keyword::SECOND)?;
                    Ok(Some(IntervalFields::MinuteToSecond))
                } else {
                    Ok(Some(IntervalFields::Minute))
                }
            }
            // MONTH and SECOND have no `TO` ranges.
            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
            Some(_) => {
                self.prev_token();
                self.expected(
                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
                    self.peek_token(),
                )
            }
            None => Ok(None),
        }
    }
12324
12325 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12333 self.expect_keyword_is(Keyword::DATETIME64)?;
12334 self.expect_token(&Token::LParen)?;
12335 let precision = self.parse_literal_uint()?;
12336 let time_zone = if self.consume_token(&Token::Comma) {
12337 Some(self.parse_literal_string()?)
12338 } else {
12339 None
12340 };
12341 self.expect_token(&Token::RParen)?;
12342 Ok((precision, time_zone))
12343 }
12344
12345 pub fn parse_optional_character_length(
12346 &mut self,
12347 ) -> Result<Option<CharacterLength>, ParserError> {
12348 if self.consume_token(&Token::LParen) {
12349 let character_length = self.parse_character_length()?;
12350 self.expect_token(&Token::RParen)?;
12351 Ok(Some(character_length))
12352 } else {
12353 Ok(None)
12354 }
12355 }
12356
12357 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12358 if self.consume_token(&Token::LParen) {
12359 let binary_length = self.parse_binary_length()?;
12360 self.expect_token(&Token::RParen)?;
12361 Ok(Some(binary_length))
12362 } else {
12363 Ok(None)
12364 }
12365 }
12366
12367 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12368 if self.parse_keyword(Keyword::MAX) {
12369 return Ok(CharacterLength::Max);
12370 }
12371 let length = self.parse_literal_uint()?;
12372 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12373 Some(CharLengthUnits::Characters)
12374 } else if self.parse_keyword(Keyword::OCTETS) {
12375 Some(CharLengthUnits::Octets)
12376 } else {
12377 None
12378 };
12379 Ok(CharacterLength::IntegerLength { length, unit })
12380 }
12381
12382 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12383 if self.parse_keyword(Keyword::MAX) {
12384 return Ok(BinaryLength::Max);
12385 }
12386 let length = self.parse_literal_uint()?;
12387 Ok(BinaryLength::IntegerLength { length })
12388 }
12389
12390 pub fn parse_optional_precision_scale(
12391 &mut self,
12392 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12393 if self.consume_token(&Token::LParen) {
12394 let n = self.parse_literal_uint()?;
12395 let scale = if self.consume_token(&Token::Comma) {
12396 Some(self.parse_literal_uint()?)
12397 } else {
12398 None
12399 };
12400 self.expect_token(&Token::RParen)?;
12401 Ok((Some(n), scale))
12402 } else {
12403 Ok((None, None))
12404 }
12405 }
12406
12407 pub fn parse_exact_number_optional_precision_scale(
12408 &mut self,
12409 ) -> Result<ExactNumberInfo, ParserError> {
12410 if self.consume_token(&Token::LParen) {
12411 let precision = self.parse_literal_uint()?;
12412 let scale = if self.consume_token(&Token::Comma) {
12413 Some(self.parse_signed_integer()?)
12414 } else {
12415 None
12416 };
12417
12418 self.expect_token(&Token::RParen)?;
12419
12420 match scale {
12421 None => Ok(ExactNumberInfo::Precision(precision)),
12422 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12423 }
12424 } else {
12425 Ok(ExactNumberInfo::None)
12426 }
12427 }
12428
12429 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12431 let is_negative = self.consume_token(&Token::Minus);
12432
12433 if !is_negative {
12434 let _ = self.consume_token(&Token::Plus);
12435 }
12436
12437 let current_token = self.peek_token_ref();
12438 match ¤t_token.token {
12439 Token::Number(s, _) => {
12440 let s = s.clone();
12441 let span_start = current_token.span.start;
12442 self.advance_token();
12443 let value = Self::parse::<i64>(s, span_start)?;
12444 Ok(if is_negative { -value } else { value })
12445 }
12446 _ => self.expected_ref("number", current_token),
12447 }
12448 }
12449
12450 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
12451 if self.consume_token(&Token::LParen) {
12452 let mut modifiers = Vec::new();
12453 loop {
12454 let next_token = self.next_token();
12455 match next_token.token {
12456 Token::Word(w) => modifiers.push(w.to_string()),
12457 Token::Number(n, _) => modifiers.push(n),
12458 Token::SingleQuotedString(s) => modifiers.push(s),
12459
12460 Token::Comma => {
12461 continue;
12462 }
12463 Token::RParen => {
12464 break;
12465 }
12466 _ => self.expected("type modifiers", next_token)?,
12467 }
12468 }
12469
12470 Ok(Some(modifiers))
12471 } else {
12472 Ok(None)
12473 }
12474 }
12475
12476 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12478 where
12479 F: FnOnce(Box<DataType>) -> DataType,
12480 {
12481 self.expect_token(&Token::LParen)?;
12482 let inside_type = self.parse_data_type()?;
12483 self.expect_token(&Token::RParen)?;
12484 Ok(parent_type(inside_type.into()))
12485 }
12486
12487 fn parse_delete_setexpr_boxed(
12491 &mut self,
12492 delete_token: TokenWithSpan,
12493 ) -> Result<Box<SetExpr>, ParserError> {
12494 Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
12495 }
12496
    /// Parse a `DELETE` statement; `delete_token` is the already-consumed
    /// `DELETE` keyword token, stored in the AST for span fidelity.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        // Either `DELETE FROM ...`, `DELETE t1, t2 FROM ...` (multi-table),
        // or — for the listed dialects — `DELETE <table> ...` with no FROM.
        // Track whether the FROM keyword appeared so the AST can round-trip.
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            tables,
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            order_by,
            limit,
        }))
    }
12554
12555 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
12557 let modifier_keyword =
12558 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
12559
12560 let id = self.parse_literal_uint()?;
12561
12562 let modifier = match modifier_keyword {
12563 Some(Keyword::CONNECTION) => Some(KillType::Connection),
12564 Some(Keyword::QUERY) => Some(KillType::Query),
12565 Some(Keyword::MUTATION) => {
12566 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
12567 Some(KillType::Mutation)
12568 } else {
12569 self.expected(
12570 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
12571 self.peek_token(),
12572 )?
12573 }
12574 }
12575 _ => None,
12576 };
12577
12578 Ok(Statement::Kill { modifier, id })
12579 }
12580
    /// Parse an `EXPLAIN`/`DESCRIBE` statement. When a full statement follows,
    /// produce `Statement::Explain`; otherwise fall back to table description
    /// (`Statement::ExplainTable`).
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options (e.g. `EXPLAIN (ANALYZE, FORMAT ...)`)
        // are only attempted for `EXPLAIN` proper, and only when the dialect
        // supports them; otherwise probe the keyword-style modifiers.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // A nested EXPLAIN is rejected: it must be the root of the plan.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            // No parsable statement: treat the input as `DESCRIBE [TABLE] <name>`.
            _ => {
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    // Only consume the TABLE keyword when the dialect requires it.
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
12650
12651 pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
12656 let _guard = self.recursion_counter.try_decrease()?;
12657 let with = if self.parse_keyword(Keyword::WITH) {
12658 let with_token = self.get_current_token();
12659 Some(With {
12660 with_token: with_token.clone().into(),
12661 recursive: self.parse_keyword(Keyword::RECURSIVE),
12662 cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
12663 })
12664 } else {
12665 None
12666 };
12667 if self.parse_keyword(Keyword::INSERT) {
12668 Ok(Query {
12669 with,
12670 body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
12671 order_by: None,
12672 limit_clause: None,
12673 fetch: None,
12674 locks: vec![],
12675 for_clause: None,
12676 settings: None,
12677 format_clause: None,
12678 pipe_operators: vec![],
12679 }
12680 .into())
12681 } else if self.parse_keyword(Keyword::UPDATE) {
12682 Ok(Query {
12683 with,
12684 body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
12685 order_by: None,
12686 limit_clause: None,
12687 fetch: None,
12688 locks: vec![],
12689 for_clause: None,
12690 settings: None,
12691 format_clause: None,
12692 pipe_operators: vec![],
12693 }
12694 .into())
12695 } else if self.parse_keyword(Keyword::DELETE) {
12696 Ok(Query {
12697 with,
12698 body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
12699 limit_clause: None,
12700 order_by: None,
12701 fetch: None,
12702 locks: vec![],
12703 for_clause: None,
12704 settings: None,
12705 format_clause: None,
12706 pipe_operators: vec![],
12707 }
12708 .into())
12709 } else if self.parse_keyword(Keyword::MERGE) {
12710 Ok(Query {
12711 with,
12712 body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
12713 limit_clause: None,
12714 order_by: None,
12715 fetch: None,
12716 locks: vec![],
12717 for_clause: None,
12718 settings: None,
12719 format_clause: None,
12720 pipe_operators: vec![],
12721 }
12722 .into())
12723 } else {
12724 let body = self.parse_query_body(self.dialect.prec_unknown())?;
12725
12726 let order_by = self.parse_optional_order_by()?;
12727
12728 let limit_clause = self.parse_optional_limit_clause()?;
12729
12730 let settings = self.parse_settings()?;
12731
12732 let fetch = if self.parse_keyword(Keyword::FETCH) {
12733 Some(self.parse_fetch()?)
12734 } else {
12735 None
12736 };
12737
12738 let mut for_clause = None;
12739 let mut locks = Vec::new();
12740 while self.parse_keyword(Keyword::FOR) {
12741 if let Some(parsed_for_clause) = self.parse_for_clause()? {
12742 for_clause = Some(parsed_for_clause);
12743 break;
12744 } else {
12745 locks.push(self.parse_lock()?);
12746 }
12747 }
12748 let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
12749 && self.parse_keyword(Keyword::FORMAT)
12750 {
12751 if self.parse_keyword(Keyword::NULL) {
12752 Some(FormatClause::Null)
12753 } else {
12754 let ident = self.parse_identifier()?;
12755 Some(FormatClause::Identifier(ident))
12756 }
12757 } else {
12758 None
12759 };
12760
12761 let pipe_operators = if self.dialect.supports_pipe_operator() {
12762 self.parse_pipe_operators()?
12763 } else {
12764 Vec::new()
12765 };
12766
12767 Ok(Query {
12768 with,
12769 body,
12770 order_by,
12771 limit_clause,
12772 fetch,
12773 locks,
12774 for_clause,
12775 settings,
12776 format_clause,
12777 pipe_operators,
12778 }
12779 .into())
12780 }
12781 }
12782
    /// Parse a chain of pipe operators (`|> SELECT ...`, `|> WHERE ...`, ...)
    /// until no further `|>` token follows.
    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
        let mut pipe_operators = Vec::new();

        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
            // Every pipe operator begins with one of these keywords; anything
            // else is rejected by `expect_one_of_keywords`.
            let kw = self.expect_one_of_keywords(&[
                Keyword::SELECT,
                Keyword::EXTEND,
                Keyword::SET,
                Keyword::DROP,
                Keyword::AS,
                Keyword::WHERE,
                Keyword::LIMIT,
                Keyword::AGGREGATE,
                Keyword::ORDER,
                Keyword::TABLESAMPLE,
                Keyword::RENAME,
                Keyword::UNION,
                Keyword::INTERSECT,
                Keyword::EXCEPT,
                Keyword::CALL,
                Keyword::PIVOT,
                Keyword::UNPIVOT,
                Keyword::JOIN,
                Keyword::INNER,
                Keyword::LEFT,
                Keyword::RIGHT,
                Keyword::FULL,
                Keyword::CROSS,
            ])?;
            match kw {
                Keyword::SELECT => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Select { exprs })
                }
                Keyword::EXTEND => {
                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
                    pipe_operators.push(PipeOperator::Extend { exprs })
                }
                Keyword::SET => {
                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                    pipe_operators.push(PipeOperator::Set { assignments })
                }
                Keyword::DROP => {
                    let columns = self.parse_identifiers()?;
                    pipe_operators.push(PipeOperator::Drop { columns })
                }
                Keyword::AS => {
                    let alias = self.parse_identifier()?;
                    pipe_operators.push(PipeOperator::As { alias })
                }
                Keyword::WHERE => {
                    let expr = self.parse_expr()?;
                    pipe_operators.push(PipeOperator::Where { expr })
                }
                Keyword::LIMIT => {
                    let expr = self.parse_expr()?;
                    let offset = if self.parse_keyword(Keyword::OFFSET) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    pipe_operators.push(PipeOperator::Limit { expr, offset })
                }
                Keyword::AGGREGATE => {
                    // Aggregate expressions applied to the whole table come
                    // first; an immediate `GROUP` means there are none.
                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
                        vec![]
                    } else {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    };

                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                        self.parse_comma_separated(|parser| {
                            parser.parse_expr_with_alias_and_order_by()
                        })?
                    } else {
                        vec![]
                    };

                    pipe_operators.push(PipeOperator::Aggregate {
                        full_table_exprs,
                        group_by_expr,
                    })
                }
                Keyword::ORDER => {
                    self.expect_one_of_keywords(&[Keyword::BY])?;
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    pipe_operators.push(PipeOperator::OrderBy { exprs })
                }
                Keyword::TABLESAMPLE => {
                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
                    pipe_operators.push(PipeOperator::TableSample { sample });
                }
                Keyword::RENAME => {
                    let mappings =
                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
                    pipe_operators.push(PipeOperator::Rename { mappings });
                }
                Keyword::UNION => {
                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Union {
                        set_quantifier,
                        queries,
                    });
                }
                // INTERSECT/EXCEPT require an explicit DISTINCT quantifier.
                Keyword::INTERSECT => {
                    let set_quantifier =
                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Intersect {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::EXCEPT => {
                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
                    let queries = self.parse_pipe_operator_queries()?;
                    pipe_operators.push(PipeOperator::Except {
                        set_quantifier,
                        queries,
                    });
                }
                Keyword::CALL => {
                    let function_name = self.parse_object_name(false)?;
                    let function_expr = self.parse_function(function_name)?;
                    if let Expr::Function(function) = function_expr {
                        let alias = self.parse_identifier_optional_alias()?;
                        pipe_operators.push(PipeOperator::Call { function, alias });
                    } else {
                        return Err(ParserError::ParserError(
                            "Expected function call after CALL".to_string(),
                        ));
                    }
                }
                Keyword::PIVOT => {
                    self.expect_token(&Token::LParen)?;
                    let aggregate_functions =
                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
                    self.expect_keyword_is(Keyword::FOR)?;
                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
                    self.expect_keyword_is(Keyword::IN)?;

                    // The IN list may be `ANY [ORDER BY ...]`, a subquery,
                    // or an explicit list of aliased expressions.
                    self.expect_token(&Token::LParen)?;
                    let value_source = if self.parse_keyword(Keyword::ANY) {
                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                            self.parse_comma_separated(Parser::parse_order_by_expr)?
                        } else {
                            vec![]
                        };
                        PivotValueSource::Any(order_by)
                    } else if self.peek_sub_query() {
                        PivotValueSource::Subquery(self.parse_query()?)
                    } else {
                        PivotValueSource::List(
                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
                        )
                    };
                    self.expect_token(&Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Pivot {
                        aggregate_functions,
                        value_column,
                        value_source,
                        alias,
                    });
                }
                Keyword::UNPIVOT => {
                    self.expect_token(&Token::LParen)?;
                    let value_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::FOR)?;
                    let name_column = self.parse_identifier()?;
                    self.expect_keyword(Keyword::IN)?;

                    self.expect_token(&Token::LParen)?;
                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
                    self.expect_token(&Token::RParen)?;

                    self.expect_token(&Token::RParen)?;

                    let alias = self.parse_identifier_optional_alias()?;

                    pipe_operators.push(PipeOperator::Unpivot {
                        value_column,
                        name_column,
                        unpivot_columns,
                        alias,
                    });
                }
                Keyword::JOIN
                | Keyword::INNER
                | Keyword::LEFT
                | Keyword::RIGHT
                | Keyword::FULL
                | Keyword::CROSS => {
                    // Push the join keyword back so `parse_joins` sees it.
                    self.prev_token();
                    let mut joins = self.parse_joins()?;
                    if joins.len() != 1 {
                        return Err(ParserError::ParserError(
                            "Join pipe operator must have a single join".to_string(),
                        ));
                    }
                    let join = joins.swap_remove(0);
                    pipe_operators.push(PipeOperator::Join(join))
                }
                // Unreachable as long as the arms above cover the keyword
                // list passed to `expect_one_of_keywords`.
                unhandled => {
                    return Err(ParserError::ParserError(format!(
                        "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
                    )))
                }
            }
        }
        Ok(pipe_operators)
    }
13001
13002 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
13003 let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13004 && self.parse_keyword(Keyword::SETTINGS)
13005 {
13006 let key_values = self.parse_comma_separated(|p| {
13007 let key = p.parse_identifier()?;
13008 p.expect_token(&Token::Eq)?;
13009 let value = p.parse_expr()?;
13010 Ok(Setting { key, value })
13011 })?;
13012 Some(key_values)
13013 } else {
13014 None
13015 };
13016 Ok(settings)
13017 }
13018
13019 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
13021 if self.parse_keyword(Keyword::XML) {
13022 Ok(Some(self.parse_for_xml()?))
13023 } else if self.parse_keyword(Keyword::JSON) {
13024 Ok(Some(self.parse_for_json()?))
13025 } else if self.parse_keyword(Keyword::BROWSE) {
13026 Ok(Some(ForClause::Browse))
13027 } else {
13028 Ok(None)
13029 }
13030 }
13031
13032 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
13034 let for_xml = if self.parse_keyword(Keyword::RAW) {
13035 let mut element_name = None;
13036 if self.peek_token().token == Token::LParen {
13037 self.expect_token(&Token::LParen)?;
13038 element_name = Some(self.parse_literal_string()?);
13039 self.expect_token(&Token::RParen)?;
13040 }
13041 ForXml::Raw(element_name)
13042 } else if self.parse_keyword(Keyword::AUTO) {
13043 ForXml::Auto
13044 } else if self.parse_keyword(Keyword::EXPLICIT) {
13045 ForXml::Explicit
13046 } else if self.parse_keyword(Keyword::PATH) {
13047 let mut element_name = None;
13048 if self.peek_token().token == Token::LParen {
13049 self.expect_token(&Token::LParen)?;
13050 element_name = Some(self.parse_literal_string()?);
13051 self.expect_token(&Token::RParen)?;
13052 }
13053 ForXml::Path(element_name)
13054 } else {
13055 return Err(ParserError::ParserError(
13056 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
13057 ));
13058 };
13059 let mut elements = false;
13060 let mut binary_base64 = false;
13061 let mut root = None;
13062 let mut r#type = false;
13063 while self.peek_token().token == Token::Comma {
13064 self.next_token();
13065 if self.parse_keyword(Keyword::ELEMENTS) {
13066 elements = true;
13067 } else if self.parse_keyword(Keyword::BINARY) {
13068 self.expect_keyword_is(Keyword::BASE64)?;
13069 binary_base64 = true;
13070 } else if self.parse_keyword(Keyword::ROOT) {
13071 self.expect_token(&Token::LParen)?;
13072 root = Some(self.parse_literal_string()?);
13073 self.expect_token(&Token::RParen)?;
13074 } else if self.parse_keyword(Keyword::TYPE) {
13075 r#type = true;
13076 }
13077 }
13078 Ok(ForClause::Xml {
13079 for_xml,
13080 elements,
13081 binary_base64,
13082 root,
13083 r#type,
13084 })
13085 }
13086
13087 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
13089 let for_json = if self.parse_keyword(Keyword::AUTO) {
13090 ForJson::Auto
13091 } else if self.parse_keyword(Keyword::PATH) {
13092 ForJson::Path
13093 } else {
13094 return Err(ParserError::ParserError(
13095 "Expected FOR JSON [AUTO | PATH ]".to_string(),
13096 ));
13097 };
13098 let mut root = None;
13099 let mut include_null_values = false;
13100 let mut without_array_wrapper = false;
13101 while self.peek_token().token == Token::Comma {
13102 self.next_token();
13103 if self.parse_keyword(Keyword::ROOT) {
13104 self.expect_token(&Token::LParen)?;
13105 root = Some(self.parse_literal_string()?);
13106 self.expect_token(&Token::RParen)?;
13107 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
13108 include_null_values = true;
13109 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
13110 without_array_wrapper = true;
13111 }
13112 }
13113 Ok(ForClause::Json {
13114 for_json,
13115 root,
13116 include_null_values,
13117 without_array_wrapper,
13118 })
13119 }
13120
13121 pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13123 let name = self.parse_identifier()?;
13124
13125 let mut cte = if self.parse_keyword(Keyword::AS) {
13126 let mut is_materialized = None;
13127 if dialect_of!(self is PostgreSqlDialect) {
13128 if self.parse_keyword(Keyword::MATERIALIZED) {
13129 is_materialized = Some(CteAsMaterialized::Materialized);
13130 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13131 is_materialized = Some(CteAsMaterialized::NotMaterialized);
13132 }
13133 }
13134 self.expect_token(&Token::LParen)?;
13135
13136 let query = self.parse_query()?;
13137 let closing_paren_token = self.expect_token(&Token::RParen)?;
13138
13139 let alias = TableAlias {
13140 explicit: false,
13141 name,
13142 columns: vec![],
13143 };
13144 Cte {
13145 alias,
13146 query,
13147 from: None,
13148 materialized: is_materialized,
13149 closing_paren_token: closing_paren_token.into(),
13150 }
13151 } else {
13152 let columns = self.parse_table_alias_column_defs()?;
13153 self.expect_keyword_is(Keyword::AS)?;
13154 let mut is_materialized = None;
13155 if dialect_of!(self is PostgreSqlDialect) {
13156 if self.parse_keyword(Keyword::MATERIALIZED) {
13157 is_materialized = Some(CteAsMaterialized::Materialized);
13158 } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13159 is_materialized = Some(CteAsMaterialized::NotMaterialized);
13160 }
13161 }
13162 self.expect_token(&Token::LParen)?;
13163
13164 let query = self.parse_query()?;
13165 let closing_paren_token = self.expect_token(&Token::RParen)?;
13166
13167 let alias = TableAlias {
13168 explicit: false,
13169 name,
13170 columns,
13171 };
13172 Cte {
13173 alias,
13174 query,
13175 from: None,
13176 materialized: is_materialized,
13177 closing_paren_token: closing_paren_token.into(),
13178 }
13179 };
13180 if self.parse_keyword(Keyword::FROM) {
13181 cte.from = Some(self.parse_identifier()?);
13182 }
13183 Ok(cte)
13184 }
13185
    /// Parse a "query body": one of
    /// - `SELECT ...` (or a `FROM`-first select where the dialect allows it),
    /// - a parenthesized subquery `( query )`,
    /// - a `VALUES`/`VALUE` list, or
    /// - `TABLE <name>`,
    /// then fold in any trailing set operations via
    /// `parse_remaining_set_exprs`, respecting `precedence`.
    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
        let expr = if self.peek_keyword(Keyword::SELECT)
            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
        {
            SetExpr::Select(self.parse_select().map(Box::new)?)
        } else if self.consume_token(&Token::LParen) {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            SetExpr::Query(subquery)
        } else if self.parse_keyword(Keyword::VALUES) {
            // The MySQL flag is forwarded so `parse_values` can apply
            // MySQL-specific row syntax handling.
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, false)?)
        } else if self.parse_keyword(Keyword::VALUE) {
            let is_mysql = dialect_of!(self is MySqlDialect);
            SetExpr::Values(self.parse_values(is_mysql, true)?)
        } else if self.parse_keyword(Keyword::TABLE) {
            SetExpr::Table(Box::new(self.parse_as_table()?))
        } else {
            return self.expected(
                "SELECT, VALUES, or a subquery in the query body",
                self.peek_token(),
            );
        };

        self.parse_remaining_set_exprs(expr, precedence)
    }
13223
13224 fn parse_remaining_set_exprs(
13228 &mut self,
13229 mut expr: SetExpr,
13230 precedence: u8,
13231 ) -> Result<Box<SetExpr>, ParserError> {
13232 loop {
13233 let op = self.parse_set_operator(&self.peek_token().token);
13235 let next_precedence = match op {
13236 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13238 10
13239 }
13240 Some(SetOperator::Intersect) => 20,
13242 None => break,
13244 };
13245 if precedence >= next_precedence {
13246 break;
13247 }
13248 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
13250 expr = SetExpr::SetOperation {
13251 left: Box::new(expr),
13252 op: op.unwrap(),
13253 set_quantifier,
13254 right: self.parse_query_body(next_precedence)?,
13255 };
13256 }
13257
13258 Ok(expr.into())
13259 }
13260
13261 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13262 match token {
13263 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13264 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13265 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13266 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13267 _ => None,
13268 }
13269 }
13270
13271 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
13272 match op {
13273 Some(
13274 SetOperator::Except
13275 | SetOperator::Intersect
13276 | SetOperator::Union
13277 | SetOperator::Minus,
13278 ) => {
13279 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
13280 SetQuantifier::DistinctByName
13281 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13282 SetQuantifier::ByName
13283 } else if self.parse_keyword(Keyword::ALL) {
13284 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13285 SetQuantifier::AllByName
13286 } else {
13287 SetQuantifier::All
13288 }
13289 } else if self.parse_keyword(Keyword::DISTINCT) {
13290 SetQuantifier::Distinct
13291 } else {
13292 SetQuantifier::None
13293 }
13294 }
13295 _ => SetQuantifier::None,
13296 }
13297 }
13298
    /// Parse a single `SELECT` (no CTEs, set operations, or ORDER BY —
    /// those are handled by `parse_query`). Clause parsing order here is
    /// load-bearing: each `parse_keyword(s)` call consumes tokens.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        // Dialects with FROM-first selects allow `FROM t SELECT ...` and
        // even a bare `FROM t` with no SELECT at all.
        let mut from_first = None;

        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                // Bare `FROM t`: an otherwise-empty Select node.
                return Ok(Select {
                    select_token: AttachedToken(from_token),
                    distinct: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: None,
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        let value_table_mode = self.parse_value_table_mode()?;

        // `TOP` may come before or after DISTINCT depending on the dialect;
        // record which position was used so it can be round-tripped.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }
        let distinct = self.parse_all_or_distinct()?;
        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM t` with an empty projection.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Reuse the FROM clause parsed up front for FROM-first selects;
        // otherwise parse it here (it may be absent entirely). `from_first`
        // is rebound to a bool recording which form was seen.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Zero or more `LATERAL VIEW [OUTER] <expr> <name> [aliases]` clauses.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                // Column aliases stop at the listed clause keywords.
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
            && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; remember which came
        // first (`window_before_qualify`) for round-tripping.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        let connect_by = if self.dialect.supports_connect_by()
            && self
                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
                .is_some()
        {
            // Back up so `parse_connect_by` sees the keyword it dispatches on.
            self.prev_token();
            Some(self.parse_connect_by()?)
        } else {
            None
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            distinct,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
13518
13519 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
13520 if !dialect_of!(self is BigQueryDialect) {
13521 return Ok(None);
13522 }
13523
13524 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
13525 Some(ValueTableMode::DistinctAsValue)
13526 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
13527 Some(ValueTableMode::DistinctAsStruct)
13528 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
13529 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
13530 {
13531 Some(ValueTableMode::AsValue)
13532 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
13533 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
13534 {
13535 Some(ValueTableMode::AsStruct)
13536 } else if self.parse_keyword(Keyword::AS) {
13537 self.expected("VALUE or STRUCT", self.peek_token())?
13538 } else {
13539 None
13540 };
13541
13542 Ok(mode)
13543 }
13544
13545 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13549 where
13550 F: FnMut(&mut Parser) -> Result<T, ParserError>,
13551 {
13552 let current_state = self.state;
13553 self.state = state;
13554 let res = f(self);
13555 self.state = current_state;
13556 res
13557 }
13558
    /// Parse a hierarchical-query clause, accepting both orderings:
    /// `CONNECT BY ... START WITH ...` and `START WITH ... CONNECT BY ...`.
    /// The relationship expressions are parsed with the parser switched to
    /// `ParserState::ConnectBy` (see `with_state`).
    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
        // The two branches parse condition and relationships in opposite
        // orders because keyword consumption drives which comes first.
        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
                parser.parse_comma_separated(Parser::parse_expr)
            })?;
            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
            let condition = self.parse_expr()?;
            (condition, relationships)
        } else {
            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
            let condition = self.parse_expr()?;
            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
                parser.parse_comma_separated(Parser::parse_expr)
            })?;
            (condition, relationships)
        };
        Ok(ConnectBy {
            condition,
            relationships,
        })
    }
13581
13582 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13584 let token1 = self.next_token();
13585 let token2 = self.next_token();
13586 let token3 = self.next_token();
13587
13588 let table_name;
13589 let schema_name;
13590 if token2 == Token::Period {
13591 match token1.token {
13592 Token::Word(w) => {
13593 schema_name = w.value;
13594 }
13595 _ => {
13596 return self.expected("Schema name", token1);
13597 }
13598 }
13599 match token3.token {
13600 Token::Word(w) => {
13601 table_name = w.value;
13602 }
13603 _ => {
13604 return self.expected("Table name", token3);
13605 }
13606 }
13607 Ok(Table {
13608 table_name: Some(table_name),
13609 schema_name: Some(schema_name),
13610 })
13611 } else {
13612 match token1.token {
13613 Token::Word(w) => {
13614 table_name = w.value;
13615 }
13616 _ => {
13617 return self.expected("Table name", token1);
13618 }
13619 }
13620 Ok(Table {
13621 table_name: Some(table_name),
13622 schema_name: None,
13623 })
13624 }
13625 }
13626
13627 fn parse_set_role(
13629 &mut self,
13630 modifier: Option<ContextModifier>,
13631 ) -> Result<Statement, ParserError> {
13632 self.expect_keyword_is(Keyword::ROLE)?;
13633
13634 let role_name = if self.parse_keyword(Keyword::NONE) {
13635 None
13636 } else {
13637 Some(self.parse_identifier()?)
13638 };
13639 Ok(Statement::Set(Set::SetRole {
13640 context_modifier: modifier,
13641 role_name,
13642 }))
13643 }
13644
13645 fn parse_set_values(
13646 &mut self,
13647 parenthesized_assignment: bool,
13648 ) -> Result<Vec<Expr>, ParserError> {
13649 let mut values = vec![];
13650
13651 if parenthesized_assignment {
13652 self.expect_token(&Token::LParen)?;
13653 }
13654
13655 loop {
13656 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13657 expr
13658 } else if let Ok(expr) = self.parse_expr() {
13659 expr
13660 } else {
13661 self.expected("variable value", self.peek_token())?
13662 };
13663
13664 values.push(value);
13665 if self.consume_token(&Token::Comma) {
13666 continue;
13667 }
13668
13669 if parenthesized_assignment {
13670 self.expect_token(&Token::RParen)?;
13671 }
13672 return Ok(values);
13673 }
13674 }
13675
13676 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13677 let modifier =
13678 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13679
13680 Self::keyword_to_modifier(modifier)
13681 }
13682
13683 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13685 let scope = self.parse_context_modifier();
13686
13687 let name = if self.dialect.supports_parenthesized_set_variables()
13688 && self.consume_token(&Token::LParen)
13689 {
13690 self.expected("Unparenthesized assignment", self.peek_token())?
13694 } else {
13695 self.parse_object_name(false)?
13696 };
13697
13698 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13699 return self.expected("assignment operator", self.peek_token());
13700 }
13701
13702 let value = self.parse_expr()?;
13703
13704 Ok(SetAssignment { scope, name, value })
13705 }
13706
13707 fn parse_set(&mut self) -> Result<Statement, ParserError> {
13708 let hivevar = self.parse_keyword(Keyword::HIVEVAR);
13709
13710 let scope = if !hivevar {
13712 self.parse_context_modifier()
13713 } else {
13714 None
13715 };
13716
13717 if hivevar {
13718 self.expect_token(&Token::Colon)?;
13719 }
13720
13721 if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
13722 return Ok(set_role_stmt);
13723 }
13724
13725 if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
13727 || self.parse_keyword(Keyword::TIMEZONE)
13728 {
13729 if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13730 return Ok(Set::SingleAssignment {
13731 scope,
13732 hivevar,
13733 variable: ObjectName::from(vec!["TIMEZONE".into()]),
13734 values: self.parse_set_values(false)?,
13735 }
13736 .into());
13737 } else {
13738 return Ok(Set::SetTimeZone {
13742 local: scope == Some(ContextModifier::Local),
13743 value: self.parse_expr()?,
13744 }
13745 .into());
13746 }
13747 } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
13748 if self.parse_keyword(Keyword::DEFAULT) {
13749 return Ok(Set::SetNamesDefault {}.into());
13750 }
13751 let charset_name = self.parse_identifier()?;
13752 let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
13753 Some(self.parse_literal_string()?)
13754 } else {
13755 None
13756 };
13757
13758 return Ok(Set::SetNames {
13759 charset_name,
13760 collation_name,
13761 }
13762 .into());
13763 } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
13764 self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
13765 return Ok(Set::SetTransaction {
13766 modes: self.parse_transaction_modes()?,
13767 snapshot: None,
13768 session: true,
13769 }
13770 .into());
13771 } else if self.parse_keyword(Keyword::TRANSACTION) {
13772 if self.parse_keyword(Keyword::SNAPSHOT) {
13773 let snapshot_id = self.parse_value()?.value;
13774 return Ok(Set::SetTransaction {
13775 modes: vec![],
13776 snapshot: Some(snapshot_id),
13777 session: false,
13778 }
13779 .into());
13780 }
13781 return Ok(Set::SetTransaction {
13782 modes: self.parse_transaction_modes()?,
13783 snapshot: None,
13784 session: false,
13785 }
13786 .into());
13787 } else if self.parse_keyword(Keyword::AUTHORIZATION) {
13788 let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
13789 SetSessionAuthorizationParamKind::Default
13790 } else {
13791 let value = self.parse_identifier()?;
13792 SetSessionAuthorizationParamKind::User(value)
13793 };
13794 return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
13795 scope: scope.expect("SET ... AUTHORIZATION must have a scope"),
13796 kind: auth_value,
13797 })
13798 .into());
13799 }
13800
13801 if self.dialect.supports_comma_separated_set_assignments() {
13802 if scope.is_some() {
13803 self.prev_token();
13804 }
13805
13806 if let Some(assignments) = self
13807 .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
13808 {
13809 return if assignments.len() > 1 {
13810 Ok(Set::MultipleAssignments { assignments }.into())
13811 } else {
13812 let SetAssignment { scope, name, value } =
13813 assignments.into_iter().next().ok_or_else(|| {
13814 ParserError::ParserError("Expected at least one assignment".to_string())
13815 })?;
13816
13817 Ok(Set::SingleAssignment {
13818 scope,
13819 hivevar,
13820 variable: name,
13821 values: vec![value],
13822 }
13823 .into())
13824 };
13825 }
13826 }
13827
13828 let variables = if self.dialect.supports_parenthesized_set_variables()
13829 && self.consume_token(&Token::LParen)
13830 {
13831 let vars = OneOrManyWithParens::Many(
13832 self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
13833 .into_iter()
13834 .map(|ident| ObjectName::from(vec![ident]))
13835 .collect(),
13836 );
13837 self.expect_token(&Token::RParen)?;
13838 vars
13839 } else {
13840 OneOrManyWithParens::One(self.parse_object_name(false)?)
13841 };
13842
13843 if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13844 let stmt = match variables {
13845 OneOrManyWithParens::One(var) => Set::SingleAssignment {
13846 scope,
13847 hivevar,
13848 variable: var,
13849 values: self.parse_set_values(false)?,
13850 },
13851 OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
13852 variables: vars,
13853 values: self.parse_set_values(true)?,
13854 },
13855 };
13856
13857 return Ok(stmt.into());
13858 }
13859
13860 if self.dialect.supports_set_stmt_without_operator() {
13861 self.prev_token();
13862 return self.parse_set_session_params();
13863 };
13864
13865 self.expected("equals sign or TO", self.peek_token())
13866 }
13867
    /// Parses MSSQL-style session-parameter statements that follow a bare
    /// `SET` without an assignment operator, e.g. `SET STATISTICS IO ON`,
    /// `SET IDENTITY_INSERT tbl ON`, `SET OFFSETS select, from ON`, or a
    /// generic comma-separated list of parameter names followed by a value.
    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::STATISTICS) {
            // `SET STATISTICS <IO|PROFILE|TIME|XML> ON|OFF`
            let topic = match self.parse_one_of_keywords(&[
                Keyword::IO,
                Keyword::PROFILE,
                Keyword::TIME,
                Keyword::XML,
            ]) {
                Some(Keyword::IO) => SessionParamStatsTopic::IO,
                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
            };
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
                    topic,
                    value,
                }))
                .into(),
            )
        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
            // `SET IDENTITY_INSERT <table> ON|OFF`
            let obj = self.parse_object_name(false)?;
            let value = self.parse_session_param_value()?;
            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
                SetSessionParamIdentityInsert { obj, value },
            ))
            .into())
        } else if self.parse_keyword(Keyword::OFFSETS) {
            // `SET OFFSETS <keyword>[, ...] ON|OFF`; the keywords are kept as
            // raw strings since any SQL keyword is permitted in the list.
            let keywords = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match &next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("SQL keyword", next_token),
                }
            })?;
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
                    keywords,
                    value,
                }))
                .into(),
            )
        } else {
            // Fallback: `SET <name>[, ...] <expr>` with arbitrary parameter
            // names; the value is stored in its textual (Display) form.
            let names = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("Session param name", next_token),
                }
            })?;
            let value = self.parse_expr()?.to_string();
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
                    names,
                    value,
                }))
                .into(),
            )
        }
    }
13931
13932 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
13933 if self.parse_keyword(Keyword::ON) {
13934 Ok(SessionParamValue::On)
13935 } else if self.parse_keyword(Keyword::OFF) {
13936 Ok(SessionParamValue::Off)
13937 } else {
13938 self.expected("ON or OFF", self.peek_token())
13939 }
13940 }
13941
    /// Parses a `SHOW ...` statement (the `SHOW` keyword has already been
    /// consumed). Optional leading modifiers are consumed first, then the
    /// object kind selects the specific SHOW variant; anything unrecognized
    /// falls back to `Statement::ShowVariable`.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        // Optional modifiers; each is consumed at most once, in this order.
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if extended || full {
            // EXTENDED/FULL only combine with the COLUMNS/TABLES/VIEWS forms above.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): if VARIABLES parses but the dialect check fails,
            // the keyword has already been consumed before falling through —
            // confirm this is the intended behavior.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Fallback: `SHOW <ident>...` displayed as a bare variable.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
14000
14001 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
14002 Ok(Statement::ShowCharset(ShowCharset {
14004 is_shorthand,
14005 filter: self.parse_show_statement_filter()?,
14006 }))
14007 }
14008
14009 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
14010 let history = self.parse_keyword(Keyword::HISTORY);
14011 let show_options = self.parse_show_stmt_options()?;
14012 Ok(Statement::ShowDatabases {
14013 terse,
14014 history,
14015 show_options,
14016 })
14017 }
14018
14019 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
14020 let history = self.parse_keyword(Keyword::HISTORY);
14021 let show_options = self.parse_show_stmt_options()?;
14022 Ok(Statement::ShowSchemas {
14023 terse,
14024 history,
14025 show_options,
14026 })
14027 }
14028
14029 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
14030 let obj_type = match self.expect_one_of_keywords(&[
14031 Keyword::TABLE,
14032 Keyword::TRIGGER,
14033 Keyword::FUNCTION,
14034 Keyword::PROCEDURE,
14035 Keyword::EVENT,
14036 Keyword::VIEW,
14037 ])? {
14038 Keyword::TABLE => Ok(ShowCreateObject::Table),
14039 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
14040 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
14041 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
14042 Keyword::EVENT => Ok(ShowCreateObject::Event),
14043 Keyword::VIEW => Ok(ShowCreateObject::View),
14044 keyword => Err(ParserError::ParserError(format!(
14045 "Unable to map keyword to ShowCreateObject: {keyword:?}"
14046 ))),
14047 }?;
14048
14049 let obj_name = self.parse_object_name(false)?;
14050
14051 Ok(Statement::ShowCreate { obj_type, obj_name })
14052 }
14053
14054 pub fn parse_show_columns(
14055 &mut self,
14056 extended: bool,
14057 full: bool,
14058 ) -> Result<Statement, ParserError> {
14059 let show_options = self.parse_show_stmt_options()?;
14060 Ok(Statement::ShowColumns {
14061 extended,
14062 full,
14063 show_options,
14064 })
14065 }
14066
14067 fn parse_show_tables(
14068 &mut self,
14069 terse: bool,
14070 extended: bool,
14071 full: bool,
14072 external: bool,
14073 ) -> Result<Statement, ParserError> {
14074 let history = !external && self.parse_keyword(Keyword::HISTORY);
14075 let show_options = self.parse_show_stmt_options()?;
14076 Ok(Statement::ShowTables {
14077 terse,
14078 history,
14079 extended,
14080 full,
14081 external,
14082 show_options,
14083 })
14084 }
14085
14086 fn parse_show_views(
14087 &mut self,
14088 terse: bool,
14089 materialized: bool,
14090 ) -> Result<Statement, ParserError> {
14091 let show_options = self.parse_show_stmt_options()?;
14092 Ok(Statement::ShowViews {
14093 materialized,
14094 terse,
14095 show_options,
14096 })
14097 }
14098
14099 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
14100 let filter = self.parse_show_statement_filter()?;
14101 Ok(Statement::ShowFunctions { filter })
14102 }
14103
14104 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
14105 let filter = self.parse_show_statement_filter()?;
14106 Ok(Statement::ShowCollation { filter })
14107 }
14108
14109 pub fn parse_show_statement_filter(
14110 &mut self,
14111 ) -> Result<Option<ShowStatementFilter>, ParserError> {
14112 if self.parse_keyword(Keyword::LIKE) {
14113 Ok(Some(ShowStatementFilter::Like(
14114 self.parse_literal_string()?,
14115 )))
14116 } else if self.parse_keyword(Keyword::ILIKE) {
14117 Ok(Some(ShowStatementFilter::ILike(
14118 self.parse_literal_string()?,
14119 )))
14120 } else if self.parse_keyword(Keyword::WHERE) {
14121 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14122 } else {
14123 self.maybe_parse(|parser| -> Result<String, ParserError> {
14124 parser.parse_literal_string()
14125 })?
14126 .map_or(Ok(None), |filter| {
14127 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14128 })
14129 }
14130 }
14131
14132 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14133 let parsed_keyword = if dialect_of!(self is HiveDialect) {
14135 if self.parse_keyword(Keyword::DEFAULT) {
14137 return Ok(Statement::Use(Use::Default));
14138 }
14139 None } else if dialect_of!(self is DatabricksDialect) {
14141 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14142 } else if dialect_of!(self is SnowflakeDialect) {
14143 self.parse_one_of_keywords(&[
14144 Keyword::DATABASE,
14145 Keyword::SCHEMA,
14146 Keyword::WAREHOUSE,
14147 Keyword::ROLE,
14148 Keyword::SECONDARY,
14149 ])
14150 } else {
14151 None };
14153
14154 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14155 self.parse_secondary_roles()?
14156 } else {
14157 let obj_name = self.parse_object_name(false)?;
14158 match parsed_keyword {
14159 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14160 Some(Keyword::DATABASE) => Use::Database(obj_name),
14161 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14162 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14163 Some(Keyword::ROLE) => Use::Role(obj_name),
14164 _ => Use::Object(obj_name),
14165 }
14166 };
14167
14168 Ok(Statement::Use(result))
14169 }
14170
14171 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14172 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14173 if self.parse_keyword(Keyword::NONE) {
14174 Ok(Use::SecondaryRoles(SecondaryRoles::None))
14175 } else if self.parse_keyword(Keyword::ALL) {
14176 Ok(Use::SecondaryRoles(SecondaryRoles::All))
14177 } else {
14178 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14179 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14180 }
14181 }
14182
14183 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14184 let relation = self.parse_table_factor()?;
14185 let joins = self.parse_joins()?;
14189 Ok(TableWithJoins { relation, joins })
14190 }
14191
    /// Parses zero or more JOIN clauses following a table factor; returns an
    /// empty vec when the next token does not begin a join.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // ClickHouse-style `GLOBAL` prefix may precede any join keyword.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    // MSSQL `CROSS APPLY`.
                    JoinOperator::CrossApply
                } else {
                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects allow an ON/USING constraint on CROSS JOIN.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // MSSQL `OUTER APPLY`.
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // Snowflake `ASOF JOIN` with a mandatory MATCH_CONDITION.
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) to decide the join type; the
                // `break` arm below relies on nothing having been consumed.
                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a JoinOperator constructor that is applied
                // to the join constraint parsed after the relation.
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        // `[INNER] JOIN`.
                        let inner = self.parse_keyword(Keyword::INNER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the LEFT/RIGHT keyword itself.
                        let _ = self.next_token();
                        let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token();
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // `FULL [OUTER] JOIN` — OUTER is optional.
                        let _ = self.next_token();
                        let _ = self.parse_keyword(Keyword::OUTER);
                        self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        // MySQL `STRAIGHT_JOIN`.
                        let _ = self.next_token();
                        JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self.expected("a join type after NATURAL", self.peek_token());
                    }
                    // Not a join keyword: end of the join list.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                // For dialects where joins are not left-associative without
                // parentheses, a following join keyword starts a nested join
                // bound to the relation just parsed.
                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
14359
14360 fn peek_parens_less_nested_join(&self) -> bool {
14361 matches!(
14362 self.peek_token_ref().token,
14363 Token::Word(Word {
14364 keyword: Keyword::JOIN
14365 | Keyword::INNER
14366 | Keyword::LEFT
14367 | Keyword::RIGHT
14368 | Keyword::FULL,
14369 ..
14370 })
14371 )
14372 }
14373
    /// Parses a table factor: the base unit of a FROM clause. Handles
    /// LATERAL subqueries/functions, `TABLE(expr)`, parenthesized derived
    /// tables and nested joins, inline `VALUES`, `UNNEST`, `JSON_TABLE`,
    /// `OPENJSON`, `XMLTABLE`, `SEMANTIC_VIEW`, and plain table names with
    /// optional partitions, args, sampling, hints, PIVOT/UNPIVOT and
    /// MATCH_RECOGNIZE suffixes.
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        if self.parse_keyword(Keyword::LATERAL) {
            // LATERAL must be followed by a subquery or a table function.
            if self.consume_token(&Token::LParen) {
                self.parse_derived_table_factor(Lateral)
            } else {
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // `TABLE(<expr>)` table function.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // First try `(<derived table>) [PIVOT|UNPIVOT ...]`; maybe_parse
            // rolls back if the parenthesized content is not a subquery.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            // Otherwise the parens wrap a (possibly joined) table.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // `(a <join> b ...)`: a genuine nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                }) } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // `((a NATURAL JOIN b))`: an already-nested join re-wrapped
                // in parentheses.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
                // These dialects tolerate stray parens around a lone table
                // factor; an alias after the parens attaches to the inner
                // relation.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            // Both an inner and an outer alias is an error.
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                // The extra parens are not represented in the AST.
                Ok(table_and_joins.relation)
            } else {
                self.expected("joined table", self.peek_token())
            }
        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // Inline `VALUES (...)` used directly as a table factor; wrapped
            // in a derived-table Query node.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // `UNNEST(expr[, ...]) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]`
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // NOTE(review): expect_keywords is used here to probe an optional
            // clause; assumes it consumes nothing on failure — confirm.
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // `JSON_TABLE(<expr>, <path> COLUMNS (...))`
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?.value;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Back up so the dedicated parser sees the LParen again.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else {
            // Plain table name with its many optional suffixes.
            let name = self.parse_object_name(true)?;

            // PartiQL allows a JSON path directly after the name.
            let json_path = match self.peek_token().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            // MySQL `PARTITION (p1, p2, ...)` selection.
            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            // Time-travel clause (e.g. `AT`/`BEFORE`), when supported.
            let version = self.maybe_parse_table_version()?;

            // Table-valued function arguments: `name(arg, ...)`.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Sampling may come before or after the alias, per dialect.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            // MySQL index hints (`USE INDEX ...`), parsed speculatively.
            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // MSSQL `WITH (NOLOCK, ...)` hints; a bare WITH that is not
            // followed by parens is pushed back (it starts another clause).
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Any number of PIVOT/UNPIVOT transformations may be chained.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            // Snowflake MATCH_RECOGNIZE suffix.
            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
14704
14705 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14706 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14707 TableSampleModifier::TableSample
14708 } else if self.parse_keyword(Keyword::SAMPLE) {
14709 TableSampleModifier::Sample
14710 } else {
14711 return Ok(None);
14712 };
14713 self.parse_table_sample(modifier).map(Some)
14714 }
14715
    /// Parse the body of a `TABLESAMPLE`/`SAMPLE` clause (the introducing
    /// keyword was consumed by the caller and determines `modifier`):
    /// an optional sampling method, then either a Hive-style
    /// `(BUCKET n OUT OF m [ON expr])` spec or a quantity with an optional
    /// ROWS/PERCENT unit, plus optional REPEATABLE/SEED and OFFSET suffixes.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method keyword.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        // The quantity/bucket spec may or may not be parenthesized; remember
        // which so we can both demand the matching `)` and record the style.
        let parenthesized = self.consume_token(&Token::LParen);

        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            // Hive bucket sampling: BUCKET <n> OUT OF <m> [ON <expr>].
            let selected_bucket = self.parse_number_value()?.value;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?.value;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    // Not an expression: accept a bare word (e.g. a byte
                    // length like `100M`) as a placeholder value.
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token().span.start
                        );
                    }
                }
            };
            // Optional unit for the quantity.
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic seed: REPEATABLE(<n>) or SEED(<n>).
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional OFFSET <expr>.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
14810
14811 fn parse_table_sample_seed(
14812 &mut self,
14813 modifier: TableSampleSeedModifier,
14814 ) -> Result<TableSampleSeed, ParserError> {
14815 self.expect_token(&Token::LParen)?;
14816 let value = self.parse_number_value()?.value;
14817 self.expect_token(&Token::RParen)?;
14818 Ok(TableSampleSeed { modifier, value })
14819 }
14820
14821 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14824 self.expect_token(&Token::LParen)?;
14825 let json_expr = self.parse_expr()?;
14826 let json_path = if self.consume_token(&Token::Comma) {
14827 Some(self.parse_value()?.value)
14828 } else {
14829 None
14830 };
14831 self.expect_token(&Token::RParen)?;
14832 let columns = if self.parse_keyword(Keyword::WITH) {
14833 self.expect_token(&Token::LParen)?;
14834 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
14835 self.expect_token(&Token::RParen)?;
14836 columns
14837 } else {
14838 Vec::new()
14839 };
14840 let alias = self.maybe_parse_table_alias()?;
14841 Ok(TableFactor::OpenJsonTable {
14842 json_expr,
14843 json_path,
14844 columns,
14845 alias,
14846 })
14847 }
14848
14849 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14850 self.expect_token(&Token::LParen)?;
14851 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
14852 self.expect_token(&Token::LParen)?;
14853 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
14854 self.expect_token(&Token::RParen)?;
14855 self.expect_token(&Token::Comma)?;
14856 namespaces
14857 } else {
14858 vec![]
14859 };
14860 let row_expression = self.parse_expr()?;
14861 let passing = self.parse_xml_passing_clause()?;
14862 self.expect_keyword_is(Keyword::COLUMNS)?;
14863 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
14864 self.expect_token(&Token::RParen)?;
14865 let alias = self.maybe_parse_table_alias()?;
14866 Ok(TableFactor::XmlTable {
14867 namespaces,
14868 row_expression,
14869 passing,
14870 columns,
14871 alias,
14872 })
14873 }
14874
14875 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
14876 let uri = self.parse_expr()?;
14877 self.expect_keyword_is(Keyword::AS)?;
14878 let name = self.parse_identifier()?;
14879 Ok(XmlNamespaceDefinition { uri, name })
14880 }
14881
14882 fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
14883 let name = self.parse_identifier()?;
14884
14885 let option = if self.parse_keyword(Keyword::FOR) {
14886 self.expect_keyword(Keyword::ORDINALITY)?;
14887 XmlTableColumnOption::ForOrdinality
14888 } else {
14889 let r#type = self.parse_data_type()?;
14890 let mut path = None;
14891 let mut default = None;
14892
14893 if self.parse_keyword(Keyword::PATH) {
14894 path = Some(self.parse_expr()?);
14895 }
14896
14897 if self.parse_keyword(Keyword::DEFAULT) {
14898 default = Some(self.parse_expr()?);
14899 }
14900
14901 let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
14902 if !not_null {
14903 let _ = self.parse_keyword(Keyword::NULL);
14905 }
14906
14907 XmlTableColumnOption::NamedInfo {
14908 r#type,
14909 path,
14910 default,
14911 nullable: !not_null,
14912 }
14913 };
14914 Ok(XmlTableColumn { name, option })
14915 }
14916
14917 fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
14918 let mut arguments = vec![];
14919 if self.parse_keyword(Keyword::PASSING) {
14920 loop {
14921 let by_value =
14922 self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
14923 let expr = self.parse_expr()?;
14924 let alias = if self.parse_keyword(Keyword::AS) {
14925 Some(self.parse_identifier()?)
14926 } else {
14927 None
14928 };
14929 arguments.push(XmlPassingArgument {
14930 expr,
14931 alias,
14932 by_value,
14933 });
14934 if !self.consume_token(&Token::Comma) {
14935 break;
14936 }
14937 }
14938 }
14939 Ok(XmlPassingClause { arguments })
14940 }
14941
14942 fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14944 self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
14945 self.expect_token(&Token::LParen)?;
14946
14947 let name = self.parse_object_name(true)?;
14948
14949 let mut dimensions = Vec::new();
14951 let mut metrics = Vec::new();
14952 let mut facts = Vec::new();
14953 let mut where_clause = None;
14954
14955 while self.peek_token().token != Token::RParen {
14956 if self.parse_keyword(Keyword::DIMENSIONS) {
14957 if !dimensions.is_empty() {
14958 return Err(ParserError::ParserError(
14959 "DIMENSIONS clause can only be specified once".to_string(),
14960 ));
14961 }
14962 dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14963 } else if self.parse_keyword(Keyword::METRICS) {
14964 if !metrics.is_empty() {
14965 return Err(ParserError::ParserError(
14966 "METRICS clause can only be specified once".to_string(),
14967 ));
14968 }
14969 metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14970 } else if self.parse_keyword(Keyword::FACTS) {
14971 if !facts.is_empty() {
14972 return Err(ParserError::ParserError(
14973 "FACTS clause can only be specified once".to_string(),
14974 ));
14975 }
14976 facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14977 } else if self.parse_keyword(Keyword::WHERE) {
14978 if where_clause.is_some() {
14979 return Err(ParserError::ParserError(
14980 "WHERE clause can only be specified once".to_string(),
14981 ));
14982 }
14983 where_clause = Some(self.parse_expr()?);
14984 } else {
14985 return parser_err!(
14986 format!(
14987 "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
14988 self.peek_token().token
14989 ),
14990 self.peek_token().span.start
14991 )?;
14992 }
14993 }
14994
14995 self.expect_token(&Token::RParen)?;
14996
14997 let alias = self.maybe_parse_table_alias()?;
14998
14999 Ok(TableFactor::SemanticView {
15000 name,
15001 dimensions,
15002 metrics,
15003 facts,
15004 where_clause,
15005 alias,
15006 })
15007 }
15008
    /// Parse a `MATCH_RECOGNIZE (...)` clause wrapping `table` (the
    /// `MATCH_RECOGNIZE` keyword has already been consumed by the caller):
    /// optional PARTITION BY / ORDER BY / MEASURES, rows-per-match and
    /// after-match-skip options, then the mandatory PATTERN and DEFINE
    /// clauses, followed by an optional table alias.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // MEASURES: aliased expressions; the AS keyword is optional.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // ONE ROW PER MATCH | ALL ROWS PER MATCH [empty-match mode].
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // AFTER MATCH SKIP (PAST LAST ROW | TO NEXT ROW | TO FIRST <sym> | TO LAST <sym>).
        // Once AFTER MATCH SKIP has been seen, one of the four options is mandatory.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        self.expect_keyword_is(Keyword::DEFINE)?;

        // DEFINE: `symbol AS <expr>` definitions for the pattern symbols.
        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
15113
    /// Parse a primary (unrepeated) MATCH_RECOGNIZE pattern element: the `^`
    /// or `$` anchor, an exclusion `{- symbol -}`, `PERMUTE(...)`, a
    /// parenthesized group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` reaches the parser as a placeholder token; treat it as the
            // end anchor.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            Token::LBrace => {
                // Exclusion syntax: `{- symbol -}`.
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // PERMUTE is recognized as a bare unquoted word rather than a
            // reserved keyword.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            _ => {
                // Anything else: rewind and try to read it as a symbol name.
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
15152
    /// Parse a base pattern followed by any number of repetition quantifiers:
    /// `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}` or `{n,m}`. Each quantifier wraps
    /// the pattern accumulated so far.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` reaches the parser as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    // Brace quantifier: dispatch on the token after `{`.
                    // NOTE: this `token` shadows the outer one, so the spans
                    // passed to `Self::parse` below refer to this inner token.
                    let token = self.next_token();
                    match token.token {
                        Token::Comma => {
                            // `{,m}` — upper bound only.
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            // `{n,}` or `{n,m}` — decided by the token after
                            // the comma.
                            let next_token = self.next_token();
                            match next_token.token {
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        Token::Number(n, _) => {
                            // `{n}` — exact repetition count.
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                _ => {
                    // Not a quantifier: rewind and stop collecting.
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
15207
15208 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15209 let mut patterns = vec![self.parse_repetition_pattern()?];
15210 while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15211 patterns.push(self.parse_repetition_pattern()?);
15212 }
15213 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15214 Ok([pattern]) => Ok(pattern),
15215 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15216 }
15217 }
15218
15219 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15220 let pattern = self.parse_concat_pattern()?;
15221 if self.consume_token(&Token::Pipe) {
15222 match self.parse_pattern()? {
15223 MatchRecognizePattern::Alternation(mut patterns) => {
15225 patterns.insert(0, pattern);
15226 Ok(MatchRecognizePattern::Alternation(patterns))
15227 }
15228 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15229 }
15230 } else {
15231 Ok(pattern)
15232 }
15233 }
15234
15235 pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15237 if self.dialect.supports_timestamp_versioning() {
15238 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15239 {
15240 let expr = self.parse_expr()?;
15241 return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
15242 } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
15243 let func_name = self.parse_object_name(true)?;
15244 let func = self.parse_function(func_name)?;
15245 return Ok(Some(TableVersion::Function(func)));
15246 }
15247 }
15248 Ok(None)
15249 }
15250
    /// Parse one column definition inside `JSON_TABLE(... COLUMNS (...))`:
    /// a `NESTED [PATH] <path> COLUMNS (...)` sub-list, a
    /// `<name> FOR ORDINALITY` column, or a named column with a type, an
    /// optional EXISTS flag, a mandatory PATH value, and optional
    /// ON EMPTY / ON ERROR handling.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword before the path value is optional.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?.value;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?.value;
        let mut on_empty = None;
        let mut on_error = None;
        // Zero or more handlers: each is `<handling> ON EMPTY` or
        // `<handling> ON ERROR` (in any order); a repeated clause overwrites
        // the previous one.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
15294
15295 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15303 let name = self.parse_identifier()?;
15304 let r#type = self.parse_data_type()?;
15305 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15306 self.next_token();
15307 Some(path)
15308 } else {
15309 None
15310 };
15311 let as_json = self.parse_keyword(Keyword::AS);
15312 if as_json {
15313 self.expect_keyword_is(Keyword::JSON)?;
15314 }
15315 Ok(OpenJsonTableColumn {
15316 name,
15317 r#type,
15318 path,
15319 as_json,
15320 })
15321 }
15322
15323 fn parse_json_table_column_error_handling(
15324 &mut self,
15325 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15326 let res = if self.parse_keyword(Keyword::NULL) {
15327 JsonTableColumnErrorHandling::Null
15328 } else if self.parse_keyword(Keyword::ERROR) {
15329 JsonTableColumnErrorHandling::Error
15330 } else if self.parse_keyword(Keyword::DEFAULT) {
15331 JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15332 } else {
15333 return Ok(None);
15334 };
15335 self.expect_keyword_is(Keyword::ON)?;
15336 Ok(Some(res))
15337 }
15338
15339 pub fn parse_derived_table_factor(
15340 &mut self,
15341 lateral: IsLateral,
15342 ) -> Result<TableFactor, ParserError> {
15343 let subquery = self.parse_query()?;
15344 self.expect_token(&Token::RParen)?;
15345 let alias = self.maybe_parse_table_alias()?;
15346 Ok(TableFactor::Derived {
15347 lateral: match lateral {
15348 Lateral => true,
15349 NotLateral => false,
15350 },
15351 subquery,
15352 alias,
15353 })
15354 }
15355
15356 fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15357 let function_name = match self.next_token().token {
15358 Token::Word(w) => Ok(w.value),
15359 _ => self.expected("a function identifier", self.peek_token()),
15360 }?;
15361 let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15362 let alias = if self.parse_keyword(Keyword::AS) {
15363 Some(self.parse_identifier()?)
15364 } else {
15365 None
15366 };
15367
15368 Ok(ExprWithAlias { expr, alias })
15369 }
15370 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15393 let expr = self.parse_expr()?;
15394 let alias = if self.parse_keyword(Keyword::AS) {
15395 Some(self.parse_identifier()?)
15396 } else {
15397 None
15398 };
15399
15400 Ok(ExprWithAlias { expr, alias })
15401 }
15402
    /// Parse the parenthesized remainder of a `PIVOT` table factor applied to
    /// `table` (the `PIVOT` keyword has already been consumed by the caller):
    /// aggregate functions, `FOR <column(s)> IN (<values>)`, an optional
    /// `DEFAULT ON NULL (<expr>)`, and an optional alias.
    pub fn parse_pivot_table_factor(
        &mut self,
        table: TableFactor,
    ) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;
        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
        self.expect_keyword_is(Keyword::FOR)?;
        // FOR column(s): a parenthesized list or a single expression. Both
        // are parsed at `Between` precedence — presumably so the following
        // `IN` keyword is not consumed as part of the expression (confirm).
        let value_column = if self.peek_token_ref().token == Token::LParen {
            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
            })?
        } else {
            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
        };
        self.expect_keyword_is(Keyword::IN)?;

        self.expect_token(&Token::LParen)?;
        let value_source = if self.parse_keyword(Keyword::ANY) {
            // IN (ANY [ORDER BY ...])
            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                self.parse_comma_separated(Parser::parse_order_by_expr)?
            } else {
                vec![]
            };
            PivotValueSource::Any(order_by)
        } else if self.peek_sub_query() {
            // IN (SELECT ...)
            PivotValueSource::Subquery(self.parse_query()?)
        } else {
            // IN (expr [AS alias], ...)
            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
        };
        self.expect_token(&Token::RParen)?;

        // Optional DEFAULT ON NULL (<expr>).
        let default_on_null =
            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
                self.expect_token(&Token::LParen)?;
                let expr = self.parse_expr()?;
                self.expect_token(&Token::RParen)?;
                Some(expr)
            } else {
                None
            };

        self.expect_token(&Token::RParen)?;
        let alias = self.maybe_parse_table_alias()?;
        Ok(TableFactor::Pivot {
            table: Box::new(table),
            aggregate_functions,
            value_column,
            value_source,
            default_on_null,
            alias,
        })
    }
15455
15456 pub fn parse_unpivot_table_factor(
15457 &mut self,
15458 table: TableFactor,
15459 ) -> Result<TableFactor, ParserError> {
15460 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15461 self.expect_keyword_is(Keyword::NULLS)?;
15462 Some(NullInclusion::IncludeNulls)
15463 } else if self.parse_keyword(Keyword::EXCLUDE) {
15464 self.expect_keyword_is(Keyword::NULLS)?;
15465 Some(NullInclusion::ExcludeNulls)
15466 } else {
15467 None
15468 };
15469 self.expect_token(&Token::LParen)?;
15470 let value = self.parse_expr()?;
15471 self.expect_keyword_is(Keyword::FOR)?;
15472 let name = self.parse_identifier()?;
15473 self.expect_keyword_is(Keyword::IN)?;
15474 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15475 p.parse_expr_with_alias()
15476 })?;
15477 self.expect_token(&Token::RParen)?;
15478 let alias = self.maybe_parse_table_alias()?;
15479 Ok(TableFactor::Unpivot {
15480 table: Box::new(table),
15481 value,
15482 null_inclusion,
15483 name,
15484 columns,
15485 alias,
15486 })
15487 }
15488
15489 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15490 if natural {
15491 Ok(JoinConstraint::Natural)
15492 } else if self.parse_keyword(Keyword::ON) {
15493 let constraint = self.parse_expr()?;
15494 Ok(JoinConstraint::On(constraint))
15495 } else if self.parse_keyword(Keyword::USING) {
15496 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15497 Ok(JoinConstraint::Using(columns))
15498 } else {
15499 Ok(JoinConstraint::None)
15500 }
15502 }
15503
15504 pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15506 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15507
15508 self.expect_keyword_is(Keyword::TO)?;
15509 let grantees = self.parse_grantees()?;
15510
15511 let with_grant_option =
15512 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15513
15514 let current_grants =
15515 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15516 Some(CurrentGrantsKind::CopyCurrentGrants)
15517 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15518 Some(CurrentGrantsKind::RevokeCurrentGrants)
15519 } else {
15520 None
15521 };
15522
15523 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15524 Some(self.parse_identifier()?)
15525 } else {
15526 None
15527 };
15528
15529 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15530 Some(self.parse_identifier()?)
15531 } else {
15532 None
15533 };
15534
15535 Ok(Statement::Grant {
15536 privileges,
15537 objects,
15538 grantees,
15539 with_grant_option,
15540 as_grantor,
15541 granted_by,
15542 current_grants,
15543 })
15544 }
15545
    /// Parse a comma-separated list of grantees for GRANT/REVOKE.
    ///
    /// Each grantee may carry an explicit type prefix (ROLE, USER, SHARE,
    /// GROUP, PUBLIC, DATABASE ROLE, APPLICATION [ROLE]); once seen, that
    /// prefix carries over to later grantees in the list that omit it.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            // Determine this grantee's type; fall back to the type inherited
            // from the previous list entry when no prefix keyword follows.
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                grantee_type.clone() };

            // If the dialect reserves this grantee-type keyword, the word we
            // consumed was actually the grantee's name: rewind one token so
            // it is re-read by `parse_grantee_name` below.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC has no name of its own.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // `namespace:name` form — fold both parts into a single
                    // identifier.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
15613
15614 pub fn parse_grant_deny_revoke_privileges_objects(
15615 &mut self,
15616 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15617 let privileges = if self.parse_keyword(Keyword::ALL) {
15618 Privileges::All {
15619 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15620 }
15621 } else {
15622 let actions = self.parse_actions_list()?;
15623 Privileges::Actions(actions)
15624 };
15625
15626 let objects = if self.parse_keyword(Keyword::ON) {
15627 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15628 Some(GrantObjects::AllTablesInSchema {
15629 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15630 })
15631 } else if self.parse_keywords(&[
15632 Keyword::ALL,
15633 Keyword::EXTERNAL,
15634 Keyword::TABLES,
15635 Keyword::IN,
15636 Keyword::SCHEMA,
15637 ]) {
15638 Some(GrantObjects::AllExternalTablesInSchema {
15639 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15640 })
15641 } else if self.parse_keywords(&[
15642 Keyword::ALL,
15643 Keyword::VIEWS,
15644 Keyword::IN,
15645 Keyword::SCHEMA,
15646 ]) {
15647 Some(GrantObjects::AllViewsInSchema {
15648 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15649 })
15650 } else if self.parse_keywords(&[
15651 Keyword::ALL,
15652 Keyword::MATERIALIZED,
15653 Keyword::VIEWS,
15654 Keyword::IN,
15655 Keyword::SCHEMA,
15656 ]) {
15657 Some(GrantObjects::AllMaterializedViewsInSchema {
15658 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15659 })
15660 } else if self.parse_keywords(&[
15661 Keyword::ALL,
15662 Keyword::FUNCTIONS,
15663 Keyword::IN,
15664 Keyword::SCHEMA,
15665 ]) {
15666 Some(GrantObjects::AllFunctionsInSchema {
15667 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15668 })
15669 } else if self.parse_keywords(&[
15670 Keyword::FUTURE,
15671 Keyword::SCHEMAS,
15672 Keyword::IN,
15673 Keyword::DATABASE,
15674 ]) {
15675 Some(GrantObjects::FutureSchemasInDatabase {
15676 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15677 })
15678 } else if self.parse_keywords(&[
15679 Keyword::FUTURE,
15680 Keyword::TABLES,
15681 Keyword::IN,
15682 Keyword::SCHEMA,
15683 ]) {
15684 Some(GrantObjects::FutureTablesInSchema {
15685 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15686 })
15687 } else if self.parse_keywords(&[
15688 Keyword::FUTURE,
15689 Keyword::EXTERNAL,
15690 Keyword::TABLES,
15691 Keyword::IN,
15692 Keyword::SCHEMA,
15693 ]) {
15694 Some(GrantObjects::FutureExternalTablesInSchema {
15695 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15696 })
15697 } else if self.parse_keywords(&[
15698 Keyword::FUTURE,
15699 Keyword::VIEWS,
15700 Keyword::IN,
15701 Keyword::SCHEMA,
15702 ]) {
15703 Some(GrantObjects::FutureViewsInSchema {
15704 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15705 })
15706 } else if self.parse_keywords(&[
15707 Keyword::FUTURE,
15708 Keyword::MATERIALIZED,
15709 Keyword::VIEWS,
15710 Keyword::IN,
15711 Keyword::SCHEMA,
15712 ]) {
15713 Some(GrantObjects::FutureMaterializedViewsInSchema {
15714 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15715 })
15716 } else if self.parse_keywords(&[
15717 Keyword::ALL,
15718 Keyword::SEQUENCES,
15719 Keyword::IN,
15720 Keyword::SCHEMA,
15721 ]) {
15722 Some(GrantObjects::AllSequencesInSchema {
15723 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15724 })
15725 } else if self.parse_keywords(&[
15726 Keyword::FUTURE,
15727 Keyword::SEQUENCES,
15728 Keyword::IN,
15729 Keyword::SCHEMA,
15730 ]) {
15731 Some(GrantObjects::FutureSequencesInSchema {
15732 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15733 })
15734 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15735 Some(GrantObjects::ResourceMonitors(
15736 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15737 ))
15738 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15739 Some(GrantObjects::ComputePools(
15740 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15741 ))
15742 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15743 Some(GrantObjects::FailoverGroup(
15744 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15745 ))
15746 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15747 Some(GrantObjects::ReplicationGroup(
15748 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15749 ))
15750 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15751 Some(GrantObjects::ExternalVolumes(
15752 self.parse_comma_separated(|p| p.parse_object_name(false))?,
15753 ))
15754 } else {
15755 let object_type = self.parse_one_of_keywords(&[
15756 Keyword::SEQUENCE,
15757 Keyword::DATABASE,
15758 Keyword::SCHEMA,
15759 Keyword::TABLE,
15760 Keyword::VIEW,
15761 Keyword::WAREHOUSE,
15762 Keyword::INTEGRATION,
15763 Keyword::VIEW,
15764 Keyword::WAREHOUSE,
15765 Keyword::INTEGRATION,
15766 Keyword::USER,
15767 Keyword::CONNECTION,
15768 Keyword::PROCEDURE,
15769 Keyword::FUNCTION,
15770 ]);
15771 let objects =
15772 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
15773 match object_type {
15774 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
15775 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
15776 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
15777 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
15778 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
15779 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
15780 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
15781 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
15782 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
15783 if let Some(name) = objects?.first() {
15784 self.parse_grant_procedure_or_function(name, &kw)?
15785 } else {
15786 self.expected("procedure or function name", self.peek_token())?
15787 }
15788 }
15789 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
15790 Some(unexpected_keyword) => return Err(ParserError::ParserError(
15791 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
15792 )),
15793 }
15794 }
15795 } else {
15796 None
15797 };
15798
15799 Ok((privileges, objects))
15800 }
15801
15802 fn parse_grant_procedure_or_function(
15803 &mut self,
15804 name: &ObjectName,
15805 kw: &Option<Keyword>,
15806 ) -> Result<Option<GrantObjects>, ParserError> {
15807 let arg_types = if self.consume_token(&Token::LParen) {
15808 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
15809 self.expect_token(&Token::RParen)?;
15810 list
15811 } else {
15812 vec![]
15813 };
15814 match kw {
15815 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
15816 name: name.clone(),
15817 arg_types,
15818 })),
15819 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
15820 name: name.clone(),
15821 arg_types,
15822 })),
15823 _ => self.expected("procedure or function keywords", self.peek_token())?,
15824 }
15825 }
15826
    /// Parses a single privilege (action) within the privilege list of a
    /// `GRANT`/`REVOKE`/`DENY` statement, e.g. `SELECT (a, b)`, `USAGE`,
    /// `CREATE SCHEMA`, `READ SESSION`.
    ///
    /// Multi-word privileges are tried before single-keyword ones so that a
    /// shorter branch does not consume the first word of a longer privilege
    /// (e.g. `READ SESSION` must be checked before `READ`).
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Helper: parse an optional parenthesized column list; an absent or
        // empty list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-word privileges first (longest-match-wins ordering).
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-keyword privileges (some take a trailing sub-type or
        // column list).
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            // Object type after CREATE is optional (plain `GRANT CREATE`).
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected("a privilege keyword", self.peek_token())?
        }
    }
15946
15947 fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
15948 if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
15950 Some(ActionCreateObjectType::ApplicationPackage)
15951 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15952 Some(ActionCreateObjectType::ComputePool)
15953 } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
15954 Some(ActionCreateObjectType::DataExchangeListing)
15955 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15956 Some(ActionCreateObjectType::ExternalVolume)
15957 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15958 Some(ActionCreateObjectType::FailoverGroup)
15959 } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
15960 Some(ActionCreateObjectType::NetworkPolicy)
15961 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
15962 Some(ActionCreateObjectType::OrganiationListing)
15963 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15964 Some(ActionCreateObjectType::ReplicationGroup)
15965 }
15966 else if self.parse_keyword(Keyword::ACCOUNT) {
15968 Some(ActionCreateObjectType::Account)
15969 } else if self.parse_keyword(Keyword::APPLICATION) {
15970 Some(ActionCreateObjectType::Application)
15971 } else if self.parse_keyword(Keyword::DATABASE) {
15972 Some(ActionCreateObjectType::Database)
15973 } else if self.parse_keyword(Keyword::INTEGRATION) {
15974 Some(ActionCreateObjectType::Integration)
15975 } else if self.parse_keyword(Keyword::ROLE) {
15976 Some(ActionCreateObjectType::Role)
15977 } else if self.parse_keyword(Keyword::SCHEMA) {
15978 Some(ActionCreateObjectType::Schema)
15979 } else if self.parse_keyword(Keyword::SHARE) {
15980 Some(ActionCreateObjectType::Share)
15981 } else if self.parse_keyword(Keyword::USER) {
15982 Some(ActionCreateObjectType::User)
15983 } else if self.parse_keyword(Keyword::WAREHOUSE) {
15984 Some(ActionCreateObjectType::Warehouse)
15985 } else {
15986 None
15987 }
15988 }
15989
15990 fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
15991 if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
15992 Ok(ActionApplyType::AggregationPolicy)
15993 } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
15994 Ok(ActionApplyType::AuthenticationPolicy)
15995 } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
15996 Ok(ActionApplyType::JoinPolicy)
15997 } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
15998 Ok(ActionApplyType::MaskingPolicy)
15999 } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
16000 Ok(ActionApplyType::PackagesPolicy)
16001 } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
16002 Ok(ActionApplyType::PasswordPolicy)
16003 } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
16004 Ok(ActionApplyType::ProjectionPolicy)
16005 } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
16006 Ok(ActionApplyType::RowAccessPolicy)
16007 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
16008 Ok(ActionApplyType::SessionPolicy)
16009 } else if self.parse_keyword(Keyword::TAG) {
16010 Ok(ActionApplyType::Tag)
16011 } else {
16012 self.expected("GRANT APPLY type", self.peek_token())
16013 }
16014 }
16015
16016 fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
16017 if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
16018 Some(ActionExecuteObjectType::DataMetricFunction)
16019 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
16020 Some(ActionExecuteObjectType::ManagedAlert)
16021 } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
16022 Some(ActionExecuteObjectType::ManagedTask)
16023 } else if self.parse_keyword(Keyword::ALERT) {
16024 Some(ActionExecuteObjectType::Alert)
16025 } else if self.parse_keyword(Keyword::TASK) {
16026 Some(ActionExecuteObjectType::Task)
16027 } else {
16028 None
16029 }
16030 }
16031
16032 fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
16033 if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
16034 Ok(ActionManageType::AccountSupportCases)
16035 } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
16036 Ok(ActionManageType::EventSharing)
16037 } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
16038 Ok(ActionManageType::ListingAutoFulfillment)
16039 } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
16040 Ok(ActionManageType::OrganizationSupportCases)
16041 } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
16042 Ok(ActionManageType::UserSupportCases)
16043 } else if self.parse_keyword(Keyword::GRANTS) {
16044 Ok(ActionManageType::Grants)
16045 } else if self.parse_keyword(Keyword::WAREHOUSES) {
16046 Ok(ActionManageType::Warehouses)
16047 } else {
16048 self.expected("GRANT MANAGE type", self.peek_token())
16049 }
16050 }
16051
16052 fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
16053 if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
16054 Some(ActionModifyType::LogLevel)
16055 } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
16056 Some(ActionModifyType::TraceLevel)
16057 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
16058 Some(ActionModifyType::SessionLogLevel)
16059 } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
16060 Some(ActionModifyType::SessionTraceLevel)
16061 } else {
16062 None
16063 }
16064 }
16065
16066 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
16067 if self.parse_keyword(Keyword::EXECUTION) {
16068 Some(ActionMonitorType::Execution)
16069 } else if self.parse_keyword(Keyword::SECURITY) {
16070 Some(ActionMonitorType::Security)
16071 } else if self.parse_keyword(Keyword::USAGE) {
16072 Some(ActionMonitorType::Usage)
16073 } else {
16074 None
16075 }
16076 }
16077
16078 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
16079 let mut name = self.parse_object_name(false)?;
16080 if self.dialect.supports_user_host_grantee()
16081 && name.0.len() == 1
16082 && name.0[0].as_ident().is_some()
16083 && self.consume_token(&Token::AtSign)
16084 {
16085 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
16086 let host = self.parse_identifier()?;
16087 Ok(GranteeName::UserHost { user, host })
16088 } else {
16089 Ok(GranteeName::ObjectName(name))
16090 }
16091 }
16092
16093 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
16095 self.expect_keyword(Keyword::DENY)?;
16096
16097 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16098 let objects = match objects {
16099 Some(o) => o,
16100 None => {
16101 return parser_err!(
16102 "DENY statements must specify an object",
16103 self.peek_token().span.start
16104 )
16105 }
16106 };
16107
16108 self.expect_keyword_is(Keyword::TO)?;
16109 let grantees = self.parse_grantees()?;
16110 let cascade = self.parse_cascade_option();
16111 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
16112 Some(self.parse_identifier()?)
16113 } else {
16114 None
16115 };
16116
16117 Ok(Statement::Deny(DenyStatement {
16118 privileges,
16119 objects,
16120 grantees,
16121 cascade,
16122 granted_by,
16123 }))
16124 }
16125
16126 pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
16128 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16129
16130 self.expect_keyword_is(Keyword::FROM)?;
16131 let grantees = self.parse_grantees()?;
16132
16133 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16134 Some(self.parse_identifier()?)
16135 } else {
16136 None
16137 };
16138
16139 let cascade = self.parse_cascade_option();
16140
16141 Ok(Statement::Revoke {
16142 privileges,
16143 objects,
16144 grantees,
16145 granted_by,
16146 cascade,
16147 })
16148 }
16149
16150 pub fn parse_replace(
16152 &mut self,
16153 replace_token: TokenWithSpan,
16154 ) -> Result<Statement, ParserError> {
16155 if !dialect_of!(self is MySqlDialect | GenericDialect) {
16156 return parser_err!(
16157 "Unsupported statement REPLACE",
16158 self.peek_token().span.start
16159 );
16160 }
16161
16162 let mut insert = self.parse_insert(replace_token)?;
16163 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16164 *replace_into = true;
16165 }
16166
16167 Ok(insert)
16168 }
16169
16170 fn parse_insert_setexpr_boxed(
16174 &mut self,
16175 insert_token: TokenWithSpan,
16176 ) -> Result<Box<SetExpr>, ParserError> {
16177 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
16178 }
16179
    /// Parses the body of an `INSERT` statement (also used by
    /// [`Parser::parse_replace`]). `insert_token` is the already-consumed
    /// leading `INSERT`/`REPLACE` token, kept for span information.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        // SQLite `INSERT OR REPLACE|ROLLBACK|ABORT|FAIL|IGNORE` clause.
        let or = self.parse_conflict_clause();
        // MySQL priority modifiers.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        // MySQL `INSERT IGNORE`.
        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Overwritten to `true` by parse_replace() after this returns.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // Hive: `INSERT OVERWRITE [LOCAL] DIRECTORY '<path>' [STORED AS fmt] <query>`.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Hive allows an explicit TABLE keyword: `INSERT INTO TABLE t`.
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Postgres: `INSERT INTO t AS alias`.
            let table_alias =
                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

            let is_mysql = dialect_of!(self is MySqlDialect);

            let (columns, partitioned, after_columns, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                // `INSERT ... DEFAULT VALUES`: no columns, no source.
                (vec![], None, vec![], None, vec![])
            } else {
                // Only try to read a column list if the next tokens are not
                // the start of a `(SELECT ...)` subquery source; otherwise
                // the parenthesis belongs to the source query.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive allows a second column list after PARTITION.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                // Source is absent when a ClickHouse FORMAT/SETTINGS clause
                // follows, or when a MySQL-style `SET a = 1, ...` is used.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (columns, partitioned, after_columns, source, assignments)
            };

            // ClickHouse trailing `SETTINGS ...` and `FORMAT ...` clauses.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL row alias: `INSERT ... AS row_alias (col_aliases)`.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ... DO NOTHING|DO UPDATE SET ...` (Postgres/SQLite)
            // or `ON DUPLICATE KEY UPDATE ...` (MySQL).
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    // Conflict target: constraint name, column list, or none.
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token() == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Statement::Insert(Insert {
                insert_token: insert_token.into(),
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
            }))
        }
    }
16373
16374 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16378 let ident = self.parse_identifier()?;
16379 let values = self
16380 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16381 .unwrap_or_default();
16382
16383 Ok(InputFormatClause { ident, values })
16384 }
16385
16386 fn peek_subquery_start(&mut self) -> bool {
16389 let [maybe_lparen, maybe_select] = self.peek_tokens();
16390 Token::LParen == maybe_lparen
16391 && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16392 }
16393
16394 fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16395 if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16396 Some(SqliteOnConflict::Replace)
16397 } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16398 Some(SqliteOnConflict::Rollback)
16399 } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16400 Some(SqliteOnConflict::Abort)
16401 } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16402 Some(SqliteOnConflict::Fail)
16403 } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16404 Some(SqliteOnConflict::Ignore)
16405 } else if self.parse_keyword(Keyword::REPLACE) {
16406 Some(SqliteOnConflict::Replace)
16407 } else {
16408 None
16409 }
16410 }
16411
16412 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16413 if self.parse_keyword(Keyword::PARTITION) {
16414 self.expect_token(&Token::LParen)?;
16415 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16416 self.expect_token(&Token::RParen)?;
16417 Ok(partition_cols)
16418 } else {
16419 Ok(None)
16420 }
16421 }
16422
16423 pub fn parse_load_data_table_format(
16424 &mut self,
16425 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16426 if self.parse_keyword(Keyword::INPUTFORMAT) {
16427 let input_format = self.parse_expr()?;
16428 self.expect_keyword_is(Keyword::SERDE)?;
16429 let serde = self.parse_expr()?;
16430 Ok(Some(HiveLoadDataFormat {
16431 input_format,
16432 serde,
16433 }))
16434 } else {
16435 Ok(None)
16436 }
16437 }
16438
16439 fn parse_update_setexpr_boxed(
16443 &mut self,
16444 update_token: TokenWithSpan,
16445 ) -> Result<Box<SetExpr>, ParserError> {
16446 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
16447 }
16448
16449 pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
16450 let or = self.parse_conflict_clause();
16451 let table = self.parse_table_and_joins()?;
16452 let from_before_set = if self.parse_keyword(Keyword::FROM) {
16453 Some(UpdateTableFromKind::BeforeSet(
16454 self.parse_table_with_joins()?,
16455 ))
16456 } else {
16457 None
16458 };
16459 self.expect_keyword(Keyword::SET)?;
16460 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16461 let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16462 Some(UpdateTableFromKind::AfterSet(
16463 self.parse_table_with_joins()?,
16464 ))
16465 } else {
16466 from_before_set
16467 };
16468 let selection = if self.parse_keyword(Keyword::WHERE) {
16469 Some(self.parse_expr()?)
16470 } else {
16471 None
16472 };
16473 let returning = if self.parse_keyword(Keyword::RETURNING) {
16474 Some(self.parse_comma_separated(Parser::parse_select_item)?)
16475 } else {
16476 None
16477 };
16478 let limit = if self.parse_keyword(Keyword::LIMIT) {
16479 Some(self.parse_expr()?)
16480 } else {
16481 None
16482 };
16483 Ok(Update {
16484 update_token: update_token.into(),
16485 table,
16486 assignments,
16487 from,
16488 selection,
16489 returning,
16490 or,
16491 limit,
16492 }
16493 .into())
16494 }
16495
16496 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16498 let target = self.parse_assignment_target()?;
16499 self.expect_token(&Token::Eq)?;
16500 let value = self.parse_expr()?;
16501 Ok(Assignment { target, value })
16502 }
16503
16504 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16506 if self.consume_token(&Token::LParen) {
16507 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16508 self.expect_token(&Token::RParen)?;
16509 Ok(AssignmentTarget::Tuple(columns))
16510 } else {
16511 let column = self.parse_object_name(false)?;
16512 Ok(AssignmentTarget::ColumnName(column))
16513 }
16514 }
16515
16516 pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16517 let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16518 self.maybe_parse(|p| {
16519 let name = p.parse_expr()?;
16520 let operator = p.parse_function_named_arg_operator()?;
16521 let arg = p.parse_wildcard_expr()?.into();
16522 Ok(FunctionArg::ExprNamed {
16523 name,
16524 arg,
16525 operator,
16526 })
16527 })?
16528 } else {
16529 self.maybe_parse(|p| {
16530 let name = p.parse_identifier()?;
16531 let operator = p.parse_function_named_arg_operator()?;
16532 let arg = p.parse_wildcard_expr()?.into();
16533 Ok(FunctionArg::Named {
16534 name,
16535 arg,
16536 operator,
16537 })
16538 })?
16539 };
16540 if let Some(arg) = arg {
16541 return Ok(arg);
16542 }
16543 Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16544 }
16545
    /// Parses the operator separating a named function argument from its
    /// value: the `VALUE` keyword, `=>`, `=`, `:=`, or `:` — each token form
    /// gated on the dialect's corresponding support flag.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not a recognized operator (or not supported by this
                // dialect): push the token back so callers using
                // maybe_parse() can backtrack cleanly, then report.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
16574
16575 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16576 if self.consume_token(&Token::RParen) {
16577 Ok(vec![])
16578 } else {
16579 let args = self.parse_comma_separated(Parser::parse_function_args)?;
16580 self.expect_token(&Token::RParen)?;
16581 Ok(args)
16582 }
16583 }
16584
16585 fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
16586 if self.consume_token(&Token::RParen) {
16587 return Ok(TableFunctionArgs {
16588 args: vec![],
16589 settings: None,
16590 });
16591 }
16592 let mut args = vec![];
16593 let settings = loop {
16594 if let Some(settings) = self.parse_settings()? {
16595 break Some(settings);
16596 }
16597 args.push(self.parse_function_args()?);
16598 if self.is_parse_comma_separated_end() {
16599 break None;
16600 }
16601 };
16602 self.expect_token(&Token::RParen)?;
16603 Ok(TableFunctionArgs { args, settings })
16604 }
16605
    /// Parses a full function argument list (the opening `(` is already
    /// consumed) including optional clauses such as DISTINCT/ALL,
    /// IGNORE/RESPECT NULLS, ORDER BY, LIMIT, HAVING MIN/MAX, SEPARATOR,
    /// ON OVERFLOW, and JSON NULL/RETURNING clauses, up to the closing `)`.
    ///
    /// Clause order matters: each clause is probed exactly once, in the
    /// order the supported dialects accept them.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // JSON clauses may appear before the arguments (e.g. JSON_ARRAY).
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list: `()` (possibly with the JSON clauses above).
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        // Optional leading DISTINCT/ALL, then the arguments themselves.
        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery aggregate `HAVING MIN|MAX <expr>` clause.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // expect_one_of_keywords only returns MIN/MAX; anything else
                // indicates an internal inconsistency.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL GROUP_CONCAT `SEPARATOR '<str>'` clause.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
        }

        // LISTAGG `ON OVERFLOW ...` clause.
        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // JSON clauses may also appear after the arguments.
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
16699
16700 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16701 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16702 Some(JsonNullClause::AbsentOnNull)
16703 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16704 Some(JsonNullClause::NullOnNull)
16705 } else {
16706 None
16707 }
16708 }
16709
16710 fn maybe_parse_json_returning_clause(
16711 &mut self,
16712 ) -> Result<Option<JsonReturningClause>, ParserError> {
16713 if self.parse_keyword(Keyword::RETURNING) {
16714 let data_type = self.parse_data_type()?;
16715 Ok(Some(JsonReturningClause { data_type }))
16716 } else {
16717 Ok(None)
16718 }
16719 }
16720
16721 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
16722 let loc = self.peek_token().span.start;
16723 match (
16724 self.parse_keyword(Keyword::ALL),
16725 self.parse_keyword(Keyword::DISTINCT),
16726 ) {
16727 (true, false) => Ok(Some(DuplicateTreatment::All)),
16728 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
16729 (false, false) => Ok(None),
16730 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
16731 }
16732 }
16733
    /// Parse a comma-delimited item of a SELECT projection list.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Dialect-specific operator keywords that may prefix a select item;
        // the keyword is preserved as an identifier via its Debug rendering.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // Qualified wildcard, e.g. `alias.*` or `schema.table.*`.
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*`.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted `from` here indicates a missing projection
            // expression, so report a clearer error instead.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token().span.start
                )
            }
            // `alias = expr` assignment-style aliasing, where supported.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                let Expr::Identifier(alias) = *left else {
                    // Guarded by the matches! above; reaching this is a bug.
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `expr.*` — wildcard expansion of an arbitrary expression.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // Plain expression with an optional alias.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
16795
    /// Parse the dialect-specific modifiers that may follow a wildcard
    /// (`ILIKE`, `EXCLUDE`, `EXCEPT`, `REPLACE`, `RENAME`), tried in the
    /// order written below; each is optional.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // ILIKE and EXCLUDE are mutually exclusive: EXCLUDE is only tried
        // when no ILIKE was parsed.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
        {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
        })
    }
16840
16841 pub fn parse_optional_select_item_ilike(
16845 &mut self,
16846 ) -> Result<Option<IlikeSelectItem>, ParserError> {
16847 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
16848 let next_token = self.next_token();
16849 let pattern = match next_token.token {
16850 Token::SingleQuotedString(s) => s,
16851 _ => return self.expected("ilike pattern", next_token),
16852 };
16853 Some(IlikeSelectItem { pattern })
16854 } else {
16855 None
16856 };
16857 Ok(opt_ilike)
16858 }
16859
16860 pub fn parse_optional_select_item_exclude(
16864 &mut self,
16865 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
16866 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
16867 if self.consume_token(&Token::LParen) {
16868 let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16869 self.expect_token(&Token::RParen)?;
16870 Some(ExcludeSelectItem::Multiple(columns))
16871 } else {
16872 let column = self.parse_identifier()?;
16873 Some(ExcludeSelectItem::Single(column))
16874 }
16875 } else {
16876 None
16877 };
16878
16879 Ok(opt_exclude)
16880 }
16881
16882 pub fn parse_optional_select_item_except(
16886 &mut self,
16887 ) -> Result<Option<ExceptSelectItem>, ParserError> {
16888 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
16889 if self.peek_token().token == Token::LParen {
16890 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
16891 match &idents[..] {
16892 [] => {
16893 return self.expected(
16894 "at least one column should be parsed by the expect clause",
16895 self.peek_token(),
16896 )?;
16897 }
16898 [first, idents @ ..] => Some(ExceptSelectItem {
16899 first_element: first.clone(),
16900 additional_elements: idents.to_vec(),
16901 }),
16902 }
16903 } else {
16904 let ident = self.parse_identifier()?;
16906 Some(ExceptSelectItem {
16907 first_element: ident,
16908 additional_elements: vec![],
16909 })
16910 }
16911 } else {
16912 None
16913 };
16914
16915 Ok(opt_except)
16916 }
16917
16918 pub fn parse_optional_select_item_rename(
16920 &mut self,
16921 ) -> Result<Option<RenameSelectItem>, ParserError> {
16922 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
16923 if self.consume_token(&Token::LParen) {
16924 let idents =
16925 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
16926 self.expect_token(&Token::RParen)?;
16927 Some(RenameSelectItem::Multiple(idents))
16928 } else {
16929 let ident = self.parse_identifier_with_alias()?;
16930 Some(RenameSelectItem::Single(ident))
16931 }
16932 } else {
16933 None
16934 };
16935
16936 Ok(opt_rename)
16937 }
16938
16939 pub fn parse_optional_select_item_replace(
16941 &mut self,
16942 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
16943 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
16944 if self.consume_token(&Token::LParen) {
16945 let items = self.parse_comma_separated(|parser| {
16946 Ok(Box::new(parser.parse_replace_elements()?))
16947 })?;
16948 self.expect_token(&Token::RParen)?;
16949 Some(ReplaceSelectItem { items })
16950 } else {
16951 let tok = self.next_token();
16952 return self.expected("( after REPLACE but", tok);
16953 }
16954 } else {
16955 None
16956 };
16957
16958 Ok(opt_replace)
16959 }
16960 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
16961 let expr = self.parse_expr()?;
16962 let as_keyword = self.parse_keyword(Keyword::AS);
16963 let ident = self.parse_identifier()?;
16964 Ok(ReplaceSelectElement {
16965 expr,
16966 column_name: ident,
16967 as_keyword,
16968 })
16969 }
16970
16971 pub fn parse_asc_desc(&mut self) -> Option<bool> {
16974 if self.parse_keyword(Keyword::ASC) {
16975 Some(true)
16976 } else if self.parse_keyword(Keyword::DESC) {
16977 Some(false)
16978 } else {
16979 None
16980 }
16981 }
16982
16983 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
16985 self.parse_order_by_expr_inner(false)
16986 .map(|(order_by, _)| order_by)
16987 }
16988
16989 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
16991 self.parse_order_by_expr_inner(true)
16992 .map(|(column, operator_class)| IndexColumn {
16993 column,
16994 operator_class,
16995 })
16996 }
16997
    /// Shared implementation behind [`Self::parse_order_by_expr`] and
    /// [`Self::parse_create_index_expr`].
    ///
    /// When `with_operator_class` is true, an identifier following the
    /// expression is treated as an operator class — unless the next keyword
    /// starts an ordering option, in which case no operator class is read.
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<Ident> = if with_operator_class {
            // An upcoming ASC/DESC/NULLS/WITH keyword belongs to the
            // ordering options, not to an operator class.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                self.maybe_parse(|parser| parser.parse_identifier())?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        // Optional `WITH FILL` clause (ClickHouseDialect/GenericDialect).
        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
17038
17039 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
17040 let asc = self.parse_asc_desc();
17041
17042 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
17043 Some(true)
17044 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
17045 Some(false)
17046 } else {
17047 None
17048 };
17049
17050 Ok(OrderByOptions { asc, nulls_first })
17051 }
17052
17053 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
17056 let from = if self.parse_keyword(Keyword::FROM) {
17057 Some(self.parse_expr()?)
17058 } else {
17059 None
17060 };
17061
17062 let to = if self.parse_keyword(Keyword::TO) {
17063 Some(self.parse_expr()?)
17064 } else {
17065 None
17066 };
17067
17068 let step = if self.parse_keyword(Keyword::STEP) {
17069 Some(self.parse_expr()?)
17070 } else {
17071 None
17072 };
17073
17074 Ok(WithFill { from, to, step })
17075 }
17076
17077 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
17080 if !self.parse_keyword(Keyword::INTERPOLATE) {
17081 return Ok(None);
17082 }
17083
17084 if self.consume_token(&Token::LParen) {
17085 let interpolations =
17086 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
17087 self.expect_token(&Token::RParen)?;
17088 return Ok(Some(Interpolate {
17090 exprs: Some(interpolations),
17091 }));
17092 }
17093
17094 Ok(Some(Interpolate { exprs: None }))
17096 }
17097
17098 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
17100 let column = self.parse_identifier()?;
17101 let expr = if self.parse_keyword(Keyword::AS) {
17102 Some(self.parse_expr()?)
17103 } else {
17104 None
17105 };
17106 Ok(InterpolateExpr { column, expr })
17107 }
17108
    /// Parse a `TOP` clause, assuming `TOP` was already consumed:
    /// `TOP (expr)` or `TOP n`, optionally followed by `PERCENT` and
    /// `WITH TIES`.
    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
        // Parenthesized form accepts an arbitrary expression; the bare form
        // requires a literal integer.
        let quantity = if self.consume_token(&Token::LParen) {
            let quantity = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(TopQuantity::Expr(quantity))
        } else {
            let next_token = self.next_token();
            let quantity = match next_token.token {
                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
                _ => self.expected("literal int", next_token)?,
            };
            Some(TopQuantity::Constant(quantity))
        };

        let percent = self.parse_keyword(Keyword::PERCENT);

        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);

        Ok(Top {
            with_ties,
            percent,
            quantity,
        })
    }
17135
17136 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17138 if self.parse_keyword(Keyword::ALL) {
17139 Ok(None)
17140 } else {
17141 Ok(Some(self.parse_expr()?))
17142 }
17143 }
17144
17145 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17147 let value = self.parse_expr()?;
17148 let rows = if self.parse_keyword(Keyword::ROW) {
17149 OffsetRows::Row
17150 } else if self.parse_keyword(Keyword::ROWS) {
17151 OffsetRows::Rows
17152 } else {
17153 OffsetRows::None
17154 };
17155 Ok(Offset { value, rows })
17156 }
17157
    /// Parse a `FETCH [FIRST|NEXT] [<quantity> [PERCENT]] [ROW|ROWS]
    /// [ONLY | WITH TIES]` clause, assuming `FETCH` was already consumed.
    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
        // FIRST/NEXT are interchangeable noise words.
        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);

        // If ROW/ROWS comes immediately, no quantity was given.
        let (quantity, percent) = if self
            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
            .is_some()
        {
            (None, false)
        } else {
            let quantity = Expr::Value(self.parse_value()?);
            let percent = self.parse_keyword(Keyword::PERCENT);
            // ROW/ROWS after the quantity is also optional noise.
            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
            (Some(quantity), percent)
        };

        // ONLY and WITH TIES are alternative terminators.
        let with_ties = if self.parse_keyword(Keyword::ONLY) {
            false
        } else {
            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
        };

        Ok(Fetch {
            with_ties,
            percent,
            quantity,
        })
    }
17186
17187 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
17189 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
17190 Keyword::UPDATE => LockType::Update,
17191 Keyword::SHARE => LockType::Share,
17192 unexpected_keyword => return Err(ParserError::ParserError(
17193 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
17194 )),
17195 };
17196 let of = if self.parse_keyword(Keyword::OF) {
17197 Some(self.parse_object_name(false)?)
17198 } else {
17199 None
17200 };
17201 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
17202 Some(NonBlock::Nowait)
17203 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
17204 Some(NonBlock::SkipLocked)
17205 } else {
17206 None
17207 };
17208 Ok(LockClause {
17209 lock_type,
17210 of,
17211 nonblock,
17212 })
17213 }
17214
    /// Parse a VALUES clause body: comma-separated parenthesized row
    /// constructors, each optionally prefixed with `ROW`. `allow_empty`
    /// permits `()` rows; `value_keyword` records whether the clause was
    /// introduced with `VALUE` rather than `VALUES`.
    pub fn parse_values(
        &mut self,
        allow_empty: bool,
        value_keyword: bool,
    ) -> Result<Values, ParserError> {
        // Set by the closure below if any row used the `ROW(...)` form.
        let mut explicit_row = false;

        let rows = self.parse_comma_separated(|parser| {
            if parser.parse_keyword(Keyword::ROW) {
                explicit_row = true;
            }

            parser.expect_token(&Token::LParen)?;
            if allow_empty && parser.peek_token().token == Token::RParen {
                // Consume the `)` of an empty row.
                parser.next_token();
                Ok(vec![])
            } else {
                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
                parser.expect_token(&Token::RParen)?;
                Ok(exprs)
            }
        })?;
        Ok(Values {
            explicit_row,
            rows,
            value_keyword,
        })
    }
17243
17244 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
17245 self.expect_keyword_is(Keyword::TRANSACTION)?;
17246 Ok(Statement::StartTransaction {
17247 modes: self.parse_transaction_modes()?,
17248 begin: false,
17249 transaction: Some(BeginTransactionKind::Transaction),
17250 modifier: None,
17251 statements: vec![],
17252 exception: None,
17253 has_end_keyword: false,
17254 })
17255 }
17256
    /// Parse a `BEGIN` statement, assuming `BEGIN` was already consumed:
    /// optional modifier, optional `TRANSACTION`/`WORK`, then modes.
    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
        // Modifier keywords are only recognized when the dialect opts in.
        let modifier = if !self.dialect.supports_start_transaction_modifier() {
            None
        } else if self.parse_keyword(Keyword::DEFERRED) {
            Some(TransactionModifier::Deferred)
        } else if self.parse_keyword(Keyword::IMMEDIATE) {
            Some(TransactionModifier::Immediate)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Some(TransactionModifier::Exclusive)
        } else if self.parse_keyword(Keyword::TRY) {
            Some(TransactionModifier::Try)
        } else if self.parse_keyword(Keyword::CATCH) {
            Some(TransactionModifier::Catch)
        } else {
            None
        };
        // Optional TRANSACTION/WORK noise word after BEGIN.
        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
            _ => None,
        };
        Ok(Statement::StartTransaction {
            modes: self.parse_transaction_modes()?,
            begin: true,
            transaction,
            modifier,
            statements: vec![],
            exception: None,
            has_end_keyword: false,
        })
    }
17288
    /// Parse the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block, assuming `BEGIN` was already consumed.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        // The statement list runs until EXCEPTION or END.
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each handler: WHEN <error> [OR <error>]... THEN <statements>.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                // One or more error identifiers separated by OR, terminated
                // by THEN.
                let mut idents = Vec::new();

                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // Consume an optional OR between identifiers.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
17333
17334 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17335 let modifier = if !self.dialect.supports_end_transaction_modifier() {
17336 None
17337 } else if self.parse_keyword(Keyword::TRY) {
17338 Some(TransactionModifier::Try)
17339 } else if self.parse_keyword(Keyword::CATCH) {
17340 Some(TransactionModifier::Catch)
17341 } else {
17342 None
17343 };
17344 Ok(Statement::Commit {
17345 chain: self.parse_commit_rollback_chain()?,
17346 end: true,
17347 modifier,
17348 })
17349 }
17350
    /// Parse a (possibly empty) list of transaction modes — isolation levels
    /// and/or access modes — separated by commas.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        // After a comma, another mode is mandatory; before the first mode
        // (or without a trailing comma) the list may simply end.
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected("isolation level", self.peek_token())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected("transaction mode", self.peek_token())?
            } else {
                break;
            };
            modes.push(mode);
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
17388
17389 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17390 Ok(Statement::Commit {
17391 chain: self.parse_commit_rollback_chain()?,
17392 end: false,
17393 modifier: None,
17394 })
17395 }
17396
17397 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17398 let chain = self.parse_commit_rollback_chain()?;
17399 let savepoint = self.parse_rollback_savepoint()?;
17400
17401 Ok(Statement::Rollback { chain, savepoint })
17402 }
17403
17404 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17405 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17406 if self.parse_keyword(Keyword::AND) {
17407 let chain = !self.parse_keyword(Keyword::NO);
17408 self.expect_keyword_is(Keyword::CHAIN)?;
17409 Ok(chain)
17410 } else {
17411 Ok(false)
17412 }
17413 }
17414
17415 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17416 if self.parse_keyword(Keyword::TO) {
17417 let _ = self.parse_keyword(Keyword::SAVEPOINT);
17418 let savepoint = self.parse_identifier()?;
17419
17420 Ok(Some(savepoint))
17421 } else {
17422 Ok(None)
17423 }
17424 }
17425
    /// Parse a `RAISERROR (message, severity, state [, args...])
    /// [WITH option, ...]` statement; the `RAISERROR` keyword has already
    /// been consumed.
    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
        self.expect_token(&Token::LParen)?;
        let message = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let severity = Box::new(self.parse_expr()?);
        self.expect_token(&Token::Comma)?;
        let state = Box::new(self.parse_expr()?);
        // Optional substitution arguments after the mandatory triple.
        let arguments = if self.consume_token(&Token::Comma) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;
        // Optional `WITH LOG|NOWAIT|SETERROR` options.
        let options = if self.parse_keyword(Keyword::WITH) {
            self.parse_comma_separated(Parser::parse_raiserror_option)?
        } else {
            vec![]
        };
        Ok(Statement::RaisError {
            message,
            severity,
            state,
            arguments,
            options,
        })
    }
17453
17454 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17455 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17456 Keyword::LOG => Ok(RaisErrorOption::Log),
17457 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17458 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17459 _ => self.expected(
17460 "LOG, NOWAIT OR SETERROR raiserror option",
17461 self.peek_token(),
17462 ),
17463 }
17464 }
17465
17466 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17467 let prepare = self.parse_keyword(Keyword::PREPARE);
17468 let name = self.parse_identifier()?;
17469 Ok(Statement::Deallocate { name, prepare })
17470 }
17471
    /// Parse an `EXECUTE [IMMEDIATE] ...` statement; the leading keyword has
    /// already been consumed.
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        // `EXECUTE IMMEDIATE` (where supported) carries no statement name.
        let name = if self.dialect.supports_execute_immediate()
            && self.parse_keyword(Keyword::IMMEDIATE)
        {
            None
        } else {
            let name = self.parse_object_name(false)?;
            Some(name)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Pick the token that terminates the parameter list: `)` for the
        // parenthesized form; otherwise EOF, a trailing-clause keyword
        // (USING/OUTPUT/DEFAULT), or `;`.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        // Possibly-empty parameter list up to (not including) end_token.
        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate: name.is_none(),
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
17525
17526 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17527 let name = self.parse_identifier()?;
17528
17529 let mut data_types = vec![];
17530 if self.consume_token(&Token::LParen) {
17531 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17532 self.expect_token(&Token::RParen)?;
17533 }
17534
17535 self.expect_keyword_is(Keyword::AS)?;
17536 let statement = Box::new(self.parse_statement()?);
17537 Ok(Statement::Prepare {
17538 name,
17539 data_types,
17540 statement,
17541 })
17542 }
17543
    /// Parse an `UNLOAD ('<query>' | <query>) TO <target> [IAM_ROLE ...]
    /// [WITH (...)] [legacy options...]` statement; the `UNLOAD` keyword is
    /// expected (and consumed) here.
    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::UNLOAD)?;
        self.expect_token(&Token::LParen)?;
        // The source is either a quoted query string or an inline query —
        // exactly one of `query` / `query_text` is populated.
        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
        {
            (None, Some(self.parse_literal_string()?))
        } else {
            (Some(self.parse_query()?), None)
        };
        self.expect_token(&Token::RParen)?;

        self.expect_keyword_is(Keyword::TO)?;
        let to = self.parse_identifier()?;
        // Optional IAM_ROLE authorization clause.
        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
            Some(self.parse_iam_role_kind()?)
        } else {
            None
        };
        let with = self.parse_options(Keyword::WITH)?;
        // Trailing legacy COPY-style options, consumed while they match.
        let mut options = vec![];
        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
            options.push(opt);
        }
        Ok(Statement::Unload {
            query,
            query_text,
            to,
            auth,
            with,
            options,
        })
    }
17576
17577 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17578 let temporary = self
17579 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17580 .is_some();
17581 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17582 let table = self.parse_keyword(Keyword::TABLE);
17583 let name = self.parse_object_name(false)?;
17584
17585 Ok(SelectInto {
17586 temporary,
17587 unlogged,
17588 table,
17589 name,
17590 })
17591 }
17592
    /// Parse a value acceptable in a PRAGMA statement: a single- or
    /// double-quoted string, a number, or a `?` placeholder.
    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
        match self.parse_value()?.value {
            v @ Value::SingleQuotedString(_) => Ok(v),
            v @ Value::DoubleQuotedString(_) => Ok(v),
            v @ Value::Number(_, _) => Ok(v),
            v @ Value::Placeholder(_) => Ok(v),
            _ => {
                // Rewind so the error message points at the offending token.
                self.prev_token();
                self.expected("number or string or ? placeholder", self.peek_token())
            }
        }
    }
17605
17606 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17608 let name = self.parse_object_name(false)?;
17609 if self.consume_token(&Token::LParen) {
17610 let value = self.parse_pragma_value()?;
17611 self.expect_token(&Token::RParen)?;
17612 Ok(Statement::Pragma {
17613 name,
17614 value: Some(value),
17615 is_eq: false,
17616 })
17617 } else if self.consume_token(&Token::Eq) {
17618 Ok(Statement::Pragma {
17619 name,
17620 value: Some(self.parse_pragma_value()?),
17621 is_eq: true,
17622 })
17623 } else {
17624 Ok(Statement::Pragma {
17625 name,
17626 value: None,
17627 is_eq: false,
17628 })
17629 }
17630 }
17631
17632 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17634 let extension_name = self.parse_identifier()?;
17635
17636 Ok(Statement::Install { extension_name })
17637 }
17638
    /// Parse the remainder of a `LOAD` statement: either `LOAD <extension>`
    /// or `LOAD DATA [LOCAL] INPATH '<path>' [OVERWRITE] INTO TABLE <name>`,
    /// depending on the dialect's capabilities.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        // Extension form takes precedence when the dialect supports it.
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token(),
            )
        }
    }
17669
    /// Parse an `OPTIMIZE TABLE <name> [ON CLUSTER ...] [PARTITION ...]
    /// [FINAL] [DEDUPLICATE [BY expr]]` statement; `OPTIMIZE` has already
    /// been consumed.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::TABLE)?;
        let name = self.parse_object_name(false)?;
        let on_cluster = self.parse_optional_on_cluster()?;

        // PARTITION ID '<id>' names a partition directly; otherwise the
        // partition is given as an expression.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);
        // Bare DEDUPLICATE deduplicates on all columns; DEDUPLICATE BY takes
        // an expression.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            on_cluster,
            partition,
            include_final,
            deduplicate,
        })
    }
17708
    /// Parse a `CREATE [TEMPORARY] SEQUENCE` statement; the keywords through
    /// `SEQUENCE` have already been consumed, with `temporary` recording
    /// whether a temporary-sequence keyword was present.
    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // Optional `AS <data type>`.
        let mut data_type: Option<DataType> = None;
        if self.parse_keywords(&[Keyword::AS]) {
            data_type = Some(self.parse_data_type()?)
        }
        let sequence_options = self.parse_create_sequence_options()?;
        // Optional `OWNED BY NONE | <column>`; NONE is represented as a
        // plain identifier in the object name.
        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
            if self.parse_keywords(&[Keyword::NONE]) {
                Some(ObjectName::from(vec![Ident::new("NONE")]))
            } else {
                Some(self.parse_object_name(false)?)
            }
        } else {
            None
        };
        Ok(Statement::CreateSequence {
            temporary,
            if_not_exists,
            name,
            data_type,
            sequence_options,
            owned_by,
        })
    }
17744
    /// Parse the option list of a `CREATE SEQUENCE` statement.
    ///
    /// Recognized, in this fixed order: `INCREMENT [BY] n`,
    /// `MINVALUE n | NO MINVALUE`, `MAXVALUE n | NO MAXVALUE`,
    /// `START [WITH] n`, `CACHE n`, and `CYCLE | NO CYCLE`.
    /// Unrecognized tokens are left unconsumed for the caller.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // The bool in `IncrementBy` records whether the optional `BY` keyword
        // was present, so the statement round-trips exactly.
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // `MINVALUE n` => Some(n); `NO MINVALUE` => None.
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // As with INCREMENT, the bool records whether `WITH` appeared.
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // `Cycle(true)` encodes the `NO CYCLE` form (the bool records the `NO`
        // prefix); `Cycle(false)` encodes plain `CYCLE`.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
17789
    /// Parse a PostgreSQL `CREATE SERVER` statement; the keywords up to and
    /// including `SERVER` have already been consumed.
    ///
    /// Grammar:
    /// `CREATE SERVER [IF NOT EXISTS] <name> [TYPE <t>] [VERSION <v>]
    ///  FOREIGN DATA WRAPPER <fdw> [OPTIONS (key value [, ...])]`
    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;

        let server_type = if self.parse_keyword(Keyword::TYPE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let version = if self.parse_keyword(Keyword::VERSION) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        // The FOREIGN DATA WRAPPER clause is mandatory.
        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
        let foreign_data_wrapper = self.parse_object_name(false)?;

        // OPTIONS entries are `key value` pairs (no `=`), comma-separated.
        let mut options = None;
        if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            options = Some(self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?);
            self.expect_token(&Token::RParen)?;
        }

        Ok(Statement::CreateServer(CreateServerStatement {
            name,
            if_not_exists: ine,
            server_type,
            version,
            foreign_data_wrapper,
            options,
        }))
    }
17832
    /// The current position (index) of the parser in the token stream.
    pub fn index(&self) -> usize {
        self.index
    }
17837
    /// Parse one entry of a `WINDOW` clause: `<ident> AS ( <window spec> )`,
    /// or `<ident> AS <other_window_name>` when the dialect allows a named
    /// window to reference another named window.
    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
        let ident = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;

        let window_expr = if self.consume_token(&Token::LParen) {
            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
        } else if self.dialect.supports_window_clause_named_window_reference() {
            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
        } else {
            return self.expected("(", self.peek_token());
        };

        Ok(NamedWindowDefinition(ident, window_expr))
    }
17852
    /// Parse a `CREATE [OR ALTER] PROCEDURE` statement; keywords up to and
    /// including `PROCEDURE` have already been consumed.
    ///
    /// `or_alter` records whether the `OR ALTER` form was used. The body is a
    /// conditional-statement sequence terminated by `END`.
    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        let params = self.parse_optional_procedure_parameters()?;

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;

        let body = self.parse_conditional_statements(&[Keyword::END])?;

        Ok(Statement::CreateProcedure {
            name,
            or_alter,
            params,
            language,
            body,
        })
    }
17875
    /// Parse a window specification. The caller has already consumed the
    /// opening `(`; this method consumes through the matching `)`.
    ///
    /// Shape: `[<base window name>] [PARTITION BY ...] [ORDER BY ...] [<frame>]`.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // An optional leading base-window name is only taken when the next
        // token is a plain word that is not a keyword; otherwise keywords like
        // PARTITION/ORDER would be misread as the name.
        let window_name = match self.peek_token().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // If the spec doesn't close immediately, whatever remains must be a
        // window frame, followed by the closing paren.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
17909
    /// Parse a `CREATE TYPE` statement; keywords up to and including `TYPE`
    /// have already been consumed.
    ///
    /// Handles four forms: shell type (`CREATE TYPE name`), SQL-definition
    /// type (`CREATE TYPE name (options)`), and the `AS` forms
    /// `AS ENUM (...)`, `AS RANGE (...)`, and `AS (attributes)` (composite).
    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let has_as = self.parse_keyword(Keyword::AS);

        if !has_as {
            // Without AS, a parenthesized list is the low-level SQL-definition
            // form (INPUT/OUTPUT/etc.)...
            if self.consume_token(&Token::LParen) {
                let options = self.parse_create_type_sql_definition_options()?;
                self.expect_token(&Token::RParen)?;
                return Ok(Statement::CreateType {
                    name,
                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
                });
            }

            // ...and no parenthesized list at all is a "shell" type with no
            // representation.
            return Ok(Statement::CreateType {
                name,
                representation: None,
            });
        }

        if self.parse_keyword(Keyword::ENUM) {
            self.parse_create_type_enum(name)
        } else if self.parse_keyword(Keyword::RANGE) {
            self.parse_create_type_range(name)
        } else if self.consume_token(&Token::LParen) {
            self.parse_create_type_composite(name)
        } else {
            self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
        }
    }
17949
    /// Parse the attribute list of a composite `CREATE TYPE name AS ( ... )`.
    /// The opening `(` has already been consumed by the caller.
    ///
    /// Each attribute is `<name> <data_type> [COLLATE <collation>]`; an empty
    /// list `()` is allowed.
    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
        // Empty attribute list: `CREATE TYPE t AS ()`.
        if self.consume_token(&Token::RParen) {
            return Ok(Statement::CreateType {
                name,
                representation: Some(UserDefinedTypeRepresentation::Composite {
                    attributes: vec![],
                }),
            });
        }

        let mut attributes = vec![];
        loop {
            let attr_name = self.parse_identifier()?;
            let attr_data_type = self.parse_data_type()?;
            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };
            attributes.push(UserDefinedTypeCompositeAttributeDef {
                name: attr_name,
                data_type: attr_data_type,
                collation: attr_collation,
            });

            // Attributes are comma-separated; the first non-comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }
        self.expect_token(&Token::RParen)?;

        Ok(Statement::CreateType {
            name,
            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
        })
    }
17990
17991 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17995 self.expect_token(&Token::LParen)?;
17996 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
17997 self.expect_token(&Token::RParen)?;
17998
17999 Ok(Statement::CreateType {
18000 name,
18001 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
18002 })
18003 }
18004
18005 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18009 self.expect_token(&Token::LParen)?;
18010 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
18011 self.expect_token(&Token::RParen)?;
18012
18013 Ok(Statement::CreateType {
18014 name,
18015 representation: Some(UserDefinedTypeRepresentation::Range { options }),
18016 })
18017 }
18018
    /// Parse one `key = value` option inside `CREATE TYPE ... AS RANGE (...)`.
    ///
    /// `SUBTYPE` takes a data type; all other options take an object name.
    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::SUBTYPE,
            Keyword::SUBTYPE_OPCLASS,
            Keyword::COLLATION,
            Keyword::CANONICAL,
            Keyword::SUBTYPE_DIFF,
            Keyword::MULTIRANGE_TYPE_NAME,
        ]);

        match keyword {
            Some(Keyword::SUBTYPE) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
            }
            Some(Keyword::SUBTYPE_OPCLASS) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
            }
            Some(Keyword::COLLATION) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::Collation(name))
            }
            Some(Keyword::CANONICAL) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::Canonical(name))
            }
            Some(Keyword::SUBTYPE_DIFF) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
            }
            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
            }
            _ => self.expected("range option keyword", self.peek_token()),
        }
    }
18064
    /// Parse the comma-separated option list of the SQL-definition form
    /// `CREATE TYPE <name> ( ... )`, stopping before the closing `)`.
    fn parse_create_type_sql_definition_options(
        &mut self,
    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
    }
18071
    /// Parse one `key = value` option of the SQL-definition form of
    /// `CREATE TYPE` (`INPUT`, `OUTPUT`, `STORAGE`, ...).
    ///
    /// `PASSEDBYVALUE` stands alone; every other option is followed by
    /// `= <value>`, whose expected shape depends on the key.
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        match keyword {
            // Support-function options: each takes `= <object name>`.
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            // INTERNALLENGTH = VARIABLE | <unsigned integer>
            Some(Keyword::INTERNALLENGTH) => {
                self.expect_token(&Token::Eq)?;
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // PASSEDBYVALUE is a bare flag with no `= value` part.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token(),
                    ),
                }
            }
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            // CATEGORY takes a one-character string literal.
            Some(Keyword::CATEGORY) => {
                self.expect_token(&Token::Eq)?;
                let category_str = self.parse_literal_string()?;
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                // TRUE => true, FALSE => false. NOTE(review): any other token
                // also yields `true` without being consumed — confirm this
                // lenient fallback is intended rather than an error.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                // Same lenient TRUE/FALSE handling as PREFERRED above.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected("SQL definition option keyword", self.peek_token()),
        }
    }
18252
    /// Parse a parenthesized, comma-separated (possibly empty) identifier
    /// list: `( [ident [, ...]] )`.
    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
        self.expect_token(&Token::LParen)?;
        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
        self.expect_token(&Token::RParen)?;
        Ok(idents)
    }
18259
18260 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18261 if dialect_of!(self is MySqlDialect | GenericDialect) {
18262 if self.parse_keyword(Keyword::FIRST) {
18263 Ok(Some(MySQLColumnPosition::First))
18264 } else if self.parse_keyword(Keyword::AFTER) {
18265 let ident = self.parse_identifier()?;
18266 Ok(Some(MySQLColumnPosition::After(ident)))
18267 } else {
18268 Ok(None)
18269 }
18270 } else {
18271 Ok(None)
18272 }
18273 }
18274
18275 fn parse_print(&mut self) -> Result<Statement, ParserError> {
18277 Ok(Statement::Print(PrintStatement {
18278 message: Box::new(self.parse_expr()?),
18279 }))
18280 }
18281
18282 fn parse_return(&mut self) -> Result<Statement, ParserError> {
18284 match self.maybe_parse(|p| p.parse_expr())? {
18285 Some(expr) => Ok(Statement::Return(ReturnStatement {
18286 value: Some(ReturnStatementValue::Expr(expr)),
18287 })),
18288 None => Ok(Statement::Return(ReturnStatement { value: None })),
18289 }
18290 }
18291
    /// Parse an `EXPORT DATA [WITH CONNECTION <name>] OPTIONS (...) AS <query>`
    /// statement (BigQuery-style syntax — TODO confirm against caller dialect).
    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;

        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        // The OPTIONS clause and the AS <query> part are both mandatory.
        self.expect_keyword(Keyword::OPTIONS)?;
        self.expect_token(&Token::LParen)?;
        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
        self.expect_token(&Token::RParen)?;
        self.expect_keyword(Keyword::AS)?;
        let query = self.parse_query()?;
        Ok(Statement::ExportData(ExportData {
            options,
            query,
            connection,
        }))
    }
18315
    /// Parse a `VACUUM` statement with its optional modifiers
    /// (`FULL`, `SORT ONLY`, `DELETE ONLY`, `REINDEX`, `RECLUSTER`), an
    /// optional table name, and — only when a table is named — an optional
    /// `TO <n> PERCENT` threshold and `BOOST` flag.
    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::VACUUM)?;
        let full = self.parse_keyword(Keyword::FULL);
        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
        let reindex = self.parse_keyword(Keyword::REINDEX);
        let recluster = self.parse_keyword(Keyword::RECLUSTER);
        // The table name is optional; threshold/boost are only meaningful
        // (and only parsed) when a table name is present.
        let (table_name, threshold, boost) =
            match self.maybe_parse(|p| p.parse_object_name(false))? {
                Some(table_name) => {
                    let threshold = if self.parse_keyword(Keyword::TO) {
                        let value = self.parse_value()?;
                        self.expect_keyword(Keyword::PERCENT)?;
                        Some(value.value)
                    } else {
                        None
                    };
                    let boost = self.parse_keyword(Keyword::BOOST);
                    (Some(table_name), threshold, boost)
                }
                _ => (None, None, false),
            };
        Ok(Statement::Vacuum(VacuumStatement {
            full,
            sort_only,
            delete_only,
            reindex,
            recluster,
            table_name,
            threshold,
            boost,
        }))
    }
18349
    /// Consume the parser and return its underlying token stream.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
18354
18355 fn peek_sub_query(&mut self) -> bool {
18357 if self
18358 .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18359 .is_some()
18360 {
18361 self.prev_token();
18362 return true;
18363 }
18364 false
18365 }
18366
    /// Parse the trailing option clauses of a `SHOW` statement: an optional
    /// LIKE/ILIKE/WHERE filter, an `IN`/`FROM` scope, `STARTS WITH`, `LIMIT`,
    /// and `FROM <value>`.
    ///
    /// The dialect decides whether the filter appears before the IN clause
    /// (recorded as `Infix`) or after it (`Suffix`).
    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
        let show_in;
        let mut filter_position = None;
        if self.dialect.supports_show_like_before_in() {
            if let Some(filter) = self.parse_show_statement_filter()? {
                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
            }
            show_in = self.maybe_parse_show_stmt_in()?;
        } else {
            show_in = self.maybe_parse_show_stmt_in()?;
            if let Some(filter) = self.parse_show_statement_filter()? {
                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
            }
        }
        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
        let limit = self.maybe_parse_show_stmt_limit()?;
        let from = self.maybe_parse_show_stmt_from()?;
        Ok(ShowStatementOptions {
            filter_position,
            show_in,
            starts_with,
            limit,
            limit_from: from,
        })
    }
18392
    /// Parse an optional `{ FROM | IN } [parent_type] [parent_name]` scope
    /// clause of a `SHOW` statement. Returns `Ok(None)` when neither `FROM`
    /// nor `IN` follows.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            _ => return self.expected("FROM or IN", self.peek_token()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` / `IN SCHEMA` directly followed by STARTS WITH or
            // LIMIT means the keyword is used without a name (the following
            // tokens belong to the next clause).
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            // A parent-type keyword, optionally followed by the parent's name.
            Some(parent_kw) => {
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token(),
                        )
                    }
                }
            }
            // No parent-type keyword: a bare object name. A second FROM/IN
            // (e.g. `SHOW COLUMNS FROM tbl FROM db`) supplies a qualifier,
            // which is prepended to the name.
            None => {
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
18462
18463 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18464 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18465 Ok(Some(self.parse_value()?.value))
18466 } else {
18467 Ok(None)
18468 }
18469 }
18470
18471 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18472 if self.parse_keyword(Keyword::LIMIT) {
18473 Ok(self.parse_limit()?)
18474 } else {
18475 Ok(None)
18476 }
18477 }
18478
18479 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18480 if self.parse_keyword(Keyword::FROM) {
18481 Ok(Some(self.parse_value()?.value))
18482 } else {
18483 Ok(None)
18484 }
18485 }
18486
    /// True when the parser is currently inside a column definition
    /// (see `ParserState::ColumnDefinition`).
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
18490
    /// Parse a sequence of `key = value` options, either space- or
    /// comma-delimited, optionally wrapped in parentheses.
    ///
    /// Parsing stops at EOF, at the closing `)` (only valid when
    /// `parenthesized`), or just before any keyword listed in `end_words`
    /// (that keyword is left unconsumed for the caller).
    pub(crate) fn parse_key_value_options(
        &mut self,
        parenthesized: bool,
        end_words: &[Keyword],
    ) -> Result<KeyValueOptions, ParserError> {
        let mut options: Vec<KeyValueOption> = Vec::new();
        // Default to space-delimited; flip to comma-delimited on the first
        // comma seen, so the statement round-trips with its original style.
        let mut delimiter = KeyValueOptionsDelimiter::Space;
        if parenthesized {
            self.expect_token(&Token::LParen)?;
        }
        loop {
            match self.next_token().token {
                Token::RParen => {
                    if parenthesized {
                        break;
                    } else {
                        return self.expected(" another option or EOF", self.peek_token());
                    }
                }
                Token::EOF => break,
                Token::Comma => {
                    delimiter = KeyValueOptionsDelimiter::Comma;
                    continue;
                }
                Token::Word(w) if !end_words.contains(&w.keyword) => {
                    options.push(self.parse_key_value_option(&w)?)
                }
                // An end word terminates the list and is pushed back.
                Token::Word(w) if end_words.contains(&w.keyword) => {
                    self.prev_token();
                    break;
                }
                _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
            };
        }

        Ok(KeyValueOptions { delimiter, options })
    }
18532
    /// Parse the `= value` part of a single key/value option whose key word
    /// has already been consumed.
    ///
    /// The value may be a quoted string, `TRUE`/`FALSE`, a number, a bare word
    /// (kept as a placeholder), a parenthesized list of values, or a nested
    /// parenthesized option list.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        match self.peek_token().token {
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
            }),
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
                })
            }
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
            }),
            // Any other bare word is preserved verbatim as a placeholder value.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
                        word.value.clone(),
                    )),
                })
            }
            Token::LParen => {
                // First try `( v1, v2, ... )` as a list of plain values; if
                // that fails, re-parse as a nested key/value option list.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => {
                        let values = values.into_iter().map(|v| v.value).collect();
                        Ok(KeyValueOption {
                            option_name: key.value.clone(),
                            option_value: KeyValueOptionKind::Multi(values),
                        })
                    }
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected("expected option value", self.peek_token()),
        }
    }
18593
18594 fn parse_reset(&mut self) -> Result<Statement, ParserError> {
18596 if self.parse_keyword(Keyword::ALL) {
18597 return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
18598 }
18599
18600 let obj = self.parse_object_name(false)?;
18601 Ok(Statement::Reset(ResetStatement {
18602 reset: Reset::ConfigurationParameter(obj),
18603 }))
18604 }
18605}
18606
18607fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18608 if let Some(prefix) = prefix {
18609 Expr::Prefixed {
18610 prefix,
18611 value: Box::new(expr),
18612 }
18613 } else {
18614 expr
18615 }
18616}
18617
impl Word {
    /// Build an [`Ident`] from this word with the given span, cloning the
    /// word's value and quote style.
    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
    pub fn to_ident(&self, span: Span) -> Ident {
        Ident {
            value: self.value.clone(),
            quote_style: self.quote_style,
            span,
        }
    }

    /// Convert this word into an [`Ident`] with the given span, consuming
    /// the word (no clone).
    pub fn into_ident(self, span: Span) -> Ident {
        Ident {
            value: self.value,
            quote_style: self.quote_style,
            span,
        }
    }
}
18637
18638#[cfg(test)]
18639mod tests {
18640 use crate::test_utils::{all_dialects, TestedDialects};
18641
18642 use super::*;
18643
    /// Exercises next_token/prev_token/peek_token bookkeeping, including the
    /// behavior of prev_token at and beyond EOF.
    #[test]
    fn test_prev_index() {
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            // Repeated next_token at the end keeps returning EOF.
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
18664
    /// Verifies that peek_tokens looks ahead without consuming, for lookahead
    /// windows of different sizes, and pads with EOF past the end of input.
    #[test]
    fn test_peek_tokens() {
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            // A wider peek still starts at the same (unconsumed) position.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            for _ in 0..4 {
                parser.next_token();
            }

            // Peeking past the end of input yields EOF padding.
            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
18709
18710 #[cfg(test)]
18711 mod test_parse_data_type {
18712 use crate::ast::{
18713 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
18714 };
18715 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
18716 use crate::test_utils::TestedDialects;
18717
        // Asserts that `$input` parses to `$expected_type` and that the AST
        // round-trips back to the exact input string via `Display`.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
18727
        /// ANSI character string types (CHARACTER/CHAR/VARYING/VARCHAR) with
        /// every combination of length and length unit, checked for both parse
        /// result and Display round-trip.
        #[test]
        fn test_ansii_character_string_types() {
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
18855
18856 #[test]
18857 fn test_ansii_character_large_object_types() {
18858 let dialect =
18860 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18861
18862 test_parse_data_type!(
18863 dialect,
18864 "CHARACTER LARGE OBJECT",
18865 DataType::CharacterLargeObject(None)
18866 );
18867 test_parse_data_type!(
18868 dialect,
18869 "CHARACTER LARGE OBJECT(20)",
18870 DataType::CharacterLargeObject(Some(20))
18871 );
18872
18873 test_parse_data_type!(
18874 dialect,
18875 "CHAR LARGE OBJECT",
18876 DataType::CharLargeObject(None)
18877 );
18878 test_parse_data_type!(
18879 dialect,
18880 "CHAR LARGE OBJECT(20)",
18881 DataType::CharLargeObject(Some(20))
18882 );
18883
18884 test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
18885 test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
18886 }
18887
18888 #[test]
18889 fn test_parse_custom_types() {
18890 let dialect =
18891 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18892
18893 test_parse_data_type!(
18894 dialect,
18895 "GEOMETRY",
18896 DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
18897 );
18898
18899 test_parse_data_type!(
18900 dialect,
18901 "GEOMETRY(POINT)",
18902 DataType::Custom(
18903 ObjectName::from(vec!["GEOMETRY".into()]),
18904 vec!["POINT".to_string()]
18905 )
18906 );
18907
18908 test_parse_data_type!(
18909 dialect,
18910 "GEOMETRY(POINT, 4326)",
18911 DataType::Custom(
18912 ObjectName::from(vec!["GEOMETRY".into()]),
18913 vec!["POINT".to_string(), "4326".to_string()]
18914 )
18915 );
18916 }
18917
18918 #[test]
18919 fn test_ansii_exact_numeric_types() {
18920 let dialect = TestedDialects::new(vec![
18922 Box::new(GenericDialect {}),
18923 Box::new(AnsiDialect {}),
18924 Box::new(PostgreSqlDialect {}),
18925 ]);
18926
18927 test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
18928
18929 test_parse_data_type!(
18930 dialect,
18931 "NUMERIC(2)",
18932 DataType::Numeric(ExactNumberInfo::Precision(2))
18933 );
18934
18935 test_parse_data_type!(
18936 dialect,
18937 "NUMERIC(2,10)",
18938 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
18939 );
18940
18941 test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
18942
18943 test_parse_data_type!(
18944 dialect,
18945 "DECIMAL(2)",
18946 DataType::Decimal(ExactNumberInfo::Precision(2))
18947 );
18948
18949 test_parse_data_type!(
18950 dialect,
18951 "DECIMAL(2,10)",
18952 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
18953 );
18954
18955 test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
18956
18957 test_parse_data_type!(
18958 dialect,
18959 "DEC(2)",
18960 DataType::Dec(ExactNumberInfo::Precision(2))
18961 );
18962
18963 test_parse_data_type!(
18964 dialect,
18965 "DEC(2,10)",
18966 DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
18967 );
18968
18969 test_parse_data_type!(
18971 dialect,
18972 "NUMERIC(10,-2)",
18973 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
18974 );
18975
18976 test_parse_data_type!(
18977 dialect,
18978 "DECIMAL(1000,-10)",
18979 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
18980 );
18981
18982 test_parse_data_type!(
18983 dialect,
18984 "DEC(5,-1000)",
18985 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
18986 );
18987
18988 test_parse_data_type!(
18989 dialect,
18990 "NUMERIC(10,-5)",
18991 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
18992 );
18993
18994 test_parse_data_type!(
18995 dialect,
18996 "DECIMAL(20,-10)",
18997 DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
18998 );
18999
19000 test_parse_data_type!(
19001 dialect,
19002 "DEC(5,-2)",
19003 DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
19004 );
19005
19006 dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
19007 let data_type = parser.parse_data_type().unwrap();
19008 assert_eq!(
19009 DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
19010 data_type
19011 );
19012 assert_eq!("NUMERIC(10,5)", data_type.to_string());
19014 });
19015 }
19016
19017 #[test]
19018 fn test_ansii_date_type() {
19019 let dialect =
19021 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19022
19023 test_parse_data_type!(dialect, "DATE", DataType::Date);
19024
19025 test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
19026
19027 test_parse_data_type!(
19028 dialect,
19029 "TIME(6)",
19030 DataType::Time(Some(6), TimezoneInfo::None)
19031 );
19032
19033 test_parse_data_type!(
19034 dialect,
19035 "TIME WITH TIME ZONE",
19036 DataType::Time(None, TimezoneInfo::WithTimeZone)
19037 );
19038
19039 test_parse_data_type!(
19040 dialect,
19041 "TIME(6) WITH TIME ZONE",
19042 DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
19043 );
19044
19045 test_parse_data_type!(
19046 dialect,
19047 "TIME WITHOUT TIME ZONE",
19048 DataType::Time(None, TimezoneInfo::WithoutTimeZone)
19049 );
19050
19051 test_parse_data_type!(
19052 dialect,
19053 "TIME(6) WITHOUT TIME ZONE",
19054 DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
19055 );
19056
19057 test_parse_data_type!(
19058 dialect,
19059 "TIMESTAMP",
19060 DataType::Timestamp(None, TimezoneInfo::None)
19061 );
19062
19063 test_parse_data_type!(
19064 dialect,
19065 "TIMESTAMP(22)",
19066 DataType::Timestamp(Some(22), TimezoneInfo::None)
19067 );
19068
19069 test_parse_data_type!(
19070 dialect,
19071 "TIMESTAMP(22) WITH TIME ZONE",
19072 DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
19073 );
19074
19075 test_parse_data_type!(
19076 dialect,
19077 "TIMESTAMP(33) WITHOUT TIME ZONE",
19078 DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
19079 );
19080 }
19081 }
19082
19083 #[test]
19084 fn test_parse_schema_name() {
19085 macro_rules! test_parse_schema_name {
19087 ($input:expr, $expected_name:expr $(,)?) => {{
19088 all_dialects().run_parser_method(&*$input, |parser| {
19089 let schema_name = parser.parse_schema_name().unwrap();
19090 assert_eq!(schema_name, $expected_name);
19092 assert_eq!(schema_name.to_string(), $input.to_string());
19094 });
19095 }};
19096 }
19097
19098 let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19099 let dummy_authorization = Ident::new("dummy_authorization");
19100
19101 test_parse_schema_name!(
19102 format!("{dummy_name}"),
19103 SchemaName::Simple(dummy_name.clone())
19104 );
19105
19106 test_parse_schema_name!(
19107 format!("AUTHORIZATION {dummy_authorization}"),
19108 SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19109 );
19110 test_parse_schema_name!(
19111 format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19112 SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19113 );
19114 }
19115
19116 #[test]
19117 fn mysql_parse_index_table_constraint() {
19118 macro_rules! test_parse_table_constraint {
19119 ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19120 $dialect.run_parser_method(&*$input, |parser| {
19121 let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19122 assert_eq!(constraint, $expected);
19124 assert_eq!(constraint.to_string(), $input.to_string());
19126 });
19127 }};
19128 }
19129
19130 fn mk_expected_col(name: &str) -> IndexColumn {
19131 IndexColumn {
19132 column: OrderByExpr {
19133 expr: Expr::Identifier(name.into()),
19134 options: OrderByOptions {
19135 asc: None,
19136 nulls_first: None,
19137 },
19138 with_fill: None,
19139 },
19140 operator_class: None,
19141 }
19142 }
19143
19144 let dialect =
19145 TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19146
19147 test_parse_table_constraint!(
19148 dialect,
19149 "INDEX (c1)",
19150 IndexConstraint {
19151 display_as_key: false,
19152 name: None,
19153 index_type: None,
19154 columns: vec![mk_expected_col("c1")],
19155 index_options: vec![],
19156 }
19157 .into()
19158 );
19159
19160 test_parse_table_constraint!(
19161 dialect,
19162 "KEY (c1)",
19163 IndexConstraint {
19164 display_as_key: true,
19165 name: None,
19166 index_type: None,
19167 columns: vec![mk_expected_col("c1")],
19168 index_options: vec![],
19169 }
19170 .into()
19171 );
19172
19173 test_parse_table_constraint!(
19174 dialect,
19175 "INDEX 'index' (c1, c2)",
19176 TableConstraint::Index(IndexConstraint {
19177 display_as_key: false,
19178 name: Some(Ident::with_quote('\'', "index")),
19179 index_type: None,
19180 columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
19181 index_options: vec![],
19182 })
19183 );
19184
19185 test_parse_table_constraint!(
19186 dialect,
19187 "INDEX USING BTREE (c1)",
19188 IndexConstraint {
19189 display_as_key: false,
19190 name: None,
19191 index_type: Some(IndexType::BTree),
19192 columns: vec![mk_expected_col("c1")],
19193 index_options: vec![],
19194 }
19195 .into()
19196 );
19197
19198 test_parse_table_constraint!(
19199 dialect,
19200 "INDEX USING HASH (c1)",
19201 IndexConstraint {
19202 display_as_key: false,
19203 name: None,
19204 index_type: Some(IndexType::Hash),
19205 columns: vec![mk_expected_col("c1")],
19206 index_options: vec![],
19207 }
19208 .into()
19209 );
19210
19211 test_parse_table_constraint!(
19212 dialect,
19213 "INDEX idx_name USING BTREE (c1)",
19214 IndexConstraint {
19215 display_as_key: false,
19216 name: Some(Ident::new("idx_name")),
19217 index_type: Some(IndexType::BTree),
19218 columns: vec![mk_expected_col("c1")],
19219 index_options: vec![],
19220 }
19221 .into()
19222 );
19223
19224 test_parse_table_constraint!(
19225 dialect,
19226 "INDEX idx_name USING HASH (c1)",
19227 IndexConstraint {
19228 display_as_key: false,
19229 name: Some(Ident::new("idx_name")),
19230 index_type: Some(IndexType::Hash),
19231 columns: vec![mk_expected_col("c1")],
19232 index_options: vec![],
19233 }
19234 .into()
19235 );
19236 }
19237
19238 #[test]
19239 fn test_tokenizer_error_loc() {
19240 let sql = "foo '";
19241 let ast = Parser::parse_sql(&GenericDialect, sql);
19242 assert_eq!(
19243 ast,
19244 Err(ParserError::TokenizerError(
19245 "Unterminated string literal at Line: 1, Column: 5".to_string()
19246 ))
19247 );
19248 }
19249
19250 #[test]
19251 fn test_parser_error_loc() {
19252 let sql = "SELECT this is a syntax error";
19253 let ast = Parser::parse_sql(&GenericDialect, sql);
19254 assert_eq!(
19255 ast,
19256 Err(ParserError::ParserError(
19257 "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
19258 .to_string()
19259 ))
19260 );
19261 }
19262
19263 #[test]
19264 fn test_nested_explain_error() {
19265 let sql = "EXPLAIN EXPLAIN SELECT 1";
19266 let ast = Parser::parse_sql(&GenericDialect, sql);
19267 assert_eq!(
19268 ast,
19269 Err(ParserError::ParserError(
19270 "Explain must be root of the plan".to_string()
19271 ))
19272 );
19273 }
19274
19275 #[test]
19276 fn test_parse_multipart_identifier_positive() {
19277 let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
19278
19279 let expected = vec![
19281 Ident {
19282 value: "CATALOG".to_string(),
19283 quote_style: None,
19284 span: Span::empty(),
19285 },
19286 Ident {
19287 value: "F(o)o. \"bar".to_string(),
19288 quote_style: Some('"'),
19289 span: Span::empty(),
19290 },
19291 Ident {
19292 value: "table".to_string(),
19293 quote_style: None,
19294 span: Span::empty(),
19295 },
19296 ];
19297 dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
19298 let actual = parser.parse_multipart_identifier().unwrap();
19299 assert_eq!(expected, actual);
19300 });
19301
19302 let expected = vec![
19304 Ident {
19305 value: "CATALOG".to_string(),
19306 quote_style: None,
19307 span: Span::empty(),
19308 },
19309 Ident {
19310 value: "table".to_string(),
19311 quote_style: None,
19312 span: Span::empty(),
19313 },
19314 ];
19315 dialect.run_parser_method("CATALOG . table", |parser| {
19316 let actual = parser.parse_multipart_identifier().unwrap();
19317 assert_eq!(expected, actual);
19318 });
19319 }
19320
19321 #[test]
19322 fn test_parse_multipart_identifier_negative() {
19323 macro_rules! test_parse_multipart_identifier_error {
19324 ($input:expr, $expected_err:expr $(,)?) => {{
19325 all_dialects().run_parser_method(&*$input, |parser| {
19326 let actual_err = parser.parse_multipart_identifier().unwrap_err();
19327 assert_eq!(actual_err.to_string(), $expected_err);
19328 });
19329 }};
19330 }
19331
19332 test_parse_multipart_identifier_error!(
19333 "",
19334 "sql parser error: Empty input when parsing identifier",
19335 );
19336
19337 test_parse_multipart_identifier_error!(
19338 "*schema.table",
19339 "sql parser error: Unexpected token in identifier: *",
19340 );
19341
19342 test_parse_multipart_identifier_error!(
19343 "schema.table*",
19344 "sql parser error: Unexpected token in identifier: *",
19345 );
19346
19347 test_parse_multipart_identifier_error!(
19348 "schema.table.",
19349 "sql parser error: Trailing period in identifier",
19350 );
19351
19352 test_parse_multipart_identifier_error!(
19353 "schema.*",
19354 "sql parser error: Unexpected token following period in identifier: *",
19355 );
19356 }
19357
19358 #[test]
19359 fn test_mysql_partition_selection() {
19360 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
19361 let expected = vec!["p0", "p2"];
19362
19363 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
19364 assert_eq!(ast.len(), 1);
19365 if let Statement::Query(v) = &ast[0] {
19366 if let SetExpr::Select(select) = &*v.body {
19367 assert_eq!(select.from.len(), 1);
19368 let from: &TableWithJoins = &select.from[0];
19369 let table_factor = &from.relation;
19370 if let TableFactor::Table { partitions, .. } = table_factor {
19371 let actual: Vec<&str> = partitions
19372 .iter()
19373 .map(|ident| ident.value.as_str())
19374 .collect();
19375 assert_eq!(expected, actual);
19376 }
19377 }
19378 } else {
19379 panic!("fail to parse mysql partition selection");
19380 }
19381 }
19382
19383 #[test]
19384 fn test_replace_into_placeholders() {
19385 let sql = "REPLACE INTO t (a) VALUES (&a)";
19386
19387 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19388 }
19389
19390 #[test]
19391 fn test_replace_into_set_placeholder() {
19392 let sql = "REPLACE INTO t SET ?";
19393
19394 assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19395 }
19396
19397 #[test]
19398 fn test_replace_incomplete() {
19399 let sql = r#"REPLACE"#;
19400
19401 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
19402 }
19403
19404 #[test]
19405 fn test_placeholder_invalid_whitespace() {
19406 for w in [" ", "/*invalid*/"] {
19407 let sql = format!("\nSELECT\n :{w}fooBar");
19408 assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
19409 }
19410 }
19411}